Mirror of https://github.com/ansible-collections/community.general.git
Update validate-modules (#20932)
* Update validate-modules
* Validates ANSIBLE_METADATA
* Ensures imports happen after documentation vars
* Some pep8 cleanup
* Clean up some left over unneeded code
* Update modules for new module guidelines and validate-modules checks
* Update imports for ec2_vpc_route_table and ec2_vpc_nat_gateway
parent 1718719d77
commit 829c0b8f62
178 changed files with 1849 additions and 1783 deletions
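For illustration only (not part of the diff below): a minimal sketch of the module layout the updated checks expect, with ANSIBLE_METADATA, DOCUMENTATION, EXAMPLES and RETURN defined before any imports. The module name and option are invented and the documentation is abbreviated, so a real module would need fuller documentation fields.

#!/usr/bin/python
# Sketch of the expected layout only.

ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'version': '1.0'}

DOCUMENTATION = '''
---
module: example_module
short_description: Hypothetical module illustrating the layout
description:
  - Returns the name it was given.
options:
  name:
    description:
      - Name to report back.
    required: true
'''

EXAMPLES = '''
- example_module:
    name: hello
'''

RETURN = '''
name:
  description: The name that was passed in.
  returned: always
  type: string
'''

# Imports come only after the documentation variables.
from ansible.module_utils.basic import AnsibleModule


def main():
    module = AnsibleModule(argument_spec=dict(name=dict(required=True)))
    module.exit_json(changed=False, name=module.params['name'])


if __name__ == '__main__':
    main()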
@@ -43,3 +43,13 @@ doc_schema = Schema(
     },
     extra=ALLOW_EXTRA
 )
+
+metadata_schema = Schema(
+    {
+        Required('status'): [Any('stableinterface', 'preview', 'deprecated',
+                                 'removed')],
+        Required('version'): '1.0',
+        Required('supported_by'): Any('core', 'community', 'unmaintained',
+                                      'committer')
+    }
+)
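For illustration (not part of the commit): how the new metadata_schema behaves under voluptuous. The sample dictionaries are invented.

from voluptuous import Any, MultipleInvalid, Required, Schema

metadata_schema = Schema(
    {
        Required('status'): [Any('stableinterface', 'preview', 'deprecated',
                                 'removed')],
        Required('version'): '1.0',
        Required('supported_by'): Any('core', 'community', 'unmaintained',
                                      'committer')
    }
)

# A well-formed ANSIBLE_METADATA dict passes validation.
metadata_schema({'status': ['preview'],
                 'supported_by': 'community',
                 'version': '1.0'})

# An unknown 'supported_by' value raises MultipleInvalid, which
# validate-modules turns into ANSIBLE_METADATA.* error messages.
try:
    metadata_schema({'status': ['preview'],
                     'supported_by': 'me',
                     'version': '1.0'})
except MultipleInvalid as e:
    print(e.errors)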
@@ -10,17 +10,17 @@
 # 2) source hacking/env-setup
 # 3) PYTHONPATH=./lib nosetests -d -w test -v --nocapture sanity/validate-modules
 
 import os
 import re
 from ansible.compat.tests import unittest
 
-#TYPE_REGEX = re.compile(r'.*\stype\(.*')
-#TYPE_REGEX = re.compile(r'.*(if|or)\stype\(.*')
-#TYPE_REGEX = re.compile(r'.*(if|or)(\s+.*|\s+)type\(.*')
-#TYPE_REGEX = re.compile(r'.*(if|or)(\s+.*|\s+)type\(.*')
-#TYPE_REGEX = re.compile(r'.*(if|\sor)(\s+.*|\s+)type\(.*')
-#TYPE_REGEX = re.compile(r'.*(if|\sor)(\s+.*|\s+)(?<!_)type\(.*')
-TYPE_REGEX = re.compile(r'.*(if|or)(\s+.*|\s+)(?<!_)(?<!str\()type\(.*')
+# TYPE_REGEX = re.compile(r'.*\stype\(.*')
+# TYPE_REGEX = re.compile(r'.*(if|or)\stype\(.*')
+# TYPE_REGEX = re.compile(r'.*(if|or)(\s+.*|\s+)type\(.*')
+# TYPE_REGEX = re.compile(r'.*(if|or)(\s+.*|\s+)type\(.*')
+# TYPE_REGEX = re.compile(r'.*(if|\sor)(\s+.*|\s+)type\(.*')
+# TYPE_REGEX = re.compile(r'.*(if|\sor)(\s+.*|\s+)(?<!_)type\(.*')
+TYPE_REGEX = re.compile(r'.*(if|or)(\s+.*|\s+)(?<!_)(?<!str\()type\(.*')
 
 
 class TestValidateModulesRegex(unittest.TestCase):
@@ -42,18 +42,18 @@ class TestValidateModulesRegex(unittest.TestCase):
             ['if foo or type(bar) != Bar', True],
             ['x = type(foo)', False],
             ["error = err.message + ' ' + str(err) + ' - ' + str(type(err))", False],
-            #cloud/amazon/ec2_group.py
+            # cloud/amazon/ec2_group.py
             ["module.fail_json(msg='Invalid rule parameter type [%s].' % type(rule))", False],
-            #files/patch.py
-            ["p = type('Params', (), module.params)", False], #files/patch.py
-            #system/osx_defaults.py
+            # files/patch.py
+            ["p = type('Params', (), module.params)", False], # files/patch.py
+            # system/osx_defaults.py
             ["if self.current_value is not None and not isinstance(self.current_value, type(self.value)):", True],
-            #system/osx_defaults.py
+            # system/osx_defaults.py
             ['raise OSXDefaultsException("Type mismatch. Type in defaults: " + type(self.current_value).__name__)', False],
-            #network/nxos/nxos_interface.py
+            # network/nxos/nxos_interface.py
             ["if get_interface_type(interface) == 'svi':", False],
         ]
-        for idc,check in enumerate(checks):
+        for idc, check in enumerate(checks):
             cstring = check[0]
             cexpected = check[1]
@@ -62,4 +62,3 @@ class TestValidateModulesRegex(unittest.TestCase):
                 assert False, "%s should have matched" % cstring
             elif not cexpected and match:
                 assert False, "%s should not have matched" % cstring
-
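For illustration (not part of the commit): the TYPE_REGEX exercised by the test above, applied to a few of the sample lines from the checks list.

import re

TYPE_REGEX = re.compile(r'.*(if|or)(\s+.*|\s+)(?<!_)(?<!str\()type\(.*')

print(bool(TYPE_REGEX.match('if foo or type(bar) != Bar')))  # True: flagged comparison
print(bool(TYPE_REGEX.match('x = type(foo)')))               # False: plain assignment
print(bool(TYPE_REGEX.match("if get_interface_type(interface) == 'svi':")))  # False: '_type(' is excluded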
@@ -22,6 +22,9 @@ import sys
 # We only use StringIO, since we cannot setattr on cStringIO
 from StringIO import StringIO
 
+import yaml
+import yaml.reader
+
 
 def find_globals(g, tree):
     """Uses AST to find globals in an ast tree"""
@@ -66,3 +69,28 @@ class CaptureStd():
         """Return ``(stdout, stderr)``"""
 
         return self.stdout.getvalue(), self.stderr.getvalue()
+
+
+def parse_yaml(value, lineno, module, name):
+    traces = []
+    errors = []
+    data = None
+    try:
+        data = yaml.safe_load(value)
+    except yaml.MarkedYAMLError as e:
+        e.problem_mark.line += lineno - 1
+        e.problem_mark.name = '%s.%s' % (module, name)
+        errors.append('%s is not valid YAML. Line %d column %d' %
+                      (name, e.problem_mark.line + 1,
+                       e.problem_mark.column + 1))
+        traces.append(e)
+    except yaml.reader.ReaderError as e:
+        traces.append(e)
+        errors.append('%s is not valid YAML. Character '
+                      '0x%x at position %d.' %
+                      (name, e.character, e.position))
+    except yaml.YAMLError as e:
+        traces.append(e)
+        errors.append('%s is not valid YAML: %s: %s' % (name, type(e), e))
+
+    return data, errors, traces
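For illustration (not part of the commit): how validate-modules is expected to call the new parse_yaml helper. This assumes the utils module shown above is importable under the same Python 2 interpreter the tooling targets (it uses StringIO), and the exact line and column in the message depend on the YAML error.

from utils import parse_yaml  # assumes the utils module above is on the path

bad_yaml = 'description:\n  - first\n   bad indent\n'
data, errors, traces = parse_yaml(bad_yaml, 10, 'my_module', 'DOCUMENTATION')

print(data)         # None, the document could not be parsed
print(errors)       # e.g. ['DOCUMENTATION is not valid YAML. Line 12 column 4']
print(len(traces))  # 1, the original MarkedYAMLError kept for later inspection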
@@ -36,18 +36,15 @@ from ansible.utils.module_docs import BLACKLIST_MODULES, get_docstring
 
 from module_args import get_argument_spec
 
-from schema import doc_schema, option_schema
+from schema import doc_schema, option_schema, metadata_schema
 
-from utils import CaptureStd
+from utils import CaptureStd, parse_yaml
 from voluptuous.humanize import humanize_error
 
-import yaml
-import yaml.reader
-
 
 BLACKLIST_DIRS = frozenset(('.git', 'test', '.github', '.idea'))
 INDENT_REGEX = re.compile(r'([\t]*)')
 TYPE_REGEX = re.compile(r'.*(if|or)(\s+.*|\s+)(?<!_)(?<!str\()type\(.*')
 BLACKLIST_IMPORTS = {
     'requests': {
         'new_only': True,
@@ -205,7 +202,10 @@ class ModuleValidator(Validator):
         for line_no, line in enumerate(self.text.splitlines()):
             typekeyword = TYPE_REGEX.match(line)
             if typekeyword:
-                self.errors.append('Type comparison using type() found on line %d. Use isinstance() instead' % (line_no + 1))
+                self.errors.append(
+                    'Type comparison using type() found on '
+                    'line %d. Use isinstance() instead' % (line_no + 1)
+                )
 
     def _check_for_sys_exit(self):
         if 'sys.exit(' in self.text:
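The wrapped message relies on Python's implicit concatenation of adjacent string literals, so the reported error text is unchanged; a quick check (not part of the commit):

msg = ('Type comparison using type() found on '
       'line %d. Use isinstance() instead' % (42,))
assert msg == 'Type comparison using type() found on line 42. Use isinstance() instead'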
@@ -325,6 +325,37 @@ class ModuleValidator(Validator):
             self.warnings.append('Found Try/Except block without HAS_ '
                                  'assginment')
 
+    def _ensure_imports_below_docs(self, doc_info):
+        doc_lines = [doc_info[key]['lineno'] for key in doc_info]
+
+        for child in self.ast.body:
+            if isinstance(child, (ast.Import, ast.ImportFrom)):
+                for lineno in doc_lines:
+                    if child.lineno < lineno:
+                        self.errors.append(
+                            'Import found before documentation variables. '
+                            'All imports must appear below '
+                            'DOCUMENTATION/EXAMPLES/RETURN/ANSIBLE_METADATA. '
+                            'line %d' % (child.lineno,)
+                        )
+                        break
+            elif isinstance(child, ast.TryExcept):
+                bodies = child.body
+                for handler in child.handlers:
+                    bodies.extend(handler.body)
+                for grandchild in bodies:
+                    if isinstance(grandchild, (ast.Import, ast.ImportFrom)):
+                        for lineno in doc_lines:
+                            if child.lineno < lineno:
+                                self.errors.append(
+                                    'Import found before documentation '
+                                    'variables. All imports must appear below '
+                                    'DOCUMENTATION/EXAMPLES/RETURN/'
+                                    'ANSIBLE_METADATA. line %d' %
+                                    (child.lineno,)
+                                )
+                                break
+
     def _find_ps_replacers(self):
         if 'WANT_JSON' not in self.text:
             self.errors.append('WANT_JSON not found in module')
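For illustration (not part of the commit): a simplified, standalone sketch of the idea behind _ensure_imports_below_docs. It only walks top-level imports and uses an invented module source; the real check also looks inside try/except blocks, as shown in the hunk above.

import ast

source = (
    "import os\n"          # import sits above the documentation variables
    "DOCUMENTATION = '''\n"
    "module: example\n"
    "'''\n"
)

tree = ast.parse(source)
doc_lineno = next(node.lineno for node in tree.body
                  if isinstance(node, ast.Assign) and
                  any(getattr(target, 'id', None) == 'DOCUMENTATION'
                      for target in node.targets))

for node in tree.body:
    if isinstance(node, (ast.Import, ast.ImportFrom)) and node.lineno < doc_lineno:
        print('Import found before documentation variables. line %d' % node.lineno)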
@@ -352,6 +383,10 @@ class ModuleValidator(Validator):
             'RETURN': {
                 'value': None,
                 'lineno': 0
             },
+            'ANSIBLE_METADATA': {
+                'value': None,
+                'lineno': 0
+            }
         }
         for child in self.ast.body:
@@ -366,13 +401,16 @@ class ModuleValidator(Validator):
                     elif grandchild.id == 'RETURN':
                         docs['RETURN']['value'] = child.value.s
                         docs['RETURN']['lineno'] = child.lineno
+                    elif grandchild.id == 'ANSIBLE_METADATA':
+                        docs['ANSIBLE_METADATA']['value'] = child.value
+                        docs['ANSIBLE_METADATA']['lineno'] = child.lineno
 
         return docs
 
-    def _validate_docs_schema(self, doc):
+    def _validate_docs_schema(self, doc, schema, name):
         errors = []
         try:
-            doc_schema(doc)
+            schema(doc)
         except Exception as e:
             for error in e.errors:
                 error.data = doc
@@ -390,52 +428,40 @@ class ModuleValidator(Validator):
 
         for error in errors:
             path = [str(p) for p in error.path]
-            self.errors.append('DOCUMENTATION.%s: %s' %
-                               ('.'.join(path), humanize_error(error.data, error)))
+            self.errors.append('%s.%s: %s' %
+                               (name, '.'.join(path),
+                                humanize_error(error.data, error)))
 
     def _validate_docs(self):
         doc_info = self._get_docs()
-        try:
-            doc = yaml.safe_load(doc_info['DOCUMENTATION']['value'])
-        except yaml.MarkedYAMLError as e:
-            doc = None
-            # This offsets the error line number to where the
-            # DOCUMENTATION starts so we can just go to that line in the
-            # module
-            e.problem_mark.line += (
-                doc_info['DOCUMENTATION']['lineno'] - 1
-            )
-            e.problem_mark.name = '%s.DOCUMENTATION' % self.name
-            self.traces.append(e)
-            self.errors.append('DOCUMENTATION is not valid YAML. Line %d '
-                               'column %d' %
-                               (e.problem_mark.line + 1,
-                                e.problem_mark.column + 1))
-        except yaml.reader.ReaderError as e:
-            self.traces.append(e)
-            self.errors.append('DOCUMENTATION is not valid YAML. Character 0x%x at position %d.' %
-                               (e.character, e.position))
-        except yaml.YAMLError as e:
-            self.traces.append(e)
-            self.errors.append('DOCUMENTATION is not valid YAML: %s: %s' % (type(e), e))
-        except AttributeError:
-            if not bool(doc_info['DOCUMENTATION']['value']):
-                self.errors.append('No DOCUMENTATION provided')
-        else:
-            with CaptureStd():
-                try:
-                    get_docstring(self.path, verbose=True)
-                except AssertionError:
-                    fragment = doc['extends_documentation_fragment']
-                    self.errors.append('DOCUMENTATION fragment missing: %s' %
-                                       fragment)
-                except Exception as e:
-                    self.traces.append(e)
-                    self.errors.append('Unknown DOCUMENTATION error, see '
-                                       'TRACE')
+        doc, errors, traces = parse_yaml(
+            doc_info['DOCUMENTATION']['value'],
+            doc_info['DOCUMENTATION']['lineno'],
+            self.name, 'DOCUMENTATION'
+        )
+        self.errors.extend(errors)
+        self.traces.extend(traces)
+        if not errors and not traces:
+            with CaptureStd():
+                try:
+                    get_docstring(self.path, verbose=True)
+                except AssertionError:
+                    fragment = doc['extends_documentation_fragment']
+                    self.errors.append(
+                        'DOCUMENTATION fragment missing: %s' %
+                        fragment
+                    )
+                except Exception as e:
+                    self.traces.append(e)
+                    self.errors.append('Unknown DOCUMENTATION error, see '
+                                       'TRACE')
 
-            self._validate_docs_schema(doc)
-            self._check_version_added(doc)
-            self._check_for_new_args(doc)
+            self._validate_docs_schema(doc, doc_schema, 'DOCUMENTATION')
+            self._check_version_added(doc)
+            self._check_for_new_args(doc)
 
         if not bool(doc_info['EXAMPLES']['value']):
             self.errors.append('No EXAMPLES provided')
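For illustration (not part of the commit): the error-reporting pattern the generalised _validate_docs_schema now uses for any schema/name pair. It assumes the schema module from this commit is importable, and the bad metadata dict is invented ('stable' is not an allowed status value).

from voluptuous import MultipleInvalid
from voluptuous.humanize import humanize_error

from schema import metadata_schema  # the schema module added above

bad_metadata = {'status': ['stable'], 'supported_by': 'community', 'version': '1.0'}

try:
    metadata_schema(bad_metadata)
except MultipleInvalid as e:
    for error in e.errors:
        path = [str(p) for p in error.path]
        print('%s.%s: %s' % ('ANSIBLE_METADATA', '.'.join(path),
                             humanize_error(bad_metadata, error)))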
@@ -446,25 +472,34 @@ class ModuleValidator(Validator):
             else:
                 self.warnings.append('No RETURN provided')
         else:
-            try:
-                yaml.safe_load(doc_info['RETURN']['value'])
-            except yaml.MarkedYAMLError as e:
-                e.problem_mark.line += (
-                    doc_info['RETURN']['lineno'] - 1
-                )
-                e.problem_mark.name = '%s.RETURN' % self.name
-                self.errors.append('RETURN is not valid YAML. Line %d '
-                                   'column %d' %
-                                   (e.problem_mark.line + 1,
-                                    e.problem_mark.column + 1))
-                self.traces.append(e)
-            except yaml.reader.ReaderError as e:
-                self.traces.append(e)
-                self.errors.append('RETURN is not valid YAML. Character 0x%x at position %d.' %
-                                   (e.character, e.position))
-            except yaml.YAMLError as e:
-                self.traces.append(e)
-                self.errors.append('RETURN is not valid YAML: %s: %s' % (type(e), e))
+            _, errors, traces = parse_yaml(doc_info['RETURN']['value'],
+                                           doc_info['RETURN']['lineno'],
+                                           self.name, 'RETURN')
+            self.errors.extend(errors)
+            self.traces.extend(traces)
+
+        if not bool(doc_info['ANSIBLE_METADATA']['value']):
+            self.errors.append('No ANSIBLE_METADATA provided')
+        else:
+            metadata = None
+            if isinstance(doc_info['ANSIBLE_METADATA']['value'], ast.Dict):
+                metadata = ast.literal_eval(
+                    doc_info['ANSIBLE_METADATA']['value']
+                )
+            else:
+                metadata, errors, traces = parse_yaml(
+                    doc_info['ANSIBLE_METADATA']['value'].s,
+                    doc_info['ANSIBLE_METADATA']['lineno'],
+                    self.name, 'ANSIBLE_METADATA'
+                )
+                self.errors.extend(errors)
+                self.traces.extend(traces)
+
+            if metadata:
+                self._validate_docs_schema(metadata, metadata_schema,
+                                           'ANSIBLE_METADATA')
+
+        return doc_info
 
     def _check_version_added(self, doc):
         if not self._is_new_module():
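For illustration (not part of the commit): the new ANSIBLE_METADATA extraction path when the value is a dict literal. The module source is invented; as in the hunk above, the dict node is evaluated with ast.literal_eval instead of being round-tripped through YAML.

import ast

source = (
    "ANSIBLE_METADATA = {'status': ['preview'],\n"
    "                    'supported_by': 'community',\n"
    "                    'version': '1.0'}\n"
)

assign = ast.parse(source).body[0]
if isinstance(assign.value, ast.Dict):
    metadata = ast.literal_eval(assign.value)
    print(metadata['supported_by'])  # community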
@@ -587,7 +622,7 @@ class ModuleValidator(Validator):
             return
 
         if self._python_module():
-            self._validate_docs()
+            doc_info = self._validate_docs()
 
         if self._python_module() and not self._just_docs():
             self._validate_argument_spec()
@@ -597,6 +632,7 @@
             self._find_module_utils(main)
             self._find_has_import()
             self._check_for_tabs()
+            self._ensure_imports_below_docs(doc_info)
 
         if self._powershell_module():
             self._find_ps_replacers()
@@ -605,7 +641,9 @@
             self._check_for_gpl3_header()
             if not self._just_docs():
                 self._check_interpreter(powershell=self._powershell_module())
-                self._check_type_instead_of_isinstance(powershell=self._powershell_module())
+                self._check_type_instead_of_isinstance(
+                    powershell=self._powershell_module()
+                )
 
 
 class PythonPackageValidator(Validator):