validate-modules: support collections (#60247)

* Start of work to support collections

* remove version_added from base schema

* If a collection, pass that to validate-modules

* clean ups

* Allow version_added in a collection, just make it optional

* Don't traceback on missing doc_fragment

* Don't validate metadata in a collection
This commit is contained in:
Matt Martz 2019-08-16 14:28:34 -05:00 committed by GitHub
parent 7f280434de
commit def3d1f815
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
4 changed files with 96 additions and 53 deletions

View file

@ -236,7 +236,7 @@ class ModuleValidator(Validator):
WHITELIST_FUTURE_IMPORTS = frozenset(('absolute_import', 'division', 'print_function'))
def __init__(self, path, analyze_arg_spec=False, base_branch=None, git_cache=None, reporter=None):
def __init__(self, path, analyze_arg_spec=False, collection=None, base_branch=None, git_cache=None, reporter=None):
super(ModuleValidator, self).__init__(reporter=reporter or Reporter())
self.path = path
@ -245,6 +245,8 @@ class ModuleValidator(Validator):
self.analyze_arg_spec = analyze_arg_spec
self.collection = collection
self.base_branch = base_branch
self.git_cache = git_cache or GitCache()
@ -283,6 +285,11 @@ class ModuleValidator(Validator):
def object_path(self):
return self.path
def _get_collection_meta(self):
    """Placeholder for loading collection metadata.

    Intended to be implemented if/when collection metadata is needed
    for ``version_added`` comparisons; currently a no-op stub.
    """
    pass
def _python_module(self):
if self.path.endswith('.py') or self._python_module_override:
return True
@ -886,44 +893,45 @@ class ModuleValidator(Validator):
# Have to check the metadata first so that we know if the module is removed or deprecated
metadata = None
if not bool(doc_info['ANSIBLE_METADATA']['value']):
self.reporter.error(
path=self.object_path,
code=314,
msg='No ANSIBLE_METADATA provided'
)
else:
if isinstance(doc_info['ANSIBLE_METADATA']['value'], ast.Dict):
metadata = ast.literal_eval(
doc_info['ANSIBLE_METADATA']['value']
)
else:
# ANSIBLE_METADATA doesn't properly support YAML
# we should consider removing it from the spec
# Below code kept, in case we change our minds
# metadata, errors, traces = parse_yaml(
# doc_info['ANSIBLE_METADATA']['value'].s,
# doc_info['ANSIBLE_METADATA']['lineno'],
# self.name, 'ANSIBLE_METADATA'
# )
# for error in errors:
# self.reporter.error(
# path=self.object_path,
# code=315,
# **error
# )
# for trace in traces:
# self.reporter.trace(
# path=self.object_path,
# tracebk=trace
# )
if not self.collection:
if not bool(doc_info['ANSIBLE_METADATA']['value']):
self.reporter.error(
path=self.object_path,
code=315,
msg='ANSIBLE_METADATA was not provided as a dict, YAML not supported'
code=314,
msg='No ANSIBLE_METADATA provided'
)
else:
if isinstance(doc_info['ANSIBLE_METADATA']['value'], ast.Dict):
metadata = ast.literal_eval(
doc_info['ANSIBLE_METADATA']['value']
)
else:
# ANSIBLE_METADATA doesn't properly support YAML
# we should consider removing it from the spec
# Below code kept, in case we change our minds
# metadata, errors, traces = parse_yaml(
# doc_info['ANSIBLE_METADATA']['value'].s,
# doc_info['ANSIBLE_METADATA']['lineno'],
# self.name, 'ANSIBLE_METADATA'
# )
# for error in errors:
# self.reporter.error(
# path=self.object_path,
# code=315,
# **error
# )
# for trace in traces:
# self.reporter.trace(
# path=self.object_path,
# tracebk=trace
# )
self.reporter.error(
path=self.object_path,
code=315,
msg='ANSIBLE_METADATA was not provided as a dict, YAML not supported'
)
if metadata:
self._validate_docs_schema(metadata, metadata_1_1_schema(),
@ -968,6 +976,7 @@ class ModuleValidator(Validator):
tracebk=trace
)
if not errors and not traces:
missing_fragment = False
with CaptureStd():
try:
get_docstring(self.path, fragment_loader, verbose=True)
@ -978,6 +987,7 @@ class ModuleValidator(Validator):
code=303,
msg='DOCUMENTATION fragment missing: %s' % fragment
)
missing_fragment = True
except Exception as e:
self.reporter.trace(
path=self.object_path,
@ -989,7 +999,8 @@ class ModuleValidator(Validator):
msg='Unknown DOCUMENTATION error, see TRACE: %s' % e
)
add_fragments(doc, self.object_path, fragment_loader=fragment_loader)
if not missing_fragment:
add_fragments(doc, self.object_path, fragment_loader=fragment_loader)
if 'options' in doc and doc['options'] is None:
self.reporter.error(
@ -1006,13 +1017,30 @@ class ModuleValidator(Validator):
if os.path.islink(self.object_path):
# This module has an alias, which we can tell as it's a symlink
# Rather than checking for `module: $filename` we need to check against the true filename
self._validate_docs_schema(doc, doc_schema(os.readlink(self.object_path).split('.')[0]), 'DOCUMENTATION', 305)
self._validate_docs_schema(
doc,
doc_schema(
os.readlink(self.object_path).split('.')[0],
version_added=not bool(self.collection)
),
'DOCUMENTATION',
305
)
else:
# This is the normal case
self._validate_docs_schema(doc, doc_schema(self.object_name.split('.')[0]), 'DOCUMENTATION', 305)
self._validate_docs_schema(
doc,
doc_schema(
self.object_name.split('.')[0],
version_added=not bool(self.collection)
),
'DOCUMENTATION',
305
)
existing_doc = self._check_for_new_args(doc, metadata)
self._check_version_added(doc, existing_doc)
if not self.collection:
existing_doc = self._check_for_new_args(doc, metadata)
self._check_version_added(doc, existing_doc)
if not bool(doc_info['EXAMPLES']['value']):
self.reporter.error(
@ -1723,6 +1751,11 @@ def run():
parser.add_argument('--output', default='-',
help='Output location, use "-" for stdout. '
'Default "%(default)s"')
parser.add_argument('--collection',
help='Specifies the path to the collection, when '
'validating files within a collection. Ensure '
'that ANSIBLE_COLLECTIONS_PATHS is set so the '
'contents of the collection can be located')
args = parser.parse_args()
@ -1740,7 +1773,7 @@ def run():
continue
if ModuleValidator.is_blacklisted(path):
continue
with ModuleValidator(path, analyze_arg_spec=args.arg_spec,
with ModuleValidator(path, collection=args.collection, analyze_arg_spec=args.arg_spec,
base_branch=args.base_branch, git_cache=git_cache, reporter=reporter) as mv1:
mv1.validate()
check_dirs.add(os.path.dirname(path))
@ -1763,13 +1796,14 @@ def run():
continue
if ModuleValidator.is_blacklisted(path):
continue
with ModuleValidator(path, analyze_arg_spec=args.arg_spec,
with ModuleValidator(path, collection=args.collection, analyze_arg_spec=args.arg_spec,
base_branch=args.base_branch, git_cache=git_cache, reporter=reporter) as mv2:
mv2.validate()
for path in sorted(check_dirs):
pv = PythonPackageValidator(path, reporter=reporter)
pv.validate()
if not args.collection:
for path in sorted(check_dirs):
pv = PythonPackageValidator(path, reporter=reporter)
pv.validate()
if args.format == 'plain':
sys.exit(reporter.plain(warnings=args.warnings, output=args.output))

View file

@ -28,7 +28,7 @@ from contextlib import contextmanager
from ansible.module_utils.six import reraise
from .utils import find_executable
from .utils import CaptureStd, find_executable
class AnsibleModuleCallError(RuntimeError):
@ -110,9 +110,10 @@ def get_py_argument_spec(filename):
# We use ``module`` here instead of ``__main__``
# which helps with some import issues in this tool
# where modules may import things that conflict
mod = imp.load_source('module', filename)
if not fake.called:
mod.main()
with CaptureStd():
mod = imp.load_source('module', filename)
if not fake.called:
mod.main()
except AnsibleModuleCallError:
pass
except Exception as e:
@ -124,7 +125,7 @@ def get_py_argument_spec(filename):
return fake.kwargs['argument_spec'], fake.args, fake.kwargs
except KeyError:
return fake.args[0], fake.args, fake.kwargs
except TypeError:
except (TypeError, IndexError):
return {}, (), {}

View file

@ -177,7 +177,7 @@ def author(value):
raise Invalid("Invalid author")
def doc_schema(module_name):
def doc_schema(module_name, version_added=True):
deprecated_module = False
if module_name.startswith('_'):
@ -187,7 +187,6 @@ def doc_schema(module_name):
Required('module'): module_name,
Required('short_description'): Any(*string_types),
Required('description'): Any(list_string_types, *string_types),
Required('version_added'): Any(float, *string_types),
Required('author'): All(Any(None, list_string_types, *string_types), author),
'notes': Any(None, list_string_types),
'seealso': Any(None, seealso_schema),
@ -197,6 +196,12 @@ def doc_schema(module_name):
'extends_documentation_fragment': Any(list_string_types, *string_types)
}
if version_added:
doc_schema_dict[Required('version_added')] = Any(float, *string_types)
else:
# Optional
doc_schema_dict['version_added'] = Any(float, *string_types)
if deprecated_module:
deprecation_required_scheme = {
Required('deprecated'): Any(deprecation_schema),

View file

@ -80,6 +80,9 @@ class ValidateModulesTest(SanitySingleVersion):
'--arg-spec',
] + paths
if data_context().content.collection:
cmd.extend(['--collection', data_context().content.collection.directory])
if args.base_branch:
cmd.extend([
'--base-branch', args.base_branch,