#!/usr/bin/env python
"""Wrapper around yamllint that supports YAML embedded in Ansible modules."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import ast
import json
import os
import re
import sys

import yaml
from yaml.resolver import Resolver
from yaml.constructor import SafeConstructor
from yaml.error import MarkedYAMLError
from _yaml import CParser  # pylint: disable=no-name-in-module

from yamllint import linter
from yamllint.config import YamlLintConfig


def main():
    """Main program body."""
    paths = sys.argv[1:] or sys.stdin.read().splitlines()

    checker = YamlChecker()
    checker.check(paths)
    checker.report()


class TestConstructor(SafeConstructor):
    """Yaml Safe Constructor that knows about Ansible tags"""

    def construct_yaml_unsafe(self, node):
        try:
            constructor = getattr(node, 'id', 'object')
            if constructor is not None:
                constructor = getattr(self, 'construct_%s' % constructor)
        except AttributeError:
            constructor = self.construct_object

        value = constructor(node)

        return value


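# Register Ansible-specific YAML tags so embedded documents using them still
# load: !unsafe values are constructed like their untagged equivalents, while
# !vault and !vault-encrypted payloads are treated as plain strings.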
TestConstructor.add_constructor(
    u'!unsafe',
    TestConstructor.construct_yaml_unsafe)

TestConstructor.add_constructor(
    u'!vault',
    TestConstructor.construct_yaml_str)

TestConstructor.add_constructor(
    u'!vault-encrypted',
    TestConstructor.construct_yaml_str)


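# Loader backed by libyaml's CParser plus the tag-aware constructor above;
# check_parsable() uses it to verify that documents parse under libyaml.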
class TestLoader(CParser, TestConstructor, Resolver):
    def __init__(self, stream):
        CParser.__init__(self, stream)
        TestConstructor.__init__(self)
        Resolver.__init__(self)


class YamlChecker:
    """Wrapper around yamllint that supports YAML embedded in Ansible modules."""
    def __init__(self):
        self.messages = []

    def report(self):
        """Print yamllint report to stdout."""
        report = dict(
            messages=self.messages,
        )

        print(json.dumps(report, indent=4, sort_keys=True))

    def check(self, paths):
        """
        :type paths: t.List[str]
        """
        config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'config')

        yaml_conf = YamlLintConfig(file=os.path.join(config_path, 'default.yml'))
        module_conf = YamlLintConfig(file=os.path.join(config_path, 'modules.yml'))
        plugin_conf = YamlLintConfig(file=os.path.join(config_path, 'plugins.yml'))

        for path in paths:
            extension = os.path.splitext(path)[1]

            with open(path) as f:
                contents = f.read()

            if extension in ('.yml', '.yaml'):
                self.check_yaml(yaml_conf, path, contents)
            elif extension == '.py':
                if path.startswith('lib/ansible/modules/') or path.startswith('plugins/modules/'):
                    conf = module_conf
                else:
                    conf = plugin_conf

                self.check_module(conf, path, contents)
            else:
                raise Exception('unsupported extension: %s' % extension)

    def check_yaml(self, conf, path, contents):
        """
        :type conf: YamlLintConfig
        :type path: str
        :type contents: str
        """
        self.check_parsable(path, contents)
        self.messages += [self.result_to_message(r, path) for r in linter.run(contents, conf, path)]

    def check_module(self, conf, path, contents):
        """
        :type conf: YamlLintConfig
        :type path: str
        :type contents: str
        """
        docs = self.get_module_docs(path, contents)

        for key, value in docs.items():
            yaml_data = value['yaml']
            lineno = value['lineno']
            fmt = value['fmt']

            if fmt != 'yaml':
                continue

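            # Docstrings usually open with a newline right after the quotes;
            # drop it and bump the offset so reported lines match the source file.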
            if yaml_data.startswith('\n'):
                yaml_data = yaml_data[1:]
                lineno += 1

            self.check_parsable(path, yaml_data, lineno)

            messages = list(linter.run(yaml_data, conf, path))

            self.messages += [self.result_to_message(r, path, lineno - 1, key) for r in messages]

    def check_parsable(self, path, contents, lineno=1):
        """
        :type path: str
        :type contents: str
        :type lineno: int
        """
        try:
            yaml.load(contents, Loader=TestLoader)
        except MarkedYAMLError as e:
            self.messages += [{'code': 'unparsable-with-libyaml',
                               'message': '%s - %s' % (e.args[0], e.args[2]),
                               'path': path,
                               'line': e.problem_mark.line + lineno,
                               'column': e.problem_mark.column + 1,
                               'level': 'error',
                               }]

    @staticmethod
    def result_to_message(result, path, line_offset=0, prefix=''):
        """
        :type result: any
        :type path: str
        :type line_offset: int
        :type prefix: str
        :rtype: dict[str, any]
        """
        if prefix:
            prefix = '%s: ' % prefix

        return dict(
            code=result.rule or result.level,
            message=prefix + result.desc,
            path=path,
            line=result.line + line_offset,
            column=result.column,
            level=result.level,
        )

    def get_module_docs(self, path, contents):
        """
        :type path: str
        :type contents: str
        :rtype: dict[str, any]
        """
        module_doc_types = [
            'DOCUMENTATION',
            'EXAMPLES',
            'RETURN',
        ]

        docs = {}

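        # A leading '# fmt: <format>' comment lets a docstring declare a
        # non-YAML format; check_module() skips anything whose fmt is not 'yaml'.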
        fmt_re = re.compile(r'^# fmt:\s+(\S+)')

        def check_assignment(statement, doc_types=None):
            """Check the given statement for a documentation assignment."""
            for target in statement.targets:
                if not isinstance(target, ast.Name):
                    continue

                if doc_types and target.id not in doc_types:
                    continue

                fmt_match = fmt_re.match(statement.value.s.lstrip())
                fmt = 'yaml'
                if fmt_match:
                    fmt = fmt_match.group(1)

                docs[target.id] = dict(
                    yaml=statement.value.s,
                    lineno=statement.lineno,
                    end_lineno=statement.lineno + len(statement.value.s.splitlines()),
                    fmt=fmt.lower(),
                )

        module_ast = self.parse_module(path, contents)

        if not module_ast:
            return {}

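        # Modules and plugins keep their docs in top-level assignments, while
        # doc fragments nest them inside classes, so the two layouts are walked
        # differently below.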
        is_plugin = path.startswith('lib/ansible/modules/') or path.startswith('lib/ansible/plugins/') or path.startswith('plugins/')
        is_doc_fragment = path.startswith('lib/ansible/plugins/doc_fragments/') or path.startswith('plugins/doc_fragments/')

        if is_plugin and not is_doc_fragment:
            for body_statement in module_ast.body:
                if isinstance(body_statement, ast.Assign):
                    check_assignment(body_statement, module_doc_types)
        elif is_doc_fragment:
            for body_statement in module_ast.body:
                if isinstance(body_statement, ast.ClassDef):
                    for class_statement in body_statement.body:
                        if isinstance(class_statement, ast.Assign):
                            check_assignment(class_statement)
        else:
            raise Exception('unsupported path: %s' % path)

        return docs

    def parse_module(self, path, contents):
        """
        :type path: str
        :type contents: str
        :rtype: ast.Module | None
        """
        try:
            return ast.parse(contents)
        except SyntaxError as ex:
            self.messages.append(dict(
                code='python-syntax-error',
                message=str(ex),
                path=path,
                line=ex.lineno,
                column=ex.offset,
                level='error',
            ))
        except Exception as ex:  # pylint: disable=broad-except
            self.messages.append(dict(
                code='python-parse-error',
                message=str(ex),
                path=path,
                line=0,
                column=0,
                level='error',
            ))

        return None


if __name__ == '__main__':
    main()