fix a-doc listing plugins and add tests (#68600)
* add docs listing tests
* added collection module docs test
* always safe_load
* force 'type consistency' for uniquing paths
* bytified
* use our json encoder

Co-Authored-By: Matt Clay <matt@mystile.com>
commit 23ab8f37df
parent a67d5dbcb7
12 changed files with 311 additions and 30 deletions
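The "use our json encoder" note corresponds to the jdump() change in the first hunk below: ansible-doc now serialises documentation through AnsibleJSONEncoder and turns a TypeError into an AnsibleError instead of a bare traceback. The sketch below illustrates the same pattern with the standard library only; StubAnsibleJSONEncoder is a hypothetical stand-in, not the real ansible.module_utils.common.json.AnsibleJSONEncoder.

    import json


    class StubAnsibleJSONEncoder(json.JSONEncoder):
        """Hypothetical stand-in for AnsibleJSONEncoder: converts a few
        non-JSON-native values instead of letting json.dumps raise TypeError."""

        def default(self, o):
            if isinstance(o, (set, frozenset)):
                return sorted(o)
            if isinstance(o, bytes):
                return o.decode('utf-8', errors='replace')
            return super(StubAnsibleJSONEncoder, self).default(o)


    def jdump(data):
        # Mirrors the shape of the patched jdump(): dump with the custom encoder,
        # and turn any remaining TypeError into one clearer error.
        try:
            print(json.dumps(data, cls=StubAnsibleJSONEncoder, sort_keys=True, indent=4))
        except TypeError as e:
            raise RuntimeError('documentation could not be converted to JSON: %s' % e)


    if __name__ == '__main__':
        jdump({'options': {'choices': {'a', 'b'}, 'raw': b'bytes value'}})

Passing cls= keeps normal json.dumps behaviour for everything the encoder does not special-case.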
@@ -18,33 +18,39 @@ from ansible import constants as C
 from ansible import context
 from ansible.cli import CLI
 from ansible.cli.arguments import option_helpers as opt_help
-from ansible.collections.list import list_collection_dirs, get_collection_name_from_path
+from ansible.collections.list import list_collection_dirs
 from ansible.errors import AnsibleError, AnsibleOptionsError
-from ansible.module_utils._text import to_native
+from ansible.module_utils._text import to_native, to_text
 from ansible.module_utils.common._collections_compat import Container, Sequence
+from ansible.module_utils.common.json import AnsibleJSONEncoder
 from ansible.module_utils.six import string_types
 from ansible.parsing.metadata import extract_metadata
 from ansible.parsing.plugin_docs import read_docstub
 from ansible.parsing.yaml.dumper import AnsibleDumper
 from ansible.plugins.loader import action_loader, fragment_loader
-from ansible.utils.collection_loader import set_collection_playbook_paths
+from ansible.utils.collection_loader import set_collection_playbook_paths, get_collection_name_from_path
 from ansible.utils.display import Display
 from ansible.utils.plugin_docs import BLACKLIST, get_docstring, get_versioned_doclink

 display = Display()


 def jdump(text):
-    display.display(json.dumps(text, sort_keys=True, indent=4))
+    try:
+        display.display(json.dumps(text, cls=AnsibleJSONEncoder, sort_keys=True, indent=4))
+    except TypeError as e:
+        raise AnsibleError('We could not convert all the documentation into JSON as there was a conversion issue: %s' % to_native(e))


 def add_collection_plugins(plugin_list, plugin_type, coll_filter=None):

     # TODO: take into account routing.yml once implemented
-    colldirs = list_collection_dirs(coll_filter=coll_filter)
-    for path in colldirs:
-        collname = get_collection_name_from_path(path)
+    b_colldirs = list_collection_dirs(coll_filter=coll_filter)
+    for b_path in b_colldirs:
+        path = to_text(b_path, errors='surrogate_or_strict')
+        collname = get_collection_name_from_path(b_path)
         ptype = C.COLLECTION_PTYPE_COMPAT.get(plugin_type, plugin_type)
-        plugin_list.update(DocCLI.find_plugins(os.path.join(path, 'plugins', ptype), plugin_type, collname))
+        plugin_list.update(DocCLI.find_plugins(os.path.join(path, 'plugins', ptype), plugin_type, collection=collname))


 class RemovedPlugin(Exception):

@@ -235,7 +241,7 @@ class DocCLI(CLI):
            # Some changes to how json docs are formatted
            for plugin, doc_data in plugin_docs.items():
                try:
-                    doc_data['return'] = yaml.load(doc_data['return'])
+                    doc_data['return'] = yaml.safe_load(doc_data['return'])
                except Exception:
                    pass

@@ -474,6 +480,7 @@ class DocCLI(CLI):
        # Uses a list to get the order right
        ret = []
        for i in finder._get_paths(subdirs=False):
+            i = to_text(i, errors='surrogate_or_strict')
            if i not in ret:
                ret.append(i)
        return os.pathsep.join(ret)
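The one-line addition in the last hunk above is the "force 'type consistency' for uniquing paths" note: path entries can arrive as bytes or text, and on Python 3 b'/x' != u'/x', so the `if i not in ret` check only de-duplicates once every entry is normalised with to_text(). A minimal sketch of that idea, using a local stand-in for ansible.module_utils._text.to_text so it runs without Ansible installed (join_unique_paths is an illustrative name, not an ansible-doc function):

    import os


    def to_text(value, errors='surrogate_or_strict'):
        # Local stand-in for ansible.module_utils._text.to_text, for this demo only;
        # the real helper understands Ansible-specific error handler names.
        if isinstance(value, bytes):
            return value.decode('utf-8', 'surrogateescape' if errors == 'surrogate_or_strict' else errors)
        return value


    def join_unique_paths(paths):
        # Same shape as the patched loop: normalise every entry to text first,
        # then de-duplicate while keeping the original order.
        ret = []
        for i in paths:
            i = to_text(i, errors='surrogate_or_strict')
            if i not in ret:
                ret.append(i)
        return os.pathsep.join(ret)


    if __name__ == '__main__':
        mixed = [b'/usr/share/ansible/plugins', u'/usr/share/ansible/plugins', u'/tmp/x']
        # Without normalisation the bytes and text variants would both survive;
        # with it the duplicate collapses to a single entry.
        print(join_unique_paths(mixed))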
@@ -6,8 +6,9 @@ __metaclass__ = type
 import os

+from ansible.module_utils._text import to_bytes
+
-FLAG_FILES = frozenset(['MANIFEST.json', 'galaxy.yml'])
+B_FLAG_FILES = frozenset([b'MANIFEST.json', b'galaxy.yml'])


 def is_collection_path(path):

@@ -18,9 +19,10 @@ def is_collection_path(path):
     """

     is_coll = False
-    if os.path.isdir(path):
-        for flag in FLAG_FILES:
-            if os.path.exists(os.path.join(path, flag)):
+    b_path = to_bytes(path)
+    if os.path.isdir(b_path):
+        for b_flag in B_FLAG_FILES:
+            if os.path.exists(os.path.join(b_path, b_flag)):
                 is_coll = True
                 break

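This is the "bytified" note from the commit message: is_collection_path() converts the incoming path with to_bytes() once and then performs every os.path call on byte strings, so str and bytes callers behave the same. A rough standalone sketch of the same check, with a simplified to_bytes stand-in rather than the real ansible.module_utils._text.to_bytes:

    import os
    import tempfile

    B_FLAG_FILES = frozenset([b'MANIFEST.json', b'galaxy.yml'])


    def to_bytes(value, encoding='utf-8'):
        # Stand-in for ansible.module_utils._text.to_bytes, for this demo only.
        return value if isinstance(value, bytes) else value.encode(encoding)


    def is_collection_path(path):
        # Same shape as the patched helper: convert once, then do every
        # os.path call with bytes so str and bytes inputs behave identically.
        b_path = to_bytes(path)
        if os.path.isdir(b_path):
            for b_flag in B_FLAG_FILES:
                if os.path.exists(os.path.join(b_path, b_flag)):
                    return True
        return False


    if __name__ == '__main__':
        tmp = tempfile.mkdtemp()
        open(os.path.join(tmp, 'galaxy.yml'), 'w').close()
        print(is_collection_path(tmp), is_collection_path(to_bytes(tmp)))  # True True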
@@ -9,7 +9,8 @@ import os
 from collections import defaultdict

 from ansible.collections import is_collection_path
-from ansible.utils.collection_loader import AnsibleCollectionLoader, get_collection_name_from_path
+from ansible.module_utils._text import to_bytes
+from ansible.utils.collection_loader import AnsibleCollectionLoader
 from ansible.utils.display import Display

 display = Display()

@@ -24,17 +25,20 @@ def list_valid_collection_paths(search_paths=None, warn=False):
     """

     if search_paths is None:
-        search_paths = AnsibleCollectionLoader().n_collection_paths
+        search_paths = []
+
+    search_paths.extend(AnsibleCollectionLoader().n_collection_paths)

     for path in search_paths:

-        if not os.path.exists(path):
+        b_path = to_bytes(path)
+        if not os.path.exists(b_path):
             # warn for missing, but not if default
             if warn:
                 display.warning("The configured collection path {0} does not exist.".format(path))
             continue

-        if not os.path.isdir(path):
+        if not os.path.isdir(b_path):
             if warn:
                 display.warning("The configured collection path {0}, exists, but it is not a directory.".format(path))
             continue

@@ -53,14 +57,14 @@ def list_collection_dirs(search_paths=None, coll_filter=None):
     collections = defaultdict(dict)
     for path in list_valid_collection_paths(search_paths):

-        if os.path.isdir(path):
-            coll_root = os.path.join(path, 'ansible_collections')
-
-            if os.path.exists(coll_root) and os.path.isdir(coll_root):
+        b_path = to_bytes(path)
+        if os.path.isdir(b_path):
+            b_coll_root = to_bytes(os.path.join(path, 'ansible_collections'))
+
+            if os.path.exists(b_coll_root) and os.path.isdir(b_coll_root):
                 coll = None
                 if coll_filter is None:
-                    namespaces = os.listdir(coll_root)
+                    namespaces = os.listdir(b_coll_root)
                 else:
                     if '.' in coll_filter:
                         (nsp, coll) = coll_filter.split('.')

@@ -69,12 +73,12 @@ def list_collection_dirs(search_paths=None, coll_filter=None):
                         namespaces = [nsp]

                 for ns in namespaces:
-                    namespace_dir = os.path.join(coll_root, ns)
+                    b_namespace_dir = os.path.join(b_coll_root, to_bytes(ns))

-                    if os.path.isdir(namespace_dir):
+                    if os.path.isdir(b_namespace_dir):

                        if coll is None:
-                            colls = os.listdir(namespace_dir)
+                            colls = os.listdir(b_namespace_dir)
                        else:
                            colls = [coll]

@@ -82,8 +86,8 @@ def list_collection_dirs(search_paths=None, coll_filter=None):

                            # skip dupe collections as they will be masked in execution
                            if collection not in collections[ns]:
-                                coll_dir = os.path.join(namespace_dir, collection)
-                                if is_collection_path(coll_dir):
-                                    cpath = os.path.join(namespace_dir, collection)
-                                    collections[ns][collection] = cpath
-                                    yield cpath
+                                b_coll = to_bytes(collection)
+                                b_coll_dir = os.path.join(b_namespace_dir, b_coll)
+                                if is_collection_path(b_coll_dir):
+                                    collections[ns][collection] = b_coll_dir
+                                    yield b_coll_dir
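The coll_filter handling above is what the new integration tests at the bottom of this diff exercise: no filter walks every namespace under ansible_collections, a bare 'ns' restricts the namespace, and 'ns.coll' restricts both namespace and collection. A simplified, self-contained walk over a throwaway directory layout showing how the filter narrows the search; it deliberately skips the byte-path handling, is_collection_path() check and de-duplication that the real list_collection_dirs() performs:

    import os
    import tempfile


    def list_collection_dirs(coll_root, coll_filter=None):
        # Simplified filter logic: an optional 'ns' or 'ns.coll' string
        # narrows which namespace/collection directories are yielded.
        coll = None
        if coll_filter is None:
            namespaces = sorted(os.listdir(coll_root))
        elif '.' in coll_filter:
            nsp, coll = coll_filter.split('.')
            namespaces = [nsp]
        else:
            namespaces = [coll_filter]

        for ns in namespaces:
            ns_dir = os.path.join(coll_root, ns)
            if not os.path.isdir(ns_dir):
                continue
            for collection in ([coll] if coll else sorted(os.listdir(ns_dir))):
                coll_dir = os.path.join(ns_dir, collection)
                if os.path.isdir(coll_dir):
                    yield coll_dir


    if __name__ == '__main__':
        root = tempfile.mkdtemp()
        for ns, name in [('testns', 'testcol'), ('testns', 'other'), ('community', 'general')]:
            os.makedirs(os.path.join(root, ns, name))
        print(list(list_collection_dirs(root)))                    # all three collections
        print(list(list_collection_dirs(root, 'testns')))          # both testns collections
        print(list(list_collection_dirs(root, 'testns.testcol')))  # just one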
@@ -0,0 +1,30 @@
{
    "collection_info": {
        "description": null,
        "repository": "",
        "tags": [],
        "dependencies": {},
        "authors": [
            "Ansible (https://ansible.com)"
        ],
        "issues": "",
        "name": "testcol",
        "license": [
            "GPL-3.0-or-later"
        ],
        "documentation": "",
        "namespace": "testns",
        "version": "0.1.1231",
        "readme": "README.md",
        "license_file": "COPYING",
        "homepage": ""
    },
    "file_manifest_file": {
        "format": 1,
        "ftype": "file",
        "chksum_sha256": "4c15a867ceba8ba1eaf2f4a58844bb5dbb82fec00645fc7eb74a3d31964900f6",
        "name": "FILES.json",
        "chksum_type": "sha256"
    },
    "format": 1
}
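The MANIFEST.json fixture above is what makes the test collection resolve as testns.testcol. If you only want to read the namespace and name back out of such a manifest, the standard json module is enough; the helper below is just an illustration and is not the implementation of get_collection_name_from_path:

    import json


    def fqcn_from_manifest(manifest_path):
        # Read the collection_info block and build the 'namespace.name' string
        # that ansible-doc shows for collection plugins (e.g. 'testns.testcol').
        with open(manifest_path) as f:
            info = json.load(f)['collection_info']
        return '%s.%s' % (info['namespace'], info['name'])


    # Example (path is hypothetical):
    # print(fqcn_from_manifest('collections/ansible_collections/testns/testcol/MANIFEST.json'))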
@@ -0,0 +1,49 @@
# (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

DOCUMENTATION = '''
    cache: testns.testcol.notjsonfile
    short_description: JSON formatted files.
    description:
        - This cache uses JSON formatted, per host, files saved to the filesystem.
    author: Ansible Core (@ansible-core)
    options:
      _uri:
        required: True
        description:
          - Path in which the cache plugin will save the JSON files
        env:
          - name: ANSIBLE_CACHE_PLUGIN_CONNECTION
        ini:
          - key: fact_caching_connection
            section: defaults
      _prefix:
        description: User defined prefix to use when creating the JSON files
        env:
          - name: ANSIBLE_CACHE_PLUGIN_PREFIX
        ini:
          - key: fact_caching_prefix
            section: defaults
      _timeout:
        default: 86400
        description: Expiration timeout for the cache plugin data
        env:
          - name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
        ini:
          - key: fact_caching_timeout
            section: defaults
        type: integer
'''

from ansible.plugins.cache import BaseFileCacheModule


class CacheModule(BaseFileCacheModule):
    """
    A caching module backed by json files.
    """
    pass
@@ -0,0 +1,35 @@
# Copyright (c) 2018 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

DOCUMENTATION = '''
    inventory: testns.testcol.statichost
    short_description: Add a single host
    description: Add a single host
    extends_documentation_fragment:
      - inventory_cache
    options:
      plugin:
        description: plugin name (must be statichost)
        required: true
      hostname:
        description: Toggle display of stderr even when script was successful
        required: True
'''

from ansible.errors import AnsibleParserError
from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable


class InventoryModule(BaseInventoryPlugin, Cacheable):

    NAME = 'testns.content_adj.statichost'

    def verify_file(self, path):
        pass

    def parse(self, inventory, loader, path, cache=None):

        pass
@@ -0,0 +1,37 @@
# (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)

__metaclass__ = type

DOCUMENTATION = """
    lookup: testns.testcol.noop
    author: Ansible core team
    short_description: returns input
    description:
      - this is a noop
"""

EXAMPLES = """
- name: do nothing
  debug: msg="{{ lookup('testns.testcol.noop', [1,2,3,4]) }}"
"""

RETURN = """
_list:
  description: input given
"""

from ansible.module_utils.common._collections_compat import Sequence
from ansible.plugins.lookup import LookupBase
from ansible.errors import AnsibleError


class LookupModule(LookupBase):

    def run(self, terms, **kwargs):
        if not isinstance(terms, Sequence):
            raise AnsibleError("testns.testcol.noop expects a list")
        return terms
@@ -0,0 +1,26 @@
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


DOCUMENTATION = """
    module: fakemodule
    short_desciptoin: fake module
    description:
        - this is a fake module
    options:
        _notreal:
            description: really not a real option
    author:
        - me
"""

import json


def main():
    print(json.dumps(dict(changed=False, source='testns.testcol.fakemodule')))


if __name__ == '__main__':
    main()
@@ -0,0 +1,13 @@
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json


def main():
    print(json.dumps(dict(changed=False, source='testns.testcol.notrealmodule')))


if __name__ == '__main__':
    main()
@@ -0,0 +1,27 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

DOCUMENTATION = '''
    vars: noop_vars_plugin
    short_description: Do NOT load host and group vars
    description: don't test loading host and group vars from a collection
    options:
      stage:
        default: all
        choices: ['all', 'inventory', 'task']
        type: str
        ini:
          - key: stage
            section: testns.testcol.noop_vars_plugin
        env:
          - name: ANSIBLE_VARS_PLUGIN_STAGE
'''

from ansible.plugins.vars import BaseVarsPlugin


class VarsModule(BaseVarsPlugin):

    def get_vars(self, loader, path, entities, cache=True):
        super(VarsModule, self).get_vars(loader, path, entities)
        return {'collection': 'yes', 'notreal': 'value'}
20  test/integration/targets/ansible-doc/fakemodule.output  Normal file
@@ -0,0 +1,20 @@
> FAKEMODULE    (./collections/ansible_collections/testns/testcol/plugins/modules/fakemodule.py)

        this is a fake module

  * This module is maintained by The Ansible Community
OPTIONS (= is mandatory):

- _notreal
        really not a real option
        [Default: (null)]


AUTHOR: me
METADATA:
          status:
          - preview
          supported_by: community

SHORT_DESCIPTOIN: fake module

@@ -2,3 +2,34 @@

 set -eux
 ansible-playbook test.yml -i inventory "$@"
+
+(
+unset ANSIBLE_PLAYBOOK_DIR
+cd "$(dirname "$0")"
+
+# test module docs from collection
+current_out="$(ansible-doc --playbook-dir ./ testns.testcol.fakemodule)"
+expected_out="$(cat fakemodule.output)"
+test "$current_out" == "$expected_out"
+
+# test listing diff plugin types from collection
+for ptype in cache inventory lookup vars
+do
+    # each plugin type adds 1 from collection
+    pre=$(ansible-doc -l -t ${ptype}|wc -l)
+    post=$(ansible-doc -l -t ${ptype} --playbook-dir ./|wc -l)
+    test "$pre" -eq $((post - 1))
+
+    # ensure we ONLY list from the collection
+    justcol=$(ansible-doc -l -t ${ptype} --playbook-dir ./ testns.testcol|wc -l)
+    test "$justcol" -eq 1
+
+    # ensure we get 0 plugins when restricting to collection, but not supplying it
+    justcol=$(ansible-doc -l -t ${ptype} testns.testcol|wc -l)
+    test "$justcol" -eq 0
+
+    # ensure we get 1 plugin when restricting namespace
+    justcol=$(ansible-doc -l -t ${ptype} --playbook-dir ./ testns|wc -l)
+    test "$justcol" -eq 1
+done
+)
+