fixes to ansible-doc (#47209)

* Adoc fixes (#47137)
* removed hardcoded loader/plugins list
* updated a few errors to keep the original exception object
* fix httpapi/cliconf listing
* ansible-doc fixes
* show undocumented as UNDOCUMENTED
* added missing undoc

(cherry picked from commit fce9673ac1)
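For context, the sketch below (not part of the diff) shows the loader resolution this
commit switches to, mirroring the DocCLI hunks further down. It assumes an Ansible
2.7-era tree where these imports resolve; the helper name get_loader is illustrative only.

import ansible.plugins.loader as plugin_loader
from ansible import constants as C
from ansible.errors import AnsibleOptionsError


def get_loader(plugin_type):
    # Only plugin types in the central C.DOCUMENTABLE_PLUGINS constant can be documented;
    # the old code kept its own hardcoded loader_map instead.
    if plugin_type not in C.DOCUMENTABLE_PLUGINS:
        raise AnsibleOptionsError("Unknown or undocumentable plugin type: %s" % plugin_type)
    # Every documentable type exposes a matching '<type>_loader' attribute on the loader module.
    return getattr(plugin_loader, '%s_loader' % plugin_type)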

* An earlier optimization of ansible-doc -l caused failures. (#47012)
The optimization quickly searches the plugin code for the short_description
field and then uses that value in the -l output.  The searching was a bit too
naive and ended up pulling out malformed YAML.  This caused those
plugins to be omitted from the list of plugins of that type, with
a warning that their documentation strings were wrong.

This change makes the documentation parser aware that the documentation
string may have a relative indent for all of its fields, which makes it
robust in the face of this particular problem.

* Don't search for space after short_description:

Any whitespace after the colon is valid, in particular a newline.
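For example, a hypothetical module docstring shaped like the one below has a relative
indent on every field and puts the value of short_description on the line after the
colon. With the old logic, scanning for 'short_description: ' with a trailing space
misses a key whose value starts on the next line, and once a key is found, capturing
every subsequently indented line drags the rest of the block into the stub; the latter
is where the malformed YAML came from.

DOCUMENTATION = """
    module: demo
    short_description:
        Frobnicate the remote host
    description:
        - The old scan kept appending every indented line after short_description,
          so lines like this one ended up in the extracted stub.
"""

The parser change at the bottom of this diff records the indent of the
short_description line and only keeps lines indented more deeply than it.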

(cherry picked from commit 61ae6424a3)
Authored by Brian Coca on 2018-10-17 12:40:02 -04:00, committed by Alicia Cozine
parent 8810e9f9e2
commit b2381cfac9
4 changed files with 45 additions and 38 deletions


@@ -0,0 +1,2 @@
bugfixes:
- ansible-doc, removed local hardcoded listing, now uses the 'central' list from constants and other minor issues


@@ -0,0 +1,3 @@
---
bugfixes:
- 'Fixed an issue with ansible-doc -l failing when parsing some plugin documentation.'


@@ -24,6 +24,7 @@ import traceback
import yaml
from collections import Sequence
import ansible.plugins.loader as plugin_loader
from ansible import constants as C
from ansible.cli import CLI
@@ -33,8 +34,7 @@ from ansible.module_utils.six import string_types
from ansible.parsing.metadata import extract_metadata
from ansible.parsing.plugin_docs import read_docstub
from ansible.parsing.yaml.dumper import AnsibleDumper
from ansible.plugins.loader import module_loader, action_loader, lookup_loader, callback_loader, cache_loader, \
vars_loader, connection_loader, strategy_loader, inventory_loader, shell_loader, fragment_loader
from ansible.plugins.loader import action_loader, fragment_loader
from ansible.utils.plugin_docs import BLACKLIST, get_docstring
try:
@@ -58,18 +58,6 @@ class DocCLI(CLI):
super(DocCLI, self).__init__(args)
self.plugin_list = set()
self.loader_map = {
'cache': cache_loader,
'callback': callback_loader,
'connection': connection_loader,
'lookup': lookup_loader,
'strategy': strategy_loader,
'vars': vars_loader,
'inventory': inventory_loader,
'shell': shell_loader,
'module': module_loader,
}
def parse(self):
self.parser = CLI.base_parser(
@@ -104,7 +92,11 @@ class DocCLI(CLI):
super(DocCLI, self).run()
plugin_type = self.options.type
loader = self.loader_map.get(plugin_type, self.loader_map['module'])
if plugin_type in C.DOCUMENTABLE_PLUGINS:
loader = getattr(plugin_loader, '%s_loader' % plugin_type)
else:
raise AnsibleOptionsError("Unknown or undocumentable plugin type: %s" % plugin_type)
# add to plugin path from command line
if self.options.module_path:
@@ -137,16 +129,16 @@ class DocCLI(CLI):
# process all plugins of type
if self.options.all_plugins:
self.args = self.get_all_plugins_of_type(plugin_type)
self.args = self.get_all_plugins_of_type(plugin_type, loader)
# dump plugin metadata as JSON
if self.options.json_dump:
plugin_data = {}
for plugin_type in self.loader_map.keys():
for plugin_type in C.DOCUMENTABLE_PLUGINS:
plugin_data[plugin_type] = dict()
plugin_names = self.get_all_plugins_of_type(plugin_type)
plugin_names = self.get_all_plugins_of_type(plugin_type, loader)
for plugin_name in plugin_names:
plugin_info = self.get_plugin_metadata(plugin_type, plugin_name)
plugin_info = self.get_plugin_metadata(plugin_type, plugin_name, loader)
if plugin_info is not None:
plugin_data[plugin_type][plugin_name] = plugin_info
@@ -170,8 +162,7 @@ class DocCLI(CLI):
return 0
def get_all_plugins_of_type(self, plugin_type):
loader = self.loader_map[plugin_type]
def get_all_plugins_of_type(self, plugin_type, loader):
plugin_list = set()
paths = loader._get_paths()
for path in paths:
@@ -179,9 +170,8 @@ class DocCLI(CLI):
plugin_list.update(plugins_to_add)
return sorted(set(plugin_list))
def get_plugin_metadata(self, plugin_type, plugin_name):
def get_plugin_metadata(self, plugin_type, plugin_name, loader):
# if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
loader = self.loader_map[plugin_type]
filename = loader.find_plugin(plugin_name, mod_type='.py', ignore_deprecated=True, check_aliases=True)
if filename is None:
raise AnsibleError("unable to load {0} plugin named {1} ".format(plugin_type, plugin_name))
@@ -350,11 +340,12 @@ class DocCLI(CLI):
if not doc or not isinstance(doc, dict):
with open(filename) as f:
metadata = extract_metadata(module_data=f.read())
if 'removed' not in metadata[0].get('status', []):
desc = 'UNDOCUMENTED'
display.warning("%s parsing did not produce documentation." % plugin)
else:
continue
if metadata[0]:
if 'removed' not in metadata[0].get('status', []):
display.warning("%s parsing did not produce documentation." % plugin)
else:
continue
desc = 'UNDOCUMENTED'
else:
desc = self.tty_ify(doc.get('short_description', 'INVALID SHORT DESCRIPTION').strip())
@@ -366,7 +357,7 @@ class DocCLI(CLI):
else:
text.append("%-*s %-*.*s" % (displace, plugin, linelimit, len(desc), desc))
except Exception as e:
raise AnsibleError("Failed reading docs at %s: %s" % (plugin, to_native(e)))
raise AnsibleError("Failed reading docs at %s: %s" % (plugin, to_native(e)), orig_exc=e)
if len(deprecated) > 0:
text.append("\nDEPRECATED:")
@@ -395,7 +386,7 @@ class DocCLI(CLI):
text.append("%-*s %-*.*s" % (displace, plugin, linelimit, len(filename), filename))
except Exception as e:
raise AnsibleError("Failed reading docs at %s: %s" % (plugin, to_native(e)))
raise AnsibleError("Failed reading docs at %s: %s" % (plugin, to_native(e)), orig_exc=e)
return "\n".join(text)


@@ -90,18 +90,29 @@ def read_docstub(filename):
"""
t_module_data = open(filename, 'r')
in_documentation = False
capturing = False
indent_detection = ''
doc_stub = []
for line in t_module_data:
# start capturing the stub until indentation returns
if capturing and line[0] == ' ':
doc_stub.append(line)
elif capturing and line[0] != ' ':
break
if 'short_description:' in line:
capturing = True
doc_stub.append(line)
if in_documentation:
# start capturing the stub until indentation returns
if capturing and line.startswith(indent_detection):
doc_stub.append(line)
elif capturing and not line.startswith(indent_detection):
break
elif line.lstrip().startswith('short_description:'):
capturing = True
# Detect that the short_description continues on the next line if it's indented more
# than short_description itself.
indent_detection = ' ' * (len(line) - len(line.lstrip()) + 1)
doc_stub.append(line)
elif line.startswith('DOCUMENTATION') and '=' in line:
in_documentation = True
data = AnsibleLoader(r"".join(doc_stub), file_name=filename).get_single_data()
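
Finally, a self-contained rendering of the new indent-aware scan that can be run
outside Ansible; the function name scan_docstub and the sample module source are
illustrative, and yaml.safe_load stands in for AnsibleLoader.

import yaml

SAMPLE_MODULE_SOURCE = '''
DOCUMENTATION = """
    module: demo
    short_description:
        Frobnicate the remote host
    description:
        - Longer text that the stub scan must not capture.
"""
'''


def scan_docstub(module_source):
    in_documentation = False
    capturing = False
    indent_detection = ''
    doc_stub = []
    for line in module_source.splitlines(True):
        if in_documentation:
            if capturing and line.startswith(indent_detection):
                doc_stub.append(line)   # indented deeper than the key: still part of its value
            elif capturing and not line.startswith(indent_detection):
                break                   # indentation returned: next field reached, stop scanning
            elif line.lstrip().startswith('short_description:'):
                capturing = True
                # continuation lines must be at least one space deeper than the key itself
                indent_detection = ' ' * (len(line) - len(line.lstrip()) + 1)
                doc_stub.append(line)
        elif line.startswith('DOCUMENTATION') and '=' in line:
            in_documentation = True
    return yaml.safe_load(''.join(doc_stub))


print(scan_docstub(SAMPLE_MODULE_SOURCE))  # -> {'short_description': 'Frobnicate the remote host'}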