[2.10] ansible-doc: include collection name in text output / plugin loader: return collection name; ansible-doc: handle ansible.builtin correctly (#70572)
* ansible-doc: include collection name in text output (#70401)
  * ansible-doc: include collection name in text output
  * Be more careful to not accidentally pass ansible.builtin for user-supplied modules.
  (cherry picked from commit f4c89eab23)
* plugin loader: return collection name; ansible-doc: handle ansible.builtin correctly (#70026)
  * Determine collection in plugin loader.
  * Fix test.
  * Use PluginPathContext objects in PluginLoader._plugin_path_cache instead of tuples.
  (cherry picked from commit 24dcaf8974)
parent 827c47d9bc
commit 15355ed059
10 changed files with 83 additions and 53 deletions
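For reference, the user-visible effect on the ansible-doc text output, taken from the expected-output fixture updated at the end of this diff (the fakemodule module from the testns.testcol test collection):

before: > FAKEMODULE (./collections/ansible_collections/testns/testcol/plugins/modules/fakemodule.py)
after:  > TESTNS.TESTCOL.FAKEMODULE (./collections/ansible_collections/testns/testcol/plugins/modules/fakemodule.py)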
changelogs/fragments/ansible-doc-collection-name.yml (new file)
@@ -0,0 +1,2 @@
+bugfixes:
+- "ansible-doc - include the collection name in the text output (https://github.com/ansible/ansible/pull/70401)."
changelogs/fragments/plugin-loader-collection-name.yml (new file)
@@ -0,0 +1,2 @@
+minor_changes:
+- "The plugin loader now keeps track of the collection where a plugin was resolved to, in particular whether the plugin was loaded from ansible-base's internal paths (``ansible.builtin``) or from user-supplied paths (no collection name)."
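A minimal sketch (not part of this commit) of how the new tracking surfaces through the loader API touched below; the 'ping' module is only an illustrative builtin:

from ansible.plugins.loader import module_loader

ctx = module_loader.find_plugin_with_context('ping', mod_type='.py')
if ctx.resolved:
    # 'ansible.builtin' when the plugin came from ansible-base's internal paths,
    # '' (empty string) when it came from a user-supplied path
    print(ctx.plugin_resolved_collection, ctx.plugin_resolved_path)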
lib/ansible/cli/doc.py
@@ -55,7 +55,7 @@ def add_collection_plugins(plugin_list, plugin_type, coll_filter=None):
         path = to_text(b_path, errors='surrogate_or_strict')
         collname = _get_collection_name_from_path(b_path)
         ptype = C.COLLECTION_PTYPE_COMPAT.get(plugin_type, plugin_type)
-        plugin_list.update(DocCLI.find_plugins(os.path.join(path, 'plugins', ptype), plugin_type, collection=collname))
+        plugin_list.update(DocCLI.find_plugins(os.path.join(path, 'plugins', ptype), False, plugin_type, collection=collname))


 class PluginNotFound(Exception):
@@ -69,7 +69,7 @@ class DocCLI(CLI):
     and it can create a short "snippet" which can be pasted into a playbook. '''

     # default ignore list for detailed views
-    IGNORE = ('module', 'docuri', 'version_added', 'short_description', 'now_date', 'plainexamples', 'returndocs')
+    IGNORE = ('module', 'docuri', 'version_added', 'short_description', 'now_date', 'plainexamples', 'returndocs', 'collection')

     def __init__(self, args):
@@ -181,9 +181,10 @@ class DocCLI(CLI):
             coll_filter = context.CLIARGS['args'][0]

         if coll_filter in ('', None):
-            paths = loader._get_paths()
-            for path in paths:
-                self.plugin_list.update(DocCLI.find_plugins(path, plugin_type))
+            paths = loader._get_paths_with_context()
+            for path_context in paths:
+                self.plugin_list.update(
+                    DocCLI.find_plugins(path_context.path, path_context.internal, plugin_type))

         add_collection_plugins(self.plugin_list, plugin_type, coll_filter=coll_filter)
@@ -258,9 +259,9 @@ class DocCLI(CLI):
     def get_all_plugins_of_type(plugin_type):
         loader = getattr(plugin_loader, '%s_loader' % plugin_type)
         plugin_list = set()
-        paths = loader._get_paths()
-        for path in paths:
-            plugins_to_add = DocCLI.find_plugins(path, plugin_type)
+        paths = loader._get_paths_with_context()
+        for path_context in paths:
+            plugins_to_add = DocCLI.find_plugins(path_context.path, path_context.internal, plugin_type)
             plugin_list.update(plugins_to_add)
         return sorted(set(plugin_list))
@@ -268,13 +269,11 @@ class DocCLI(CLI):
     def get_plugin_metadata(plugin_type, plugin_name):
         # if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
         loader = getattr(plugin_loader, '%s_loader' % plugin_type)
-        filename = loader.find_plugin(plugin_name, mod_type='.py', ignore_deprecated=True, check_aliases=True)
-        if filename is None:
+        result = loader.find_plugin_with_context(plugin_name, mod_type='.py', ignore_deprecated=True, check_aliases=True)
+        if not result.resolved:
             raise AnsibleError("unable to load {0} plugin named {1} ".format(plugin_type, plugin_name))
-        collection_name = 'ansible.builtin'
-        if plugin_name.startswith('ansible_collections.'):
-            collection_name = '.'.join(plugin_name.split('.')[1:3])
+        filename = result.plugin_resolved_path
+        collection_name = result.plugin_resolved_collection

         try:
             doc, __, __, __ = get_docstring(filename, fragment_loader, verbose=(context.CLIARGS['verbosity'] > 0),
@@ -313,11 +312,9 @@ class DocCLI(CLI):
         result = loader.find_plugin_with_context(plugin, mod_type='.py', ignore_deprecated=True, check_aliases=True)
         if not result.resolved:
             raise PluginNotFound('%s was not found in %s' % (plugin, search_paths))
-        plugin_name, filename = result.plugin_resolved_name, result.plugin_resolved_path
-        collection_name = 'ansible.builtin'
-        if plugin_name.startswith('ansible_collections.'):
-            collection_name = '.'.join(plugin_name.split('.')[1:3])
+        plugin_name = result.plugin_resolved_name
+        filename = result.plugin_resolved_path
+        collection_name = result.plugin_resolved_collection

         doc, plainexamples, returndocs, metadata = get_docstring(
             filename, fragment_loader, verbose=(context.CLIARGS['verbosity'] > 0),
@@ -328,13 +325,12 @@ class DocCLI(CLI):
             raise ValueError('%s did not contain a DOCUMENTATION attribute' % plugin)

         doc['filename'] = filename
+        doc['collection'] = collection_name
         return doc, plainexamples, returndocs, metadata

     @staticmethod
     def format_plugin_doc(plugin, plugin_type, doc, plainexamples, returndocs, metadata):
-        collection_name = 'ansible.builtin'
-        if plugin.startswith('ansible_collections.'):
-            collection_name = '.'.join(plugin.split('.')[1:3])
+        collection_name = doc['collection']

         # TODO: do we really want this?
         # add_collection_to_versions_and_dates(doc, '(unknown)', is_module=(plugin_type == 'module'))
@@ -363,14 +359,15 @@ class DocCLI(CLI):
                 text = DocCLI.get_snippet_text(doc)
             else:
                 try:
-                    text = DocCLI.get_man_text(doc)
+                    text = DocCLI.get_man_text(doc, collection_name)
                 except Exception as e:
                     raise AnsibleError("Unable to retrieve documentation from '%s' due to: %s" % (plugin, to_native(e)))

         return text

     @staticmethod
-    def find_plugins(path, ptype, collection=None):
+    def find_plugins(path, internal, ptype, collection=None):
+        # if internal, collection could be set to `ansible.builtin`

         display.vvvv("Searching %s for plugins" % path)
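A hedged illustration of the new DocCLI.find_plugins() calling convention introduced above (both paths below are made up for the example): internal ansible-base paths pass internal=True with no explicit collection, while collection paths pass internal=False plus the collection name, mirroring the callers changed earlier in this file:

# internal ansible-base search path; the collection is implied to be ansible.builtin
DocCLI.find_plugins('/usr/lib/python3/dist-packages/ansible/modules', True, 'module')

# collection search path; the collection is passed explicitly
DocCLI.find_plugins('/path/to/ansible_collections/testns/testcol/plugins/modules',
                    False, 'module', collection='testns.testcol')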
@@ -596,7 +593,7 @@ class DocCLI(CLI):
             text.append('')

     @staticmethod
-    def get_man_text(doc):
+    def get_man_text(doc, collection_name=''):
         # Create a copy so we don't modify the original
         doc = dict(doc)
@@ -606,7 +603,11 @@ class DocCLI(CLI):
         pad = display.columns * 0.20
         limit = max(display.columns - int(pad), 70)

-        text.append("> %s (%s)\n" % (doc.get(context.CLIARGS['type'], doc.get('plugin_type')).upper(), doc.pop('filename')))
+        plugin_name = doc.get(context.CLIARGS['type'], doc.get('plugin_type'))
+        if collection_name:
+            plugin_name = '%s.%s' % (collection_name, plugin_name)
+
+        text.append("> %s (%s)\n" % (plugin_name.upper(), doc.pop('filename')))

         if isinstance(doc['description'], list):
             desc = " ".join(doc.pop('description'))
lib/ansible/plugins/loader.py
@@ -112,6 +112,12 @@ def add_dirs_to_loader(which_loader, paths):
         loader.add_directory(path, with_subdir=True)


+class PluginPathContext(object):
+    def __init__(self, path, internal):
+        self.path = path
+        self.internal = internal
+
+
 class PluginLoadContext(object):
     def __init__(self):
         self.original_name = None
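A small usage sketch of the new path bookkeeping (assumed behaviour, mirroring the unit test updated at the end of this diff): _get_paths_with_context() deals in PluginPathContext objects, while _get_paths() keeps returning plain path strings:

from ansible.plugins.loader import PluginLoader, PluginPathContext

pl = PluginLoader('test', '', 'test', 'test_plugin')
pl._paths = [PluginPathContext('/path/one', False),  # user-supplied path -> no collection
             PluginPathContext('/path/two', True)]   # internal path -> ansible.builtin
assert pl._get_paths() == ['/path/one', '/path/two']
assert [p.internal for p in pl._get_paths_with_context()] == [False, True]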
@@ -123,6 +129,7 @@ class PluginLoadContext(object):
         self.exit_reason = None
         self.plugin_resolved_path = None
         self.plugin_resolved_name = None
+        self.plugin_resolved_collection = None  # empty string for resolved plugins from user-supplied paths
         self.deprecated = False
         self.removal_date = None
         self.removal_version = None
@@ -152,10 +159,11 @@ class PluginLoadContext(object):
         self.deprecation_warnings.append(warning_text)
         return self

-    def resolve(self, resolved_name, resolved_path, exit_reason):
+    def resolve(self, resolved_name, resolved_path, resolved_collection, exit_reason):
         self.pending_redirect = None
         self.plugin_resolved_name = resolved_name
         self.plugin_resolved_path = resolved_path
+        self.plugin_resolved_collection = resolved_collection
         self.exit_reason = exit_reason
         self.resolved = True
         return self
@@ -309,8 +317,8 @@ class PluginLoader:
             return self._all_directories(self.package_path)
         return [self.package_path]

-    def _get_paths(self, subdirs=True):
-        ''' Return a list of paths to search for plugins in '''
+    def _get_paths_with_context(self, subdirs=True):
+        ''' Return a list of PluginPathContext objects to search for plugins in '''

         # FIXME: This is potentially buggy if subdirs is sometimes True and sometimes False.
         # In current usage, everything calls this with subdirs=True except for module_utils_loader and ansible-doc
@@ -318,7 +326,7 @@ class PluginLoader:
         if self._paths is not None:
             return self._paths

-        ret = self._extra_dirs[:]
+        ret = [PluginPathContext(p, False) for p in self._extra_dirs]

         # look in any configured plugin paths, allow one level deep for subcategories
         if self.config is not None:
@@ -328,14 +336,14 @@ class PluginLoader:
                 contents = glob.glob("%s/*" % path) + glob.glob("%s/*/*" % path)
                 for c in contents:
                     if os.path.isdir(c) and c not in ret:
-                        ret.append(c)
+                        ret.append(PluginPathContext(c, False))
                 if path not in ret:
-                    ret.append(path)
+                    ret.append(PluginPathContext(path, False))

         # look for any plugins installed in the package subtree
         # Note package path always gets added last so that every other type of
         # path is searched before it.
-        ret.extend(self._get_package_paths(subdirs=subdirs))
+        ret.extend([PluginPathContext(p, True) for p in self._get_package_paths(subdirs=subdirs)])

         # HACK: because powershell modules are in the same directory
         # hierarchy as other modules we have to process them last. This is
@@ -353,12 +361,18 @@ class PluginLoader:
         # The expected sort order is paths in the order in 'ret' with paths ending in '/windows' at the end,
         # also in the original order they were found in 'ret'.
         # The .sort() method is guaranteed to be stable, so original order is preserved.
-        ret.sort(key=lambda p: p.endswith('/windows'))
+        ret.sort(key=lambda p: p.path.endswith('/windows'))

         # cache and return the result
         self._paths = ret
         return ret

+    def _get_paths(self, subdirs=True):
+        ''' Return a list of paths to search for plugins in '''
+
+        paths_with_context = self._get_paths_with_context(subdirs=subdirs)
+        return [path_with_context.path for path_with_context in paths_with_context]
+
     def _load_config_defs(self, name, module, path):
         ''' Reads plugin docs to find configuration setting definitions, to push to config manager for later use '''
|
||||||
|
|
||||||
# FIXME: and is file or file link or ...
|
# FIXME: and is file or file link or ...
|
||||||
if os.path.exists(n_resource_path):
|
if os.path.exists(n_resource_path):
|
||||||
return plugin_load_context.resolve(full_name, to_text(n_resource_path), 'found exact match for {0} in {1}'.format(full_name, acr.collection))
|
return plugin_load_context.resolve(
|
||||||
|
full_name, to_text(n_resource_path), acr.collection, 'found exact match for {0} in {1}'.format(full_name, acr.collection))
|
||||||
|
|
||||||
if extension:
|
if extension:
|
||||||
# the request was extension-specific, don't try for an extensionless match
|
# the request was extension-specific, don't try for an extensionless match
|
||||||
|
@@ -505,7 +520,8 @@ class PluginLoader:
            # TODO: warn?
            pass

-        return plugin_load_context.resolve(full_name, to_text(found_files[0]), 'found fuzzy extension match for {0} in {1}'.format(full_name, acr.collection))
+        return plugin_load_context.resolve(
+            full_name, to_text(found_files[0]), acr.collection, 'found fuzzy extension match for {0} in {1}'.format(full_name, acr.collection))

    def find_plugin(self, name, mod_type='', ignore_deprecated=False, check_aliases=False, collection_list=None):
        ''' Find a plugin named name '''
@@ -626,8 +642,10 @@ class PluginLoader:
         # requested mod_type
         pull_cache = self._plugin_path_cache[suffix]
         try:
-            plugin_load_context.plugin_resolved_path = pull_cache[name]
+            path_with_context = pull_cache[name]
+            plugin_load_context.plugin_resolved_path = path_with_context.path
             plugin_load_context.plugin_resolved_name = name
+            plugin_load_context.plugin_resolved_collection = 'ansible.builtin' if path_with_context.internal else ''
             plugin_load_context.resolved = True
             return plugin_load_context
         except KeyError:
@@ -638,8 +656,9 @@ class PluginLoader:
        # self._searched_paths we could use an iterator. Before enabling that
        # we need to make sure we don't want to add additional directories
        # (add_directory()) once we start using the iterator.
-        # We can use _get_paths() since add_directory() forces a cache refresh.
-        for path in (p for p in self._get_paths() if p not in self._searched_paths and os.path.isdir(p)):
+        # We can use _get_paths_with_context() since add_directory() forces a cache refresh.
+        for path_context in (p for p in self._get_paths_with_context() if p.path not in self._searched_paths and os.path.isdir(p.path)):
+            path = path_context.path
             display.debug('trying %s' % path)
             plugin_load_context.load_attempts.append(path)
             try:
@@ -658,6 +677,7 @@ class PluginLoader:

                 splitname = os.path.splitext(full_name)
                 base_name = splitname[0]
+                internal = path_context.internal
                 try:
                     extension = splitname[1]
                 except IndexError:
@@ -665,21 +685,23 @@ class PluginLoader:

                 # Module found, now enter it into the caches that match this file
                 if base_name not in self._plugin_path_cache['']:
-                    self._plugin_path_cache[''][base_name] = full_path
+                    self._plugin_path_cache[''][base_name] = PluginPathContext(full_path, internal)

                 if full_name not in self._plugin_path_cache['']:
-                    self._plugin_path_cache[''][full_name] = full_path
+                    self._plugin_path_cache[''][full_name] = PluginPathContext(full_path, internal)

                 if base_name not in self._plugin_path_cache[extension]:
-                    self._plugin_path_cache[extension][base_name] = full_path
+                    self._plugin_path_cache[extension][base_name] = PluginPathContext(full_path, internal)

                 if full_name not in self._plugin_path_cache[extension]:
-                    self._plugin_path_cache[extension][full_name] = full_path
+                    self._plugin_path_cache[extension][full_name] = PluginPathContext(full_path, internal)

             self._searched_paths.add(path)
             try:
-                plugin_load_context.plugin_resolved_path = pull_cache[name]
+                path_with_context = pull_cache[name]
+                plugin_load_context.plugin_resolved_path = path_with_context.path
                 plugin_load_context.plugin_resolved_name = name
+                plugin_load_context.plugin_resolved_collection = 'ansible.builtin' if path_with_context.internal else ''
                 plugin_load_context.resolved = True
                 return plugin_load_context
             except KeyError:
@@ -691,12 +713,14 @@ class PluginLoader:
            alias_name = '_' + name
            # We've already cached all the paths at this point
            if alias_name in pull_cache:
-                if not ignore_deprecated and not os.path.islink(pull_cache[alias_name]):
+                path_with_context = pull_cache[alias_name]
+                if not ignore_deprecated and not os.path.islink(path_with_context.path):
                    # FIXME: this is not always the case, some are just aliases
                    display.deprecated('%s is kept for backwards compatibility but usage is discouraged. '  # pylint: disable=ansible-deprecated-no-version
                                       'The module documentation details page may explain more about this rationale.' % name.lstrip('_'))
-                plugin_load_context.plugin_resolved_path = pull_cache[alias_name]
+                plugin_load_context.plugin_resolved_path = path_with_context.path
                plugin_load_context.plugin_resolved_name = alias_name
+                plugin_load_context.plugin_resolved_collection = 'ansible.builtin' if path_with_context.internal else ''
                plugin_load_context.resolved = True
                return plugin_load_context
testns.testcol test collection - cache plugin notjsonfile
@@ -6,7 +6,7 @@ from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

 DOCUMENTATION = '''
-    cache: testns.testcol.notjsonfile
+    cache: notjsonfile
     short_description: JSON formatted files.
     description:
         - This cache uses JSON formatted, per host, files saved to the filesystem.
testns.testcol test collection - inventory plugin statichost
@@ -5,7 +5,7 @@ from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

 DOCUMENTATION = '''
-    inventory: testns.testcol.statichost
+    inventory: statichost
     short_description: Add a single host
     description: Add a single host
     extends_documentation_fragment:
testns.testcol test collection - lookup plugin noop
@@ -7,7 +7,7 @@ from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

 DOCUMENTATION = """
-    lookup: testns.testcol.noop
+    lookup: noop
     author: Ansible core team
     short_description: returns input
     description:
testns.testcol test collection - vars plugin noop_vars_plugin
@@ -2,7 +2,7 @@ from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

 DOCUMENTATION = '''
-    vars: testns.testcol.noop_vars_plugin
+    vars: noop_vars_plugin
     short_description: Do NOT load host and group vars
     description: don't test loading host and group vars from a collection
     options:
ansible-doc integration test - expected text output for fakemodule
@@ -1,4 +1,4 @@
-> FAKEMODULE (./collections/ansible_collections/testns/testcol/plugins/modules/fakemodule.py)
+> TESTNS.TESTCOL.FAKEMODULE (./collections/ansible_collections/testns/testcol/plugins/modules/fakemodule.py)

        this is a fake module
test/units/plugins/test_loader.py
@@ -25,7 +25,7 @@ import os
 from units.compat import unittest
 from units.compat.builtins import BUILTINS
 from units.compat.mock import patch, MagicMock
-from ansible.plugins.loader import PluginLoader
+from ansible.plugins.loader import PluginLoader, PluginPathContext


 class TestErrors(unittest.TestCase):
@@ -59,7 +59,8 @@ class TestErrors(unittest.TestCase):

     def test_plugins__get_paths(self):
         pl = PluginLoader('test', '', 'test', 'test_plugin')
-        pl._paths = ['/path/one', '/path/two']
+        pl._paths = [PluginPathContext('/path/one', False),
+                     PluginPathContext('/path/two', True)]
         self.assertEqual(pl._get_paths(), ['/path/one', '/path/two'])

         # NOT YET WORKING