collection routing (#67684)
* `meta/` directory in collections
* runtime metadata for redirection/deprecation/removal of plugin loads
* a compatibility layer to keep existing content working on ansible-base + collections
* a Python import redirection layer to keep collections-hosted (and otherwise moved) content importable by things that don't know better
* supported Ansible version validation on collection loads
Parent: fdfa6fec75
Commit: f7dfa817ae
76 changed files with 2908 additions and 1100 deletions
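
Most of this diff is mechanical (call sites moving from AnsibleCollectionLoader to AnsibleCollectionConfig) plus the builtin routing table. As a rough illustration of the metadata format these changes consume — the collection name, redirect targets, and dates below are hypothetical, not taken from this commit — a collection's meta/runtime.yml could look like:

requires_ansible: '>=2.9.10'
plugin_routing:
  modules:
    old_ping:
      redirect: example_ns.example_coll.ping
    dusty_ping:
      deprecation:
        removal_date: '2021-12-01'
        warning_text: use example_ns.example_coll.ping instead
    dead_ping:
      tombstone:
        removal_date: '2020-12-01'
        redirect: example_ns.example_coll.ping
import_redirection:
  ansible.module_utils.old_util:
    redirect: ansible_collections.example_ns.example_coll.plugins.module_utils.new_util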

changelogs/fragments/collection_routing.yml (new file, 10 lines)
@@ -0,0 +1,10 @@
+major_changes:
+  - Plugin routing allows collections to declare deprecation, redirection targets, and removals for all plugin types.
+  - Routing data built into Ansible 2.10 ensures that 2.9 content should work unmodified on 2.10. Formerly included
+    modules and plugins that were moved to collections are still accessible by their original unqualified names, so long
+    as their destination collections are installed.
+  - Collections may declare a list of supported/tested Ansible versions for the collection. A warning is issued if a
+    collection does not support the Ansible version that loads it (can also be configured as silent or a fatal error).
+    Collections that do not declare supported Ansible versions do not issue a warning/error.
+  - Plugins that import module_utils and other ansible namespaces that have moved to collections should
+    continue to work unmodified.

@@ -1,2 +1,2 @@
 minor_changes:
-- validate-modules checks for deprecated in collections against meta/routing.yml
+- validate-modules checks for deprecated in collections against meta/runtime.yml

@@ -26,7 +26,8 @@ from ansible.parsing.dataloader import DataLoader
 from ansible.parsing.vault import PromptVaultSecret, get_file_vault_secret
 from ansible.plugins.loader import add_all_plugin_dirs
 from ansible.release import __version__
-from ansible.utils.collection_loader import AnsibleCollectionLoader, get_collection_name_from_path, set_collection_playbook_paths
+from ansible.utils.collection_loader import AnsibleCollectionConfig
+from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path
 from ansible.utils.display import Display
 from ansible.utils.path import unfrackpath
 from ansible.utils.unsafe_proxy import to_unsafe_text

@@ -455,11 +456,11 @@ class CLI(with_metaclass(ABCMeta, object)):
         if basedir:
             loader.set_basedir(basedir)
             add_all_plugin_dirs(basedir)
-            set_collection_playbook_paths(basedir)
+            AnsibleCollectionConfig.playbook_paths = basedir
-            default_collection = get_collection_name_from_path(basedir)
+            default_collection = _get_collection_name_from_path(basedir)
             if default_collection:
                 display.warning(u'running with default collection {0}'.format(default_collection))
-                AnsibleCollectionLoader().set_default_collection(default_collection)
+                AnsibleCollectionConfig.default_collection = default_collection

         vault_ids = list(options['vault_ids'])
         default_vault_ids = C.DEFAULT_VAULT_IDENTITY_LIST

@@ -28,7 +28,8 @@ from ansible.parsing.metadata import extract_metadata
 from ansible.parsing.plugin_docs import read_docstub
 from ansible.parsing.yaml.dumper import AnsibleDumper
 from ansible.plugins.loader import action_loader, fragment_loader
-from ansible.utils.collection_loader import set_collection_playbook_paths, get_collection_name_from_path
+from ansible.utils.collection_loader import AnsibleCollectionConfig
+from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path
 from ansible.utils.display import Display
 from ansible.utils.plugin_docs import BLACKLIST, get_docstring, get_versioned_doclink

@@ -44,11 +45,11 @@ def jdump(text):

 def add_collection_plugins(plugin_list, plugin_type, coll_filter=None):

-    # TODO: take into account routing.yml once implemented
+    # TODO: take into account runtime.yml once implemented
     b_colldirs = list_collection_dirs(coll_filter=coll_filter)
     for b_path in b_colldirs:
         path = to_text(b_path, errors='surrogate_or_strict')
-        collname = get_collection_name_from_path(b_path)
+        collname = _get_collection_name_from_path(b_path)
         ptype = C.COLLECTION_PTYPE_COMPAT.get(plugin_type, plugin_type)
         plugin_list.update(DocCLI.find_plugins(os.path.join(path, 'plugins', ptype), plugin_type, collection=collname))

@@ -127,7 +128,7 @@ class DocCLI(CLI):
         # add to plugin paths from command line
         basedir = context.CLIARGS['basedir']
         if basedir:
-            set_collection_playbook_paths(basedir)
+            AnsibleCollectionConfig.playbook_paths = basedir
             loader.add_directory(basedir, with_subdir=True)
         if context.CLIARGS['module_path']:
             for path in context.CLIARGS['module_path']:

@@ -16,7 +16,8 @@ from ansible.executor.playbook_executor import PlaybookExecutor
 from ansible.module_utils._text import to_bytes
 from ansible.playbook.block import Block
 from ansible.utils.display import Display
-from ansible.utils.collection_loader import AnsibleCollectionLoader, get_collection_name_from_path, set_collection_playbook_paths
+from ansible.utils.collection_loader import AnsibleCollectionConfig
+from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path
 from ansible.plugins.loader import add_all_plugin_dirs

@@ -90,13 +91,13 @@ class PlaybookCLI(CLI):
                 b_playbook_dirs.append(b_playbook_dir)

-            set_collection_playbook_paths(b_playbook_dirs)
+            AnsibleCollectionConfig.playbook_paths = b_playbook_dirs

-            playbook_collection = get_collection_name_from_path(b_playbook_dirs[0])
+            playbook_collection = _get_collection_name_from_path(b_playbook_dirs[0])

            if playbook_collection:
                display.warning("running playbook inside collection {0}".format(playbook_collection))
-               AnsibleCollectionLoader().set_default_collection(playbook_collection)
+               AnsibleCollectionConfig.default_collection = playbook_collection

        # don't deal with privilege escalation or passwords when we don't need to
        if not (context.CLIARGS['listhosts'] or context.CLIARGS['listtasks'] or

@@ -10,7 +10,7 @@ from collections import defaultdict

 from ansible.collections import is_collection_path
 from ansible.module_utils._text import to_bytes
-from ansible.utils.collection_loader import AnsibleCollectionLoader
+from ansible.utils.collection_loader import AnsibleCollectionConfig
 from ansible.utils.display import Display

 display = Display()

@@ -27,7 +27,7 @@ def list_valid_collection_paths(search_paths=None, warn=False):
     if search_paths is None:
         search_paths = []

-    search_paths.extend(AnsibleCollectionLoader().n_collection_paths)
+    search_paths.extend(AnsibleCollectionConfig.collection_paths)

     for path in search_paths:

@@ -1,8 +1,10 @@
 # Copyright (c) 2020 Ansible Project
 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+# This file is only for 2.9 backwards compatibility, expect it to go away in future ansible versions.
 plugin_routing:
   connection:
+    # test entry
+    redirected_local:
+      redirect: ansible.builtin.local
     buildah:
       redirect: containers.podman.buildah
     podman:

@@ -48,6 +50,12 @@ plugin_routing:
     persistent:
       redirect: ansible.netcommon.persistent
   modules:
+    # test entry
+    formerly_core_ping:
+      redirect: testns.testcoll.ping
+    # test entry
+    uses_redirected_action:
+      redirect: ansible.builtin.ping
     podman_container_info:
       redirect: containers.podman.podman_container_info
     podman_image_info:
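
With the formerly_core_ping test entry above, a 2.9-style task written against the bare module name keeps resolving once the destination collection is installed. A minimal sketch (testns.testcoll is a test fixture referenced throughout this diff, not a production collection):

- hosts: localhost
  gather_facts: false
  tasks:
    - name: unqualified name resolved through ansible.builtin's routing table
      formerly_core_ping:  # redirected to testns.testcoll.ping at plugin-load time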

@@ -4100,8 +4108,8 @@ plugin_routing:
       redirect: ansible.posix.sysctl
     async_status:
       redirect: ansible.windows.async_status
-    setup:
-      redirect: ansible.windows.setup
+    setup.ps1:
+      redirect: ansible.windows.setup.ps1
     slurp:
       redirect: ansible.windows.slurp
     win_acl:

@@ -7553,6 +7561,10 @@ plugin_routing:
     cpm_user:
       redirect: wti.remote.cpm_user
   module_utils:
+    formerly_core:
+      redirect: ansible_collections.testns.testcoll.plugins.module_utils.base
+    sub1.sub2.formerly_core:
+      redirect: ansible_collections.testns.testcoll.plugins.module_utils.base
     common:
       redirect: f5networks.f5_modules.common
     frr:

@@ -8052,6 +8064,9 @@ plugin_routing:
     vyos:
       redirect: vyos.vyos.vyos
   action:
+    # test entry, overloaded with module of same name to use a different base action (ie not "normal.py")
+    uses_redirected_action:
+      redirect: testns.testcoll.subclassed_norm
     aireos:
       redirect: community.general.aireos
     aruba:

@@ -8493,6 +8508,11 @@ plugin_routing:
     vyos:
       redirect: vyos.vyos.vyos
   filter:
+    # test entries
+    formerly_core_filter:
+      redirect: ansible.builtin.bool
+    formerly_core_masked_filter:
+      redirect: ansible.builtin.bool
     gcp_kms_encrypt:
       redirect: google.cloud.gcp_kms_encrypt
     gcp_kms_decrypt:

@@ -8575,6 +8595,9 @@ plugin_routing:
     qradar:
       redirect: ibm.qradar.qradar
   inventory:
+    # test entry
+    formerly_core_inventory:
+      redirect: testns.content_adj.statichost
     cloudscale:
       redirect: community.general.cloudscale
     docker_machine:

@@ -8622,6 +8645,9 @@ plugin_routing:
     azure_rm:
       redirect: azure.azcollection.azure_rm
   lookup:
+    # test entry
+    formerly_core_lookup:
+      redirect: testns.testcoll.mylookup
     avi:
       redirect: community.general.avi
     cartesian:

@@ -8714,7 +8740,29 @@ plugin_routing:
     junos:
       redirect: junipernetworks.junos.junos
   shell:
+    # test entry
+    formerly_core_powershell:
+      redirect: ansible.builtin.powershell
     csh:
       redirect: ansible.posix.csh
     fish:
       redirect: ansible.posix.fish
+  test:
+    # test entries
+    formerly_core_test:
+      redirect: ansible.builtin.search
+    formerly_core_masked_test:
+      redirect: ansible.builtin.search
+import_redirection:
+  # test entry
+  ansible.module_utils.formerly_core:
+    redirect: ansible_collections.testns.testcoll.plugins.module_utils.base
+  ansible.module_utils.known_hosts:
+    redirect: ansible_collections.community.general.plugins.module_utils.known_hosts
+  # ansible.builtin synthetic collection redirection hackery
+  ansible_collections.ansible.builtin.plugins.modules:
+    redirect: ansible.modules
+  ansible_collections.ansible.builtin.plugins.module_utils:
+    redirect: ansible.module_utils
+  ansible_collections.ansible.builtin.plugins:
+    redirect: ansible.plugins

@@ -215,6 +215,14 @@ CACHE_PLUGIN_TIMEOUT:
   - {key: fact_caching_timeout, section: defaults}
   type: integer
   yaml: {key: facts.cache.timeout}
+COLLECTIONS_SCAN_SYS_PATH:
+  name: enable/disable scanning sys.path for installed collections
+  default: true
+  type: boolean
+  env:
+  - {name: ANSIBLE_COLLECTIONS_SCAN_SYS_PATH}
+  ini:
+  - {key: collections_scan_sys_path, section: defaults}
 COLLECTIONS_PATHS:
   name: ordered list of root paths for loading installed Ansible collections content
   description: Colon separated paths in which Ansible will search for collections content.

@@ -224,6 +232,16 @@ COLLECTIONS_PATHS:
   - {name: ANSIBLE_COLLECTIONS_PATHS}
   ini:
   - {key: collections_paths, section: defaults}
+COLLECTIONS_ON_ANSIBLE_VERSION_MISMATCH:
+  name: Defines behavior when loading a collection that does not support the current Ansible version
+  description:
+  - When a collection is loaded that does not support the running Ansible version (via the collection metadata key
+    `requires_ansible`), the default behavior is to issue a warning and continue anyway. Setting this value to `ignore`
+    skips the warning entirely, while setting it to `fatal` will immediately halt Ansible execution.
+  env: [{name: ANSIBLE_COLLECTIONS_ON_ANSIBLE_VERSION_MISMATCH}]
+  ini: [{key: collections_on_ansible_version_mismatch, section: defaults}]
+  choices: [error, warning, ignore]
+  default: warning
 COLOR_CHANGED:
   name: Color for 'changed' task status
   default: yellow
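
Tying the two new options together, as a hedged sketch with illustrative values: a collection opts in by declaring a supported version range in its meta/runtime.yml:

requires_ansible: '>=2.9.10,<2.11'

and an operator who wants unsupported loads to hard-fail can set, in ansible.cfg:

[defaults]
collections_on_ansible_version_mismatch = error

or equivalently export ANSIBLE_COLLECTIONS_ON_ANSIBLE_VERSION_MISMATCH=error; leaving the default gives the warning behavior described above.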

@@ -276,6 +276,21 @@ class AnsibleFileNotFound(AnsibleRuntimeError):
                               suppress_extended_error=suppress_extended_error, orig_exc=orig_exc)


+class AnsiblePluginRemoved(AnsibleRuntimeError):
+    ''' a requested plugin has been removed '''
+    pass
+
+
+class AnsiblePluginCircularRedirect(AnsibleRuntimeError):
+    '''a cycle was detected in plugin redirection'''
+    pass
+
+
+class AnsibleCollectionUnsupportedVersionError(AnsibleRuntimeError):
+    '''a collection is not supported by this version of Ansible'''
+    pass
+
+
 # These Exceptions are temporary, using them as flow control until we can get a better solution.
 # DO NOT USE as they will probably be removed soon.
 # We will port the action modules in our tree to use a context manager instead.

@@ -36,9 +36,10 @@ from ansible import constants as C
 from ansible.errors import AnsibleError
 from ansible.executor.interpreter_discovery import InterpreterDiscoveryRequiredError
 from ansible.executor.powershell import module_manifest as ps_manifest
-from ansible.module_utils._text import to_bytes, to_text, to_native
-from ansible.module_utils.compat.importlib import import_module
+from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
 from ansible.plugins.loader import module_utils_loader
+from ansible.utils.collection_loader._collection_finder import _get_collection_metadata

 # Must import strategy and use write_locks from there
 # If we import write_locks directly then we end up binding a
 # variable to the object and then it never gets updated.

@@ -601,7 +602,8 @@ class ModuleInfo:
         path = None

         if imp is None:
-            self._info = info = importlib.machinery.PathFinder.find_spec(name, paths)
+            # don't pretend this is a top-level module, prefix the rest of the namespace
+            self._info = info = importlib.machinery.PathFinder.find_spec('ansible.module_utils.' + name, paths)
         if info is not None:
             self.py_src = os.path.splitext(info.origin)[1] in importlib.machinery.SOURCE_SUFFIXES
             self.pkg_dir = info.origin.endswith('/__init__.py')

@@ -632,33 +634,61 @@ class ModuleInfo:


 class CollectionModuleInfo(ModuleInfo):
-    def __init__(self, name, paths):
+    def __init__(self, name, pkg):
         self._mod_name = name
         self.py_src = True
-        # FIXME: Implement pkg_dir so that we can place __init__.py files
         self.pkg_dir = False

-        for path in paths:
-            self._package_name = '.'.join(path.split('/'))
-            try:
-                self.get_source()
-            except FileNotFoundError:
-                pass
-            else:
-                self.path = os.path.join(path, self._mod_name) + '.py'
-                break
-        else:
-            # FIXME (nitz): implement package fallback code
-            raise ImportError('unable to load collection-hosted module_util'
-                              ' {0}.{1}'.format(to_native(self._package_name),
-                                                to_native(name)))
+        split_name = pkg.split('.')
+        split_name.append(name)
+        if len(split_name) < 5 or split_name[0] != 'ansible_collections' or split_name[3] != 'plugins' or split_name[4] != 'module_utils':
+            raise ValueError('must search for something beneath a collection module_utils, not {0}.{1}'.format(to_native(pkg), to_native(name)))
+
+        # NB: we can't use pkgutil.get_data safely here, since we don't want to import/execute package/module code on
+        # the controller while analyzing/assembling the module, so we'll have to manually import the collection's
+        # Python package to locate it (import root collection, reassemble resource path beneath, fetch source)
+
+        # FIXME: handle MU redirection logic here
+
+        collection_pkg_name = '.'.join(split_name[0:3])
+        resource_base_path = os.path.join(*split_name[3:])
+        # look for package_dir first, then module
+
+        self._src = pkgutil.get_data(collection_pkg_name, to_native(os.path.join(resource_base_path, '__init__.py')))
+
+        if self._src is not None:  # empty string is OK
+            return
+
+        self._src = pkgutil.get_data(collection_pkg_name, to_native(resource_base_path + '.py'))
+
+        if not self._src:
+            raise ImportError('unable to load collection-hosted module_util'
+                              ' {0}.{1}'.format(to_native(pkg), to_native(name)))

     def get_source(self):
-        # FIXME (nitz): need this in py2 for some reason TBD, but we shouldn't (get_data delegates
-        # to wrong loader without it)
-        pkg = import_module(self._package_name)
-        data = pkgutil.get_data(to_native(self._package_name), to_native(self._mod_name + '.py'))
-        return data
+        return self._src
+
+
+class InternalRedirectModuleInfo(ModuleInfo):
+    def __init__(self, name, full_name):
+        self.pkg_dir = None
+        self._original_name = full_name
+        self.path = full_name.replace('.', '/') + '.py'
+        collection_meta = _get_collection_metadata('ansible.builtin')
+        redirect = collection_meta.get('plugin_routing', {}).get('module_utils', {}).get(name, {}).get('redirect', None)
+        if not redirect:
+            raise ImportError('no redirect found for {0}'.format(name))
+        self._redirect = redirect
+        self.py_src = True
+        self._shim_src = """
+import sys
+import {1} as mod
+
+sys.modules['{0}'] = mod
+""".format(self._original_name, self._redirect)
+
+    def get_source(self):
+        return self._shim_src


 def recursive_finder(name, module_fqn, data, py_module_names, py_module_cache, zf):

@@ -721,8 +751,7 @@ def recursive_finder(name, module_fqn, data, py_module_names, py_module_cache, zf):
                     break
                 try:
                     # this is a collection-hosted MU; look it up with pkgutil.get_data()
-                    module_info = CollectionModuleInfo(py_module_name[-idx],
-                                                       [os.path.join(*py_module_name[:-idx])])
+                    module_info = CollectionModuleInfo(py_module_name[-idx], '.'.join(py_module_name[:-idx]))
                     break
                 except ImportError:
                     continue

@@ -740,7 +769,13 @@ def recursive_finder(name, module_fqn, data, py_module_names, py_module_cache, zf):
                                              [os.path.join(p, *relative_module_utils_dir[:-idx]) for p in module_utils_paths])
                     break
                 except ImportError:
-                    continue
+                    # check metadata for redirect, generate stub if present
+                    try:
+                        module_info = InternalRedirectModuleInfo(py_module_name[-idx],
+                                                                 '.'.join(py_module_name[:(None if idx == 1 else -1)]))
+                        break
+                    except ImportError:
+                        continue
             else:
                 # If we get here, it's because of a bug in ModuleDepFinder. If we get a reproducer we
                 # should then fix ModuleDepFinder
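
To make the shim concrete: for the plugin_routing.module_utils entry formerly_core shown in the builtin runtime metadata above, InternalRedirectModuleInfo's _shim_src template would render to the following generated stub (a reconstruction for illustration, not a file in this diff):

import sys
# alias the redirected collection-hosted module_util under its legacy name
import ansible_collections.testns.testcoll.plugins.module_utils.base as mod

sys.modules['ansible.module_utils.formerly_core'] = mod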

@@ -142,7 +142,7 @@ class PSModuleDepFinder(object):
         if name in self.exec_scripts.keys():
             return

-        data = pkgutil.get_data("ansible.executor.powershell", name + ".ps1")
+        data = pkgutil.get_data("ansible.executor.powershell", to_native(name + ".ps1"))
         if data is None:
             raise AnsibleError("Could not find executor powershell script "
                                "for '%s'" % name)

@@ -27,7 +27,7 @@ from ansible.playbook.conditional import Conditional
 from ansible.playbook.task import Task
 from ansible.plugins.loader import become_loader, cliconf_loader, connection_loader, httpapi_loader, netconf_loader, terminal_loader
 from ansible.template import Templar
-from ansible.utils.collection_loader import AnsibleCollectionLoader
+from ansible.utils.collection_loader import AnsibleCollectionConfig
 from ansible.utils.listify import listify_lookup_plugin_terms
 from ansible.utils.unsafe_proxy import to_unsafe_text, wrap_var
 from ansible.vars.clean import namespace_facts, clean_facts

@@ -1101,7 +1101,7 @@ def start_connection(play_context, variables, task_uuid):
         # can.
         'ANSIBLE_BECOME_PLUGINS': become_loader.print_paths(),
         'ANSIBLE_CLICONF_PLUGINS': cliconf_loader.print_paths(),
-        'ANSIBLE_COLLECTIONS_PATHS': os.pathsep.join(AnsibleCollectionLoader().n_collection_paths),
+        'ANSIBLE_COLLECTIONS_PATHS': to_native(os.pathsep.join(AnsibleCollectionConfig.collection_paths)),
         'ANSIBLE_CONNECTION_PLUGINS': connection_loader.print_paths(),
         'ANSIBLE_HTTPAPI_PLUGINS': httpapi_loader.print_paths(),
         'ANSIBLE_NETCONF_PLUGINS': netconf_loader.print_paths(),

@@ -6,7 +6,7 @@ __metaclass__ = type

 from ansible.module_utils.six import string_types
 from ansible.playbook.attribute import FieldAttribute
-from ansible.utils.collection_loader import AnsibleCollectionLoader
+from ansible.utils.collection_loader import AnsibleCollectionConfig
 from ansible.template import is_template, Environment
 from ansible.utils.display import Display

@@ -14,7 +14,7 @@ display = Display()


 def _ensure_default_collection(collection_list=None):
-    default_collection = AnsibleCollectionLoader().default_collection
+    default_collection = AnsibleCollectionConfig.default_collection

     # Will be None when used as the default
     if collection_list is None:

@@ -25,7 +25,6 @@ from ansible.errors import AnsibleParserError, AnsibleUndefinedVariable, Ansible
 from ansible.module_utils._text import to_native
 from ansible.module_utils.six import string_types
 from ansible.parsing.mod_args import ModuleArgsParser
-from ansible.utils.collection_loader import AnsibleCollectionLoader
 from ansible.utils.display import Display

 display = Display()

@@ -32,7 +32,7 @@ from ansible.playbook.helpers import load_list_of_blocks
 from ansible.playbook.role.metadata import RoleMetadata
 from ansible.playbook.taggable import Taggable
 from ansible.plugins.loader import add_all_plugin_dirs
-from ansible.utils.collection_loader import AnsibleCollectionLoader
+from ansible.utils.collection_loader import AnsibleCollectionConfig
 from ansible.utils.vars import combine_vars

@@ -234,7 +234,7 @@ class Role(Base, Conditional, Taggable, CollectionSearch):
         if self._role_collection:  # this is a collection-hosted role
             self.collections.insert(0, self._role_collection)
         else:  # this is a legacy role, but set the default collection if there is one
-            default_collection = AnsibleCollectionLoader().default_collection
+            default_collection = AnsibleCollectionConfig.default_collection
             if default_collection:
                 self.collections.insert(0, default_collection)
             # legacy role, ensure all plugin dirs under the role are added to plugin search path

@@ -31,7 +31,8 @@ from ansible.playbook.collectionsearch import CollectionSearch
 from ansible.playbook.conditional import Conditional
 from ansible.playbook.taggable import Taggable
 from ansible.template import Templar
-from ansible.utils.collection_loader import get_collection_role_path, AnsibleCollectionRef
+from ansible.utils.collection_loader import AnsibleCollectionRef
+from ansible.utils.collection_loader._collection_finder import _get_collection_role_path
 from ansible.utils.path import unfrackpath
 from ansible.utils.display import Display

@@ -155,7 +156,7 @@ class RoleDefinition(Base, Conditional, Taggable, CollectionSearch):

         # try to load as a collection-based role first
         if self._collection_list or AnsibleCollectionRef.is_valid_fqcr(role_name):
-            role_tuple = get_collection_role_path(role_name, self._collection_list)
+            role_tuple = _get_collection_role_path(role_name, self._collection_list)

             if role_tuple:
                 # we found it, stash collection data and return the name/path tuple

@@ -36,7 +36,7 @@ from ansible.playbook.conditional import Conditional
 from ansible.playbook.loop_control import LoopControl
 from ansible.playbook.role import Role
 from ansible.playbook.taggable import Taggable
-from ansible.utils.collection_loader import AnsibleCollectionLoader
+from ansible.utils.collection_loader import AnsibleCollectionConfig
 from ansible.utils.display import Display
 from ansible.utils.sentinel import Sentinel

@@ -182,7 +182,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
         new_ds.ansible_pos = ds.ansible_pos

         # since this affects the task action parsing, we have to resolve in preprocess instead of in typical validator
-        default_collection = AnsibleCollectionLoader().default_collection
+        default_collection = AnsibleCollectionConfig.default_collection

         collections_list = ds.get('collections')
         if collections_list is None:

@@ -293,7 +293,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
         if self._parent:
             self._parent.post_validate(templar)

-        if AnsibleCollectionLoader().default_collection:
+        if AnsibleCollectionConfig.default_collection:
             pass

         super(Task, self).post_validate(templar)

@@ -216,10 +216,13 @@ class BaseInventoryPlugin(AnsiblePlugin):
         except Exception as e:
             raise AnsibleParserError(to_native(e))

+        # a plugin can be loaded via many different names with redirection- if so, we want to accept any of those names
+        valid_names = getattr(self, '_redirected_names') or [self.NAME]
+
         if not config:
             # no data
             raise AnsibleParserError("%s is empty" % (to_native(path)))
-        elif config.get('plugin') != self.NAME:
+        elif config.get('plugin') not in valid_names:
             # this is not my config file
             raise AnsibleParserError("Incorrect plugin name in file: %s" % config.get('plugin', 'none found'))
         elif not isinstance(config, Mapping):

@@ -16,25 +16,45 @@ import warnings
 from collections import defaultdict

 from ansible import constants as C
-from ansible.errors import AnsibleError
+from ansible.errors import AnsibleError, AnsiblePluginCircularRedirect, AnsiblePluginRemoved, AnsibleCollectionUnsupportedVersionError
 from ansible.module_utils._text import to_bytes, to_text, to_native
 from ansible.module_utils.compat.importlib import import_module
 from ansible.module_utils.six import string_types
 from ansible.parsing.utils.yaml import from_yaml
 from ansible.parsing.yaml.loader import AnsibleLoader
 from ansible.plugins import get_plugin_class, MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE
-from ansible.utils.collection_loader import AnsibleCollectionLoader, AnsibleFlatMapLoader, AnsibleCollectionRef
+from ansible.utils.collection_loader import AnsibleCollectionConfig, AnsibleCollectionRef
+from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder, _get_collection_metadata
 from ansible.utils.display import Display
 from ansible.utils.plugin_docs import add_fragments
+from ansible import __version__ as ansible_version
+
+# TODO: take the packaging dep, or vendor SpecifierSet?
+
+try:
+    from packaging.specifiers import SpecifierSet
+    from packaging.version import Version
+except ImportError:
+    SpecifierSet = None
+    Version = None
+
+try:
+    # use C version if possible for speedup
+    from yaml import CSafeLoader as SafeLoader
+except ImportError:
+    from yaml import SafeLoader

 try:
     import importlib.util
     imp = None
 except ImportError:
     import imp
+    ModuleNotFoundError = None

 display = Display()

+_tombstones = None
+

 def get_all_plugin_loaders():
     return [(name, obj) for (name, obj) in globals().items() if isinstance(obj, PluginLoader)]

@@ -93,6 +113,61 @@ def add_dirs_to_loader(which_loader, paths):
         loader.add_directory(path, with_subdir=True)


+class PluginLoadContext(object):
+    def __init__(self):
+        self.original_name = None
+        self.redirect_list = []
+        self.error_list = []
+        self.import_error_list = []
+        self.load_attempts = []
+        self.pending_redirect = None
+        self.exit_reason = None
+        self.plugin_resolved_path = None
+        self.plugin_resolved_name = None
+        self.deprecated = False
+        self.removal_date = None
+        self.deprecation_warnings = []
+        self.resolved = False
+
+    def record_deprecation(self, name, deprecation):
+        if not deprecation:
+            return self
+
+        warning_text = deprecation.get('warning_text', None)
+        removal_date = deprecation.get('removal_date', None)
+        if not warning_text:
+            if removal_date:
+                warning_text = '{0} has been deprecated and will be removed in a release after {1}'.format(name, removal_date)
+            else:
+                warning_text = '{0} has been deprecated and will be removed in a future release'.format(name)
+
+        self.deprecated = True
+        if removal_date:
+            self.removal_date = removal_date
+        self.deprecation_warnings.append(warning_text)
+        return self
+
+    def resolve(self, resolved_name, resolved_path, exit_reason):
+        self.pending_redirect = None
+        self.plugin_resolved_name = resolved_name
+        self.plugin_resolved_path = resolved_path
+        self.exit_reason = exit_reason
+        self.resolved = True
+        return self
+
+    def redirect(self, redirect_name):
+        self.pending_redirect = redirect_name
+        self.exit_reason = 'pending redirect resolution from {0} to {1}'.format(self.original_name, redirect_name)
+        self.resolved = False
+        return self
+
+    def nope(self, exit_reason):
+        self.pending_redirect = None
+        self.exit_reason = exit_reason
+        self.resolved = False
+        return self
+
+
 class PluginLoader:
     '''
     PluginLoader loads plugins from the configured plugin directories.

@@ -136,6 +211,9 @@ class PluginLoader:

         self._searched_paths = set()

+    def __repr__(self):
+        return 'PluginLoader(type={0})'.format(AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type(self.subdir))
+
     def _clear_caches(self):

         if C.OLD_PLUGIN_CACHE_CLEARING:

@@ -307,15 +385,80 @@ class PluginLoader:
         self._clear_caches()
         display.debug('Added %s to loader search path' % (directory))

-    def _find_fq_plugin(self, fq_name, extension):
+    def _query_collection_routing_meta(self, acr, plugin_type, extension=None):
+        collection_pkg = import_module(acr.n_python_collection_package_name)
+        if not collection_pkg:
+            return None
+
+        # FIXME: shouldn't need this...
+        try:
+            # force any type-specific metadata postprocessing to occur
+            import_module(acr.n_python_collection_package_name + '.plugins.{0}'.format(plugin_type))
+        except ImportError:
+            pass
+
+        # this will be created by the collection PEP302 loader
+        collection_meta = getattr(collection_pkg, '_collection_meta', None)
+
+        if not collection_meta:
+            return None
+
+        # TODO: add subdirs support
+        # check for extension-specific entry first (eg 'setup.ps1')
+        # TODO: str/bytes on extension/name munging
+        if acr.subdirs:
+            subdir_qualified_resource = '.'.join([acr.subdirs, acr.resource])
+        else:
+            subdir_qualified_resource = acr.resource
+        entry = collection_meta.get('plugin_routing', {}).get(plugin_type, {}).get(subdir_qualified_resource + extension, None)
+        if not entry:
+            # try for extension-agnostic entry
+            entry = collection_meta.get('plugin_routing', {}).get(plugin_type, {}).get(subdir_qualified_resource, None)
+        return entry
+
+    def _find_fq_plugin(self, fq_name, extension, plugin_load_context):
         """Search builtin paths to find a plugin. No external paths are searched,
         meaning plugins inside roles inside collections will be ignored.
         """
+        plugin_load_context.resolved = False
+
         plugin_type = AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type(self.subdir)

         acr = AnsibleCollectionRef.from_fqcr(fq_name, plugin_type)

+        # check collection metadata to see if any special handling is required for this plugin
+        routing_metadata = self._query_collection_routing_meta(acr, plugin_type, extension=extension)
+
+        # TODO: factor this into a wrapper method
+        if routing_metadata:
+            deprecation = routing_metadata.get('deprecation', None)
+
+            # this will no-op if there's no deprecation metadata for this plugin
+            plugin_load_context.record_deprecation(fq_name, deprecation)
+
+            tombstone = routing_metadata.get('tombstone', None)
+
+            if tombstone:
+                redirect = tombstone.get('redirect', None)
+                removal_date = tombstone.get('removal_date')
+                if removal_date:
+                    removed_msg = '{0} was removed on {1}'.format(fq_name, removal_date)
+                else:
+                    removed_msg = '{0} was removed in a previous release'.format(fq_name)
+                plugin_load_context.removal_date = removal_date
+                plugin_load_context.resolved = True
+                plugin_load_context.exit_reason = removed_msg
+                return plugin_load_context
+
+            redirect = routing_metadata.get('redirect', None)
+
+            if redirect:
+                # FIXME: remove once this is covered in debug or whatever
+                display.vv("redirecting (type: {0}) {1} to {2}".format(plugin_type, fq_name, redirect))
+                return plugin_load_context.redirect(redirect)
+            # TODO: non-FQCN case, do we support `.` prefix for current collection, assume it with no dots, require it for subdirs in current, or ?
+
         n_resource = to_native(acr.resource, errors='strict')
         # we want this before the extension is added
         full_name = '{0}.{1}'.format(acr.n_python_package_name, n_resource)

@@ -326,17 +469,10 @@ class PluginLoader:
         pkg = sys.modules.get(acr.n_python_package_name)
         if not pkg:
             # FIXME: there must be cheaper/safer way to do this
-            pkg = import_module(acr.n_python_package_name)
-
-        # if the package is one of our flatmaps, we need to consult its loader to find the path, since the file could be
-        # anywhere in the tree
-        if hasattr(pkg, '__loader__') and isinstance(pkg.__loader__, AnsibleFlatMapLoader):
             try:
-                file_path = pkg.__loader__.find_file(n_resource)
-                return full_name, to_text(file_path)
-            except IOError:
-                # this loader already takes care of extensionless files, so if we didn't find it, just bail
-                return None, None
+                pkg = import_module(acr.n_python_package_name)
+            except (ImportError, ModuleNotFoundError):
+                return plugin_load_context.nope('Python package {0} not found'.format(acr.n_python_package_name))

         pkg_path = os.path.dirname(pkg.__file__)

@@ -344,7 +480,11 @@ class PluginLoader:

         # FIXME: and is file or file link or ...
         if os.path.exists(n_resource_path):
-            return full_name, to_text(n_resource_path)
+            return plugin_load_context.resolve(full_name, to_text(n_resource_path), 'found exact match for {0} in {1}'.format(full_name, acr.collection))
+
+        if extension:
+            # the request was extension-specific, don't try for an extensionless match
+            return plugin_load_context.nope('no match for {0} in {1}'.format(to_text(n_resource), acr.collection))

         # look for any matching extension in the package location (sans filter)
         found_files = [f

@@ -352,24 +492,63 @@ class PluginLoader:
                        if os.path.isfile(f) and not f.endswith(C.MODULE_IGNORE_EXTS)]

         if not found_files:
-            return None, None
+            return plugin_load_context.nope('failed fuzzy extension match for {0} in {1}'.format(full_name, acr.collection))

         if len(found_files) > 1:
             # TODO: warn?
             pass

-        return full_name, to_text(found_files[0])
+        return plugin_load_context.resolve(full_name, to_text(found_files[0]), 'found fuzzy extension match for {0} in {1}'.format(full_name, acr.collection))

     def find_plugin(self, name, mod_type='', ignore_deprecated=False, check_aliases=False, collection_list=None):
         ''' Find a plugin named name '''
-        return self.find_plugin_with_name(name, mod_type, ignore_deprecated, check_aliases, collection_list)[1]
+        result = self.find_plugin_with_context(name, mod_type, ignore_deprecated, check_aliases, collection_list)
+        if result.resolved and result.plugin_resolved_path:
+            return result.plugin_resolved_path
+
+        return None

-    def find_plugin_with_name(self, name, mod_type='', ignore_deprecated=False, check_aliases=False, collection_list=None):
-        ''' Find a plugin named name '''
+    def find_plugin_with_context(self, name, mod_type='', ignore_deprecated=False, check_aliases=False, collection_list=None):
+        ''' Find a plugin named name, returning contextual info about the load, recursively resolving redirection '''
+        plugin_load_context = PluginLoadContext()
+        plugin_load_context.original_name = name
+        while True:
+            result = self._resolve_plugin_step(name, mod_type, ignore_deprecated, check_aliases, collection_list, plugin_load_context=plugin_load_context)
+            if result.pending_redirect:
+                if result.pending_redirect in result.redirect_list:
+                    raise AnsiblePluginCircularRedirect('plugin redirect loop resolving {0} (path: {1})'.format(result.original_name, result.redirect_list))
+                name = result.pending_redirect
+                result.pending_redirect = None
+                plugin_load_context = result
+            else:
+                break
+
+        # TODO: smuggle these to the controller when we're in a worker, reduce noise from normal things like missing plugin packages during collection search
+        if plugin_load_context.error_list:
+            display.warning("errors were encountered during the plugin load for {0}:\n{1}".format(name, plugin_load_context.error_list))
+
+        # TODO: display/return import_error_list? Only useful for forensics...
+
+        if plugin_load_context.deprecated and C.config.get_config_value('DEPRECATION_WARNINGS'):
+            for dw in plugin_load_context.deprecation_warnings:
+                # TODO: need to smuggle these to the controller if we're in a worker context
+                display.warning('[DEPRECATION WARNING] ' + dw)
+
+        return plugin_load_context
+
+    # FIXME: name bikeshed
+    def _resolve_plugin_step(self, name, mod_type='', ignore_deprecated=False,
+                             check_aliases=False, collection_list=None, plugin_load_context=PluginLoadContext()):
+        if not plugin_load_context:
+            raise ValueError('A PluginLoadContext is required')
+
+        plugin_load_context.redirect_list.append(name)
+        plugin_load_context.resolved = False
+
         global _PLUGIN_FILTERS
         if name in _PLUGIN_FILTERS[self.package]:
-            return None, None
+            plugin_load_context.exit_reason = '{0} matched a defined plugin filter'.format(name)
+            return plugin_load_context

         if mod_type:
             suffix = mod_type
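
A rough sketch of how the new context-returning lookup might be consumed (module_loader is the module PluginLoader instance defined elsewhere in this file; the exact call and names are illustrative, not part of this diff):

context = module_loader.find_plugin_with_context('formerly_core_ping', collection_list=['ansible.builtin'])
if context.resolved:
    # final, post-redirect plugin name and its on-disk location
    print(context.plugin_resolved_name, context.plugin_resolved_path)
    print(context.redirect_list)  # every name visited while chasing redirects
else:
    print(context.exit_reason)    # human-readable reason the lookup stopped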

@@ -387,37 +566,46 @@ class PluginLoader:
                 candidates = [name]
             else:
                 candidates = ['{0}.{1}'.format(c, name) for c in collection_list]
-            # TODO: keep actual errors, not just assembled messages
-            errors = []
+
             for candidate_name in candidates:
                 try:
+                    plugin_load_context.load_attempts.append(candidate_name)
                     # HACK: refactor this properly
                     if candidate_name.startswith('ansible.legacy'):
                         # 'ansible.legacy' refers to the plugin finding behavior used before collections existed.
                         # They need to search 'library' and the various '*_plugins' directories in order to find the file.
-                        full_name = name
-                        p = self._find_plugin_legacy(name.replace('ansible.legacy.', '', 1), ignore_deprecated, check_aliases, suffix)
+                        plugin_load_context = self._find_plugin_legacy(name.replace('ansible.legacy.', '', 1),
+                                                                       plugin_load_context, ignore_deprecated, check_aliases, suffix)
                     else:
                         # 'ansible.builtin' should be handled here. This means only internal, or builtin, paths are searched.
-                        full_name, p = self._find_fq_plugin(candidate_name, suffix)
-                        if p:
-                            return full_name, p
+                        plugin_load_context = self._find_fq_plugin(candidate_name, suffix, plugin_load_context=plugin_load_context)
+                        if plugin_load_context.resolved or plugin_load_context.pending_redirect:  # if we got an answer or need to chase down a redirect, return
+                            return plugin_load_context
+                except (AnsiblePluginRemoved, AnsiblePluginCircularRedirect, AnsibleCollectionUnsupportedVersionError):
+                    # these are generally fatal, let them fly
+                    raise
+                except ImportError as ie:
+                    plugin_load_context.import_error_list.append(ie)
                 except Exception as ex:
-                    errors.append(to_native(ex))
+                    # FIXME: keep actual errors, not just assembled messages
+                    plugin_load_context.error_list.append(to_native(ex))

-            if errors:
-                display.debug(msg='plugin lookup for {0} failed; errors: {1}'.format(name, '; '.join(errors)))
+            if plugin_load_context.error_list:
+                display.debug(msg='plugin lookup for {0} failed; errors: {1}'.format(name, '; '.join(plugin_load_context.error_list)))

-            return None, None
+            plugin_load_context.exit_reason = 'no matches found for {0}'.format(name)
+
+            return plugin_load_context

         # if we got here, there's no collection list and it's not an FQ name, so do legacy lookup

-        return name, self._find_plugin_legacy(name, ignore_deprecated, check_aliases, suffix)
+        return self._find_plugin_legacy(name, plugin_load_context, ignore_deprecated, check_aliases, suffix)

-    def _find_plugin_legacy(self, name, ignore_deprecated=False, check_aliases=False, suffix=None):
+    def _find_plugin_legacy(self, name, plugin_load_context, ignore_deprecated=False, check_aliases=False, suffix=None):
         """Search library and various *_plugins paths in order to find the file.
         This was behavior prior to the existence of collections.
         """
+        plugin_load_context.resolved = False
+
         if check_aliases:
             name = self.aliases.get(name, name)
@@ -426,7 +614,10 @@ class PluginLoader:
         # requested mod_type
         pull_cache = self._plugin_path_cache[suffix]
         try:
-            return pull_cache[name]
+            plugin_load_context.plugin_resolved_path = pull_cache[name]
+            plugin_load_context.plugin_resolved_name = name
+            plugin_load_context.resolved = True
+            return plugin_load_context
         except KeyError:
             # Cache miss. Now let's find the plugin
             pass
@@ -438,6 +629,7 @@ class PluginLoader:
         # We can use _get_paths() since add_directory() forces a cache refresh.
         for path in (p for p in self._get_paths() if p not in self._searched_paths and os.path.isdir(p)):
             display.debug('trying %s' % path)
+            plugin_load_context.load_attempts.append(path)
             try:
                 full_paths = (os.path.join(path, f) for f in os.listdir(path))
             except OSError as e:
@@ -474,7 +666,10 @@ class PluginLoader:
 
         self._searched_paths.add(path)
         try:
-            return pull_cache[name]
+            plugin_load_context.plugin_resolved_path = pull_cache[name]
+            plugin_load_context.plugin_resolved_name = name
+            plugin_load_context.resolved = True
+            return plugin_load_context
         except KeyError:
             # Didn't find the plugin in this directory. Load modules from the next one
             pass
@@ -488,9 +683,17 @@ class PluginLoader:
             # FIXME: this is not always the case, some are just aliases
             display.deprecated('%s is kept for backwards compatibility but usage is discouraged. '  # pylint: disable=ansible-deprecated-no-version
                                'The module documentation details page may explain more about this rationale.' % name.lstrip('_'))
-            return pull_cache[alias_name]
+            plugin_load_context.plugin_resolved_path = pull_cache[alias_name]
+            plugin_load_context.plugin_resolved_name = alias_name
+            plugin_load_context.resolved = True
+            return plugin_load_context
 
-        return None
+        # last ditch, if it's something that can be redirected, look for a builtin redirect before giving up
+        candidate_fqcr = 'ansible.builtin.{0}'.format(name)
+        if '.' not in name and AnsibleCollectionRef.is_valid_fqcr(candidate_fqcr):
+            return self._find_fq_plugin(fq_name=candidate_fqcr, extension=suffix, plugin_load_context=plugin_load_context)
+
+        return plugin_load_context.nope('{0} is not eligible for last-chance resolution'.format(name))
 
     def has_plugin(self, name, collection_list=None):
         ''' Checks if a plugin named name exists '''
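The "last chance resolution" added above means an unqualified legacy name that misses every configured path gets one final lookup as an ansible.builtin FQCR, which is what lets routed builtin names keep resolving. A toy sketch of just that name rewrite, not the Ansible implementation (the length check is a stand-in for AnsibleCollectionRef.is_valid_fqcr):

    def last_chance_candidate(name):
        candidate = 'ansible.builtin.{0}'.format(name)
        # only unqualified names are eligible; stand-in validity check
        if '.' not in name and len(candidate.split('.')) >= 3:
            return candidate
        return None

    print(last_chance_candidate('ping'))          # ansible.builtin.ping
    print(last_chance_candidate('ns.coll.ping'))  # None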
@@ -530,11 +733,12 @@ class PluginLoader:
             module = imp.load_source(to_native(full_name), to_native(path), module_file)
         return module
 
-    def _update_object(self, obj, name, path):
+    def _update_object(self, obj, name, path, redirected_names=None):
 
         # set extra info on the module, in case we want it later
         setattr(obj, '_original_path', path)
         setattr(obj, '_load_name', name)
+        setattr(obj, '_redirected_names', redirected_names or [])
 
     def get(self, name, *args, **kwargs):
         ''' instantiates a plugin of the given name using arguments '''
@@ -544,10 +748,15 @@ class PluginLoader:
         collection_list = kwargs.pop('collection_list', None)
         if name in self.aliases:
             name = self.aliases[name]
-        name, path = self.find_plugin_with_name(name, collection_list=collection_list)
-        if path is None:
+        plugin_load_context = self.find_plugin_with_context(name, collection_list=collection_list)
+        if not plugin_load_context.resolved or not plugin_load_context.plugin_resolved_path:
+            # FIXME: this is probably an error (eg removed plugin)
             return None
 
+        name = plugin_load_context.plugin_resolved_name
+        path = plugin_load_context.plugin_resolved_path
+        redirected_names = plugin_load_context.redirect_list or []
+
         if path not in self._module_cache:
             self._module_cache[path] = self._load_module_source(name, path)
             self._load_config_defs(name, self._module_cache[path], path)
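After this change, get() consumes a context object instead of a (name, path) tuple: nothing loads unless both the resolved flag and a resolved path are set, and the accumulated redirect_list rides along into _update_object(). A minimal sketch of that consumption contract, using a simplified stand-in for PluginLoadContext:

    class LoadContext(object):
        def __init__(self):
            self.resolved = False
            self.plugin_resolved_name = None
            self.plugin_resolved_path = None
            self.redirect_list = []

    def consume(ctx):
        # mirrors the guard in get(): unresolved or pathless lookups yield nothing
        if not ctx.resolved or not ctx.plugin_resolved_path:
            return None
        return ctx.plugin_resolved_name, ctx.plugin_resolved_path, ctx.redirect_list or []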
@@ -566,6 +775,7 @@ class PluginLoader:
             if not issubclass(obj, plugin_class):
                 return None
 
+        # FIXME: update this to use the load context
         self._display_plugin_load(self.class_name, name, self._searched_paths, path, found_in_cache=found_in_cache, class_only=class_only)
 
         if not class_only:
@@ -573,7 +783,7 @@ class PluginLoader:
                 # A plugin may need to use its _load_name in __init__ (for example, to set
                 # or get options from config), so update the object before using the constructor
                 instance = object.__new__(obj)
-                self._update_object(instance, name, path)
+                self._update_object(instance, name, path, redirected_names)
                 obj.__init__(instance, *args, **kwargs)
                 obj = instance
             except TypeError as e:
@@ -583,7 +793,7 @@ class PluginLoader:
                     return None
                 raise
 
-        self._update_object(obj, name, path)
+        self._update_object(obj, name, path, redirected_names)
         return obj
 
     def _display_plugin_load(self, class_name, name, searched_paths, path, found_in_cache=None, class_only=None):
@@ -818,9 +1028,52 @@ def _load_plugin_filter():
     return filters
 
 
+# since we don't want the actual collection loader understanding metadata, we'll do it in an event handler
+def _on_collection_load_handler(collection_name, collection_path):
+    display.vvvv(to_text('Loading collection {0} from {1}'.format(collection_name, collection_path)))
+
+    collection_meta = _get_collection_metadata(collection_name)
+
+    try:
+        if not _does_collection_support_ansible_version(collection_meta.get('requires_ansible', ''), ansible_version):
+            mismatch_behavior = C.config.get_config_value('COLLECTIONS_ON_ANSIBLE_VERSION_MISMATCH')
+            message = 'Collection {0} does not support Ansible version {1}'.format(collection_name, ansible_version)
+            if mismatch_behavior == 'warning':
+                display.warning(message)
+            elif mismatch_behavior == 'error':
+                raise AnsibleCollectionUnsupportedVersionError(message)
+    except AnsibleError:
+        raise
+    except Exception as ex:
+        display.warning('Error parsing collection metadata requires_ansible value from collection {0}: {1}'.format(collection_name, ex))
+
+
+def _does_collection_support_ansible_version(requirement_string, ansible_version):
+    if not requirement_string:
+        return True
+
+    if not SpecifierSet:
+        display.warning('packaging Python module unavailable; unable to validate collection Ansible version requirements')
+        return True
+
+    ss = SpecifierSet(requirement_string)
+
+    # ignore prerelease/postrelease/beta/dev flags for simplicity
+    base_ansible_version = Version(ansible_version).base_version
+
+    return ss.contains(base_ansible_version)
+
+
 def _configure_collection_loader():
-    if not any((isinstance(l, AnsibleCollectionLoader) for l in sys.meta_path)):
-        sys.meta_path.insert(0, AnsibleCollectionLoader(C.config))
+    if AnsibleCollectionConfig.collection_finder:
+        display.warning('AnsibleCollectionFinder has already been configured')
+        return
+
+    finder = _AnsibleCollectionFinder(C.config.get_config_value('COLLECTIONS_PATHS'), C.config.get_config_value('COLLECTIONS_SCAN_SYS_PATH'))
+    finder._install()
+
+    # this should succeed now
+    AnsibleCollectionConfig.on_collection_load += _on_collection_load_handler
+
+
 # TODO: All of the following is initialization code It should be moved inside of an initialization
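The requires_ansible gate above leans on the packaging library's specifier semantics: an empty declaration (or a missing packaging module) is treated as supported, and prerelease/dev suffixes on the running version are dropped via base_version. A minimal standalone sketch of the same check, assuming packaging is installed (the requirement strings below are illustrative):

    from packaging.specifiers import SpecifierSet
    from packaging.version import Version

    def supports(requires_ansible, running_version):
        if not requires_ansible:
            # collections that declare nothing are assumed compatible
            return True
        # base_version drops pre/post/dev segments, e.g. '2.10.0.dev0' -> '2.10.0'
        return SpecifierSet(requires_ansible).contains(Version(running_version).base_version)

    print(supports('>=2.9,<2.11', '2.10.0.dev0'))  # True
    print(supports('>=2.10', '2.9.6'))             # False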
lib/ansible/plugins/strategy/linear.py
@@ -240,17 +240,6 @@ class StrategyModule(StrategyBase):
                 run_once = False
                 work_to_do = True
 
-                # test to see if the task across all hosts points to an action plugin which
-                # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
-                # will only send this task to the first host in the list.
-
-                try:
-                    action = action_loader.get(task.action, class_only=True)
-                except KeyError:
-                    # we don't care here, because the action may simply not have a
-                    # corresponding action plugin
-                    action = None
-
                 # check to see if this task should be skipped, due to it being a member of a
                 # role which has already run (and whether that role allows duplicate execution)
                 if task._role and task._role.has_run(host):
@@ -260,6 +249,26 @@ class StrategyModule(StrategyBase):
                     display.debug("'%s' skipped because role has already run" % task)
                     continue
 
+                display.debug("getting variables")
+                task_vars = self._variable_manager.get_vars(play=iterator._play, host=host, task=task,
+                                                            _hosts=self._hosts_cache, _hosts_all=self._hosts_cache_all)
+                self.add_tqm_variables(task_vars, play=iterator._play)
+                templar = Templar(loader=self._loader, variables=task_vars)
+                display.debug("done getting variables")
+
+                # test to see if the task across all hosts points to an action plugin which
+                # sets BYPASS_HOST_LOOP to true, or if it has run_once enabled. If so, we
+                # will only send this task to the first host in the list.
+
+                task.action = templar.template(task.action)
+
+                try:
+                    action = action_loader.get(task.action, class_only=True)
+                except KeyError:
+                    # we don't care here, because the action may simply not have a
+                    # corresponding action plugin
+                    action = None
+
                 if task.action == 'meta':
                     # for the linear strategy, we run meta tasks just once and for
                     # all hosts currently being iterated over rather than one host
@@ -277,13 +286,6 @@ class StrategyModule(StrategyBase):
                             skip_rest = True
                             break
 
-                display.debug("getting variables")
-                task_vars = self._variable_manager.get_vars(play=iterator._play, host=host, task=task,
-                                                            _hosts=self._hosts_cache, _hosts_all=self._hosts_cache_all)
-                self.add_tqm_variables(task_vars, play=iterator._play)
-                templar = Templar(loader=self._loader, variables=task_vars)
-                display.debug("done getting variables")
-
                 run_once = templar.template(task.run_once) or action and getattr(action, 'BYPASS_HOST_LOOP', False)
 
                 if (task.any_errors_fatal or run_once) and not task.ignore_errors:
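The reordering above matters because task.action may itself be a template: it has to be rendered with the host's variables before the action-plugin lookup that drives BYPASS_HOST_LOOP/run_once. A small standalone illustration of that templating step (the variable name is invented):

    from ansible.parsing.dataloader import DataLoader
    from ansible.template import Templar

    templar = Templar(loader=DataLoader(), variables={'my_action_var': 'add_host'})
    print(templar.template('{{ my_action_var }}'))  # -> add_host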
lib/ansible/template/__init__.py
@@ -30,6 +30,7 @@ import time
 from contextlib import contextmanager
 from distutils.version import LooseVersion
 from numbers import Number
+from traceback import format_exc
 
 try:
     from hashlib import sha1
@@ -53,6 +54,7 @@ from ansible.template.template import AnsibleJ2Template
 from ansible.template.vars import AnsibleJ2Vars
 from ansible.utils.collection_loader import AnsibleCollectionRef
 from ansible.utils.display import Display
+from ansible.utils.collection_loader._collection_finder import _get_collection_metadata
 from ansible.utils.unsafe_proxy import wrap_var
 
 display = Display()
@@ -350,52 +352,75 @@ class JinjaPluginIntercept(MutableMapping):
     # FUTURE: we can cache FQ filter/test calls for the entire duration of a run, since a given collection's impl's
     # aren't supposed to change during a run
     def __getitem__(self, key):
-        if not isinstance(key, string_types):
-            raise ValueError('key must be a string')
-
-        key = to_native(key)
-
-        if '.' not in key:  # might be a built-in value, delegate to base dict
-            return self._delegatee.__getitem__(key)
-
-        func = self._collection_jinja_func_cache.get(key)
-
-        if func:
-            return func
-
-        acr = AnsibleCollectionRef.try_parse_fqcr(key, self._dirname)
-
-        if not acr:
-            raise KeyError('invalid plugin name: {0}'.format(key))
-
         try:
-            pkg = import_module(acr.n_python_package_name)
-        except ImportError:
-            raise KeyError()
+            if not isinstance(key, string_types):
+                raise ValueError('key must be a string')
 
-        parent_prefix = acr.collection
+            key = to_native(key)
 
-        if acr.subdirs:
-            parent_prefix = '{0}.{1}'.format(parent_prefix, acr.subdirs)
+            if '.' not in key:  # might be a built-in or legacy, check the delegatee dict first, then try for a last-chance base redirect
+                func = self._delegatee.get(key)
 
-        for dummy, module_name, ispkg in pkgutil.iter_modules(pkg.__path__, prefix=parent_prefix + '.'):
-            if ispkg:
-                continue
+                if func:
+                    return func
+
+                ts = _get_collection_metadata('ansible.builtin')
+
+                # TODO: implement support for collection-backed redirect (currently only builtin)
+                # TODO: implement cycle detection (unified across collection redir as well)
+                redirect_fqcr = ts.get('plugin_routing', {}).get(self._dirname, {}).get(key, {}).get('redirect', None)
+                if redirect_fqcr:
+                    acr = AnsibleCollectionRef.from_fqcr(ref=redirect_fqcr, ref_type=self._dirname)
+                    display.vvv('redirecting {0} {1} to {2}.{3}'.format(self._dirname, key, acr.collection, acr.resource))
+                    key = redirect_fqcr
+                # TODO: handle recursive forwarding (not necessary for builtin, but definitely for further collection redirs)
+
+            func = self._collection_jinja_func_cache.get(key)
+
+            if func:
+                return func
+
+            acr = AnsibleCollectionRef.try_parse_fqcr(key, self._dirname)
+
+            if not acr:
+                raise KeyError('invalid plugin name: {0}'.format(key))
 
             try:
-                plugin_impl = self._pluginloader.get(module_name)
-            except Exception as e:
-                raise TemplateSyntaxError(to_native(e), 0)
+                pkg = import_module(acr.n_python_package_name)
+            except ImportError:
+                raise KeyError()
 
-            method_map = getattr(plugin_impl, self._method_map_name)
+            parent_prefix = acr.collection
 
-            for f in iteritems(method_map()):
-                fq_name = '.'.join((parent_prefix, f[0]))
-                # FIXME: detect/warn on intra-collection function name collisions
-                self._collection_jinja_func_cache[fq_name] = f[1]
+            if acr.subdirs:
+                parent_prefix = '{0}.{1}'.format(parent_prefix, acr.subdirs)
 
-        function_impl = self._collection_jinja_func_cache[key]
-        return function_impl
+            # TODO: implement collection-level redirect
+
+            for dummy, module_name, ispkg in pkgutil.iter_modules(pkg.__path__, prefix=parent_prefix + '.'):
+                if ispkg:
+                    continue
+
+                try:
+                    plugin_impl = self._pluginloader.get(module_name)
+                except Exception as e:
+                    raise TemplateSyntaxError(to_native(e), 0)
+
+                method_map = getattr(plugin_impl, self._method_map_name)
+
+                for f in iteritems(method_map()):
+                    fq_name = '.'.join((parent_prefix, f[0]))
+                    # FIXME: detect/warn on intra-collection function name collisions
+                    self._collection_jinja_func_cache[fq_name] = f[1]
+
+            function_impl = self._collection_jinja_func_cache[key]
+            return function_impl
+        except KeyError:
+            raise
+        except Exception as ex:
+            display.warning('an unexpected error occurred during Jinja2 environment setup: {0}'.format(to_native(ex)))
+            display.vvv('exception during Jinja2 environment setup: {0}'.format(format_exc()))
+            raise
 
     def __setitem__(self, key, value):
         return self._delegatee.__setitem__(key, value)
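The reworked __getitem__ above makes the Jinja2 filter/test dictionaries routing-aware: a dotless name is first tried against the plain delegatee dict, then checked against ansible.builtin's plugin_routing table for a redirect before normal FQCR resolution runs. A toy sketch of that intercept idea, not the Ansible implementation (the routing table and filter below are invented):

    routing = {'filter': {'json_query': {'redirect': 'community.general.json_query'}}}

    class InterceptDict(dict):
        def __init__(self, dirname, *args, **kwargs):
            super(InterceptDict, self).__init__(*args, **kwargs)
            self._dirname = dirname

        def __getitem__(self, key):
            redirect = routing.get(self._dirname, {}).get(key, {}).get('redirect')
            if redirect is not None:
                key = redirect  # follow the routing entry, as the real code does for builtins
            return super(InterceptDict, self).__getitem__(key)

    filters = InterceptDict('filter', {'community.general.json_query': lambda data, expr: data})
    print(filters['json_query'])  # resolved via the redirect entry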
lib/ansible/utils/collection_loader.py
@@ -1,603 +0,0 @@
-# (c) 2019 Ansible Project
-# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
-
-from __future__ import (absolute_import, division, print_function)
-__metaclass__ = type
-
-import os
-import os.path
-import re
-import sys
-
-from types import ModuleType
-
-from ansible.module_utils._text import to_bytes, to_native, to_text
-from ansible.module_utils.compat.importlib import import_module
-from ansible.module_utils.six import iteritems, string_types, with_metaclass
-from ansible.utils.singleton import Singleton
-
-_SYNTHETIC_PACKAGES = {
-    # these provide fallback package definitions when there are no on-disk paths
-    'ansible_collections': dict(type='pkg_only', allow_external_subpackages=True),
-    'ansible_collections.ansible': dict(type='pkg_only', allow_external_subpackages=True),
-    # these implement the ansible.builtin synthetic collection mapped to the packages inside the ansible distribution
-    'ansible_collections.ansible.builtin': dict(type='pkg_only'),
-    'ansible_collections.ansible.builtin.plugins': dict(type='map', map='ansible.plugins'),
-    'ansible_collections.ansible.builtin.plugins.module_utils': dict(type='map', map='ansible.module_utils', graft=True),
-    'ansible_collections.ansible.builtin.plugins.modules': dict(type='flatmap', flatmap='ansible.modules', graft=True),
-}
-
-
-# FIXME: exception handling/error logging
-class AnsibleCollectionLoader(with_metaclass(Singleton, object)):
-    def __init__(self, config=None):
-        if config:
-            paths = config.get_config_value('COLLECTIONS_PATHS')
-        else:
-            paths = os.environ.get('ANSIBLE_COLLECTIONS_PATHS', '').split(os.pathsep)
-
-        if isinstance(paths, string_types):
-            paths = [paths]
-        elif paths is None:
-            paths = []
-
-        # expand any placeholders in configured paths
-        paths = [
-            to_native(os.path.expanduser(p), errors='surrogate_or_strict')
-            for p in paths
-        ]
-
-        # Append all ``ansible_collections`` dirs from sys.path to the end
-        for path in sys.path:
-            if (
-                    path not in paths and
-                    os.path.isdir(to_bytes(
-                        os.path.join(path, 'ansible_collections'),
-                        errors='surrogate_or_strict',
-                    ))
-            ):
-                paths.append(path)
-
-        self._n_configured_paths = paths
-
-        self._n_playbook_paths = []
-        self._default_collection = None
-        # pre-inject grafted package maps so we can force them to use the right loader instead of potentially delegating to a "normal" loader
-        for syn_pkg_def in (p for p in iteritems(_SYNTHETIC_PACKAGES) if p[1].get('graft')):
-            pkg_name = syn_pkg_def[0]
-            pkg_def = syn_pkg_def[1]
-
-            newmod = ModuleType(pkg_name)
-            newmod.__package__ = pkg_name
-            newmod.__file__ = '<ansible_synthetic_collection_package>'
-            pkg_type = pkg_def.get('type')
-
-            # TODO: need to rethink map style so we can just delegate all the loading
-
-            if pkg_type == 'flatmap':
-                newmod.__loader__ = AnsibleFlatMapLoader(import_module(pkg_def['flatmap']))
-            newmod.__path__ = []
-
-            sys.modules[pkg_name] = newmod
-
-    @property
-    def n_collection_paths(self):
-        return self._n_playbook_paths + self._n_configured_paths
-
-    def get_collection_path(self, collection_name):
-        if not AnsibleCollectionRef.is_valid_collection_name(collection_name):
-            raise ValueError('{0} is not a valid collection name'.format(to_native(collection_name)))
-
-        m = import_module('ansible_collections.{0}'.format(collection_name))
-
-        return m.__file__
-
-    def set_playbook_paths(self, b_playbook_paths):
-        if isinstance(b_playbook_paths, string_types):
-            b_playbook_paths = [b_playbook_paths]
-
-        # track visited paths; we have to preserve the dir order as-passed in case there are duplicate collections (first one wins)
-        added_paths = set()
-
-        # de-dupe and ensure the paths are native strings (Python seems to do this for package paths etc, so assume it's safe)
-        self._n_playbook_paths = [os.path.join(to_native(p), 'collections') for p in b_playbook_paths if not (p in added_paths or added_paths.add(p))]
-        # FIXME: only allow setting this once, or handle any necessary cache/package path invalidations internally?
-
-    # FIXME: is there a better place to store this?
-    # FIXME: only allow setting this once
-    def set_default_collection(self, collection_name):
-        self._default_collection = collection_name
-
-    @property
-    def default_collection(self):
-        return self._default_collection
-
-    def find_module(self, fullname, path=None):
-        if self._find_module(fullname, path, load=False)[0]:
-            return self
-
-        return None
-
-    def load_module(self, fullname):
-        mod = self._find_module(fullname, None, load=True)[1]
-
-        if not mod:
-            raise ImportError('module {0} not found'.format(fullname))
-
-        return mod
-
-    def _find_module(self, fullname, path, load):
-        # this loader is only concerned with items under the Ansible Collections namespace hierarchy, ignore others
-        if not fullname.startswith('ansible_collections.') and fullname != 'ansible_collections':
-            return False, None
-
-        if sys.modules.get(fullname):
-            if not load:
-                return True, None
-
-            return True, sys.modules[fullname]
-
-        newmod = None
-
-        # this loader implements key functionality for Ansible collections
-        # * implicit distributed namespace packages for the root Ansible namespace (no pkgutil.extend_path hackery reqd)
-        # * implicit package support for Python 2.7 (no need for __init__.py in collections, except to use standard Py2.7 tooling)
-        # * preventing controller-side code injection during collection loading
-        # * (default loader would execute arbitrary package code from all __init__.py's)
-
-        parent_pkg_name = '.'.join(fullname.split('.')[:-1])
-
-        parent_pkg = sys.modules.get(parent_pkg_name)
-
-        if parent_pkg_name and not parent_pkg:
-            raise ImportError('parent package {0} not found'.format(parent_pkg_name))
-
-        # are we at or below the collection level? eg a.mynamespace.mycollection.something.else
-        # if so, we don't want distributed namespace behavior; first mynamespace.mycollection on the path is where
-        # we'll load everything from (ie, don't fall back to another mynamespace.mycollection lower on the path)
-        sub_collection = fullname.count('.') > 1
-
-        synpkg_def = _SYNTHETIC_PACKAGES.get(fullname)
-        synpkg_remainder = ''
-
-        if not synpkg_def:
-            # if the parent is a grafted package, we have some special work to do, otherwise just look for stuff on disk
-            parent_synpkg_def = _SYNTHETIC_PACKAGES.get(parent_pkg_name)
-            if parent_synpkg_def and parent_synpkg_def.get('graft'):
-                synpkg_def = parent_synpkg_def
-                synpkg_remainder = '.' + fullname.rpartition('.')[2]
-
-        # FUTURE: collapse as much of this back to on-demand as possible (maybe stub packages that get replaced when actually loaded?)
-        if synpkg_def:
-            pkg_type = synpkg_def.get('type')
-            if not pkg_type:
-                raise KeyError('invalid synthetic package type (no package "type" specified)')
-            if pkg_type == 'map':
-                map_package = synpkg_def.get('map')
-
-                if not map_package:
-                    raise KeyError('invalid synthetic map package definition (no target "map" defined)')
-
-                if not load:
-                    return True, None
-
-                mod = import_module(map_package + synpkg_remainder)
-
-                sys.modules[fullname] = mod
-
-                return True, mod
-            elif pkg_type == 'flatmap':
-                raise NotImplementedError()
-            elif pkg_type == 'pkg_only':
-                if not load:
-                    return True, None
-
-                newmod = ModuleType(fullname)
-                newmod.__package__ = fullname
-                newmod.__file__ = '<ansible_synthetic_collection_package>'
-                newmod.__loader__ = self
-                newmod.__path__ = []
-
-                if not synpkg_def.get('allow_external_subpackages'):
-                    # if external subpackages are NOT allowed, we're done
-                    sys.modules[fullname] = newmod
-                    return True, newmod
-
-                # if external subpackages ARE allowed, check for on-disk implementations and return a normal
-                # package if we find one, otherwise return the one we created here
-
-        if not parent_pkg:  # top-level package, look for NS subpackages on all collection paths
-            package_paths = [self._extend_path_with_ns(p, fullname) for p in self.n_collection_paths]
-        else:  # subpackage; search in all subpaths (we'll limit later inside a collection)
-            package_paths = [self._extend_path_with_ns(p, fullname) for p in parent_pkg.__path__]
-
-        for candidate_child_path in package_paths:
-            code_object = None
-            is_package = True
-            location = None
-            # check for implicit sub-package first
-            if os.path.isdir(to_bytes(candidate_child_path)):
-                # Py3.x implicit namespace packages don't have a file location, so they don't support get_data
-                # (which assumes the parent dir or that the loader has an internal mapping); so we have to provide
-                # a bogus leaf file on the __file__ attribute for pkgutil.get_data to strip off
-                location = os.path.join(candidate_child_path, '__synthetic__')
-            else:
-                for source_path in [os.path.join(candidate_child_path, '__init__.py'),
-                                    candidate_child_path + '.py']:
-                    if not os.path.isfile(to_bytes(source_path)):
-                        continue
-
-                    if not load:
-                        return True, None
-
-                    with open(to_bytes(source_path), 'rb') as fd:
-                        source = fd.read()
-
-                    code_object = compile(source=source, filename=source_path, mode='exec', flags=0, dont_inherit=True)
-                    location = source_path
-                    is_package = source_path.endswith('__init__.py')
-                    break
-
-                if not location:
-                    continue
-
-            newmod = ModuleType(fullname)
-            newmod.__file__ = location
-            newmod.__loader__ = self
-
-            if is_package:
-                if sub_collection:  # we never want to search multiple instances of the same collection; use first found
-                    newmod.__path__ = [candidate_child_path]
-                else:
-                    newmod.__path__ = package_paths
-
-                newmod.__package__ = fullname
-            else:
-                newmod.__package__ = parent_pkg_name
-
-            sys.modules[fullname] = newmod
-
-            if code_object:
-                # FIXME: decide cases where we don't actually want to exec the code?
-                exec(code_object, newmod.__dict__)
-
-            return True, newmod
-
-        # even if we didn't find one on disk, fall back to a synthetic package if we have one...
-        if newmod:
-            sys.modules[fullname] = newmod
-            return True, newmod
-
-        # FIXME: need to handle the "no dirs present" case for at least the root and synthetic internal collections like ansible.builtin
-
-        return False, None
-
-    @staticmethod
-    def _extend_path_with_ns(path, ns):
-        ns_path_add = ns.rsplit('.', 1)[-1]
-
-        return os.path.join(path, ns_path_add)
-
-    def get_data(self, filename):
-        with open(filename, 'rb') as fd:
-            return fd.read()
-
-
-class AnsibleFlatMapLoader(object):
-    _extension_blacklist = ['.pyc', '.pyo']
-
-    def __init__(self, root_package):
-        self._root_package = root_package
-        self._dirtree = None
-
-    def _init_dirtree(self):
-        # FIXME: thread safety
-        root_path = os.path.dirname(self._root_package.__file__)
-        flat_files = []
-        # FIXME: make this a dict of filename->dir for faster direct lookup?
-        # FIXME: deal with _ prefixed deprecated files (or require another method for collections?)
-        # FIXME: fix overloaded filenames (eg, rename Windows setup to win_setup)
-        for root, dirs, files in os.walk(root_path):
-            # add all files in this dir that don't have a blacklisted extension
-            flat_files.extend(((root, f) for f in files if not any((f.endswith(ext) for ext in self._extension_blacklist))))
-
-        # HACK: Put Windows modules at the end of the list. This makes collection_loader behave
-        # the same way as plugin loader, preventing '.ps1' from modules being selected before '.py'
-        # modules simply because '.ps1' files may be above '.py' files in the flat_files list.
-        #
-        # The expected sort order is paths in the order they were in 'flat_files'
-        # with paths ending in '/windows' at the end, also in the original order they were
-        # in 'flat_files'. The .sort() method is guaranteed to be stable, so original order is preserved.
-        flat_files.sort(key=lambda p: p[0].endswith('/windows'))
-        self._dirtree = flat_files
-
-    def find_file(self, filename):
-        # FIXME: thread safety
-        if not self._dirtree:
-            self._init_dirtree()
-
-        if '.' not in filename:  # no extension specified, use extension regex to filter
-            extensionless_re = re.compile(r'^{0}(\..+)?$'.format(re.escape(filename)))
-            # why doesn't Python have first()?
-            try:
-                # FIXME: store extensionless in a separate direct lookup?
-                filepath = next(os.path.join(r, f) for r, f in self._dirtree if extensionless_re.match(f))
-            except StopIteration:
-                raise IOError("couldn't find {0}".format(filename))
-        else:  # actual filename, just look it up
-            # FIXME: this case sucks; make it a lookup
-            try:
-                filepath = next(os.path.join(r, f) for r, f in self._dirtree if f == filename)
-            except StopIteration:
-                raise IOError("couldn't find {0}".format(filename))
-
-        return filepath
-
-    def get_data(self, filename):
-        found_file = self.find_file(filename)
-
-        with open(found_file, 'rb') as fd:
-            return fd.read()
-
-
-# TODO: implement these for easier inline debugging?
-# def get_source(self, fullname):
-# def get_code(self, fullname):
-# def is_package(self, fullname):
-
-
-class AnsibleCollectionRef:
-    # FUTURE: introspect plugin loaders to get these dynamically?
-    VALID_REF_TYPES = frozenset(to_text(r) for r in ['action', 'become', 'cache', 'callback', 'cliconf', 'connection',
-                                                     'doc_fragments', 'filter', 'httpapi', 'inventory', 'lookup',
-                                                     'module_utils', 'modules', 'netconf', 'role', 'shell', 'strategy',
-                                                     'terminal', 'test', 'vars'])
-
-    # FIXME: tighten this up to match Python identifier reqs, etc
-    VALID_COLLECTION_NAME_RE = re.compile(to_text(r'^(\w+)\.(\w+)$'))
-    VALID_SUBDIRS_RE = re.compile(to_text(r'^\w+(\.\w+)*$'))
-    VALID_FQCR_RE = re.compile(to_text(r'^\w+\.\w+\.\w+(\.\w+)*$'))  # can have 0-N included subdirs as well
-
-    def __init__(self, collection_name, subdirs, resource, ref_type):
-        """
-        Create an AnsibleCollectionRef from components
-        :param collection_name: a collection name of the form 'namespace.collectionname'
-        :param subdirs: optional subdir segments to be appended below the plugin type (eg, 'subdir1.subdir2')
-        :param resource: the name of the resource being references (eg, 'mymodule', 'someaction', 'a_role')
-        :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
-        """
-        collection_name = to_text(collection_name, errors='strict')
-        if subdirs is not None:
-            subdirs = to_text(subdirs, errors='strict')
-        resource = to_text(resource, errors='strict')
-        ref_type = to_text(ref_type, errors='strict')
-
-        if not self.is_valid_collection_name(collection_name):
-            raise ValueError('invalid collection name (must be of the form namespace.collection): {0}'.format(to_native(collection_name)))
-
-        if ref_type not in self.VALID_REF_TYPES:
-            raise ValueError('invalid collection ref_type: {0}'.format(ref_type))
-
-        self.collection = collection_name
-        if subdirs:
-            if not re.match(self.VALID_SUBDIRS_RE, subdirs):
-                raise ValueError('invalid subdirs entry: {0} (must be empty/None or of the form subdir1.subdir2)'.format(to_native(subdirs)))
-            self.subdirs = subdirs
-        else:
-            self.subdirs = u''
-
-        self.resource = resource
-        self.ref_type = ref_type
-
-        package_components = [u'ansible_collections', self.collection]
-
-        if self.ref_type == u'role':
-            package_components.append(u'roles')
-        else:
-            # we assume it's a plugin
-            package_components += [u'plugins', self.ref_type]
-
-        if self.subdirs:
-            package_components.append(self.subdirs)
-
-        if self.ref_type == u'role':
-            # roles are their own resource
-            package_components.append(self.resource)
-
-        self.n_python_package_name = to_native('.'.join(package_components))
-
-    @staticmethod
-    def from_fqcr(ref, ref_type):
-        """
-        Parse a string as a fully-qualified collection reference, raises ValueError if invalid
-        :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
-        :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
-        :return: a populated AnsibleCollectionRef object
-        """
-        # assuming the fq_name is of the form (ns).(coll).(optional_subdir_N).(resource_name),
-        # we split the resource name off the right, split ns and coll off the left, and we're left with any optional
-        # subdirs that need to be added back below the plugin-specific subdir we'll add. So:
-        # ns.coll.resource -> ansible_collections.ns.coll.plugins.(plugintype).resource
-        # ns.coll.subdir1.resource -> ansible_collections.ns.coll.plugins.subdir1.(plugintype).resource
-        # ns.coll.rolename -> ansible_collections.ns.coll.roles.rolename
-        if not AnsibleCollectionRef.is_valid_fqcr(ref):
-            raise ValueError('{0} is not a valid collection reference'.format(to_native(ref)))
-
-        ref = to_text(ref, errors='strict')
-        ref_type = to_text(ref_type, errors='strict')
-
-        resource_splitname = ref.rsplit(u'.', 1)
-        package_remnant = resource_splitname[0]
-        resource = resource_splitname[1]
-
-        # split the left two components of the collection package name off, anything remaining is plugin-type
-        # specific subdirs to be added back on below the plugin type
-        package_splitname = package_remnant.split(u'.', 2)
-        if len(package_splitname) == 3:
-            subdirs = package_splitname[2]
-        else:
-            subdirs = u''
-
-        collection_name = u'.'.join(package_splitname[0:2])
-
-        return AnsibleCollectionRef(collection_name, subdirs, resource, ref_type)
-
-    @staticmethod
-    def try_parse_fqcr(ref, ref_type):
-        """
-        Attempt to parse a string as a fully-qualified collection reference, returning None on failure (instead of raising an error)
-        :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
-        :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
-        :return: a populated AnsibleCollectionRef object on successful parsing, else None
-        """
-        try:
-            return AnsibleCollectionRef.from_fqcr(ref, ref_type)
-        except ValueError:
-            pass
-
-    @staticmethod
-    def legacy_plugin_dir_to_plugin_type(legacy_plugin_dir_name):
-        """
-        Utility method to convert from a PluginLoader dir name to a plugin ref_type
-        :param legacy_plugin_dir_name: PluginLoader dir name (eg, 'action_plugins', 'library')
-        :return: the corresponding plugin ref_type (eg, 'action', 'role')
-        """
-        legacy_plugin_dir_name = to_text(legacy_plugin_dir_name)
-
-        plugin_type = legacy_plugin_dir_name.replace(u'_plugins', u'')
-
-        if plugin_type == u'library':
-            plugin_type = u'modules'
-
-        if plugin_type not in AnsibleCollectionRef.VALID_REF_TYPES:
-            raise ValueError('{0} cannot be mapped to a valid collection ref type'.format(to_native(legacy_plugin_dir_name)))
-
-        return plugin_type
-
-    @staticmethod
-    def is_valid_fqcr(ref, ref_type=None):
-        """
-        Validates if is string is a well-formed fully-qualified collection reference (does not look up the collection itself)
-        :param ref: candidate collection reference to validate (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
-        :param ref_type: optional reference type to enable deeper validation, eg 'module', 'role', 'doc_fragment'
-        :return: True if the collection ref passed is well-formed, False otherwise
-        """
-
-        ref = to_text(ref)
-
-        if not ref_type:
-            return bool(re.match(AnsibleCollectionRef.VALID_FQCR_RE, ref))
-
-        return bool(AnsibleCollectionRef.try_parse_fqcr(ref, ref_type))
-
-    @staticmethod
-    def is_valid_collection_name(collection_name):
-        """
-        Validates if the given string is a well-formed collection name (does not look up the collection itself)
-        :param collection_name: candidate collection name to validate (a valid name is of the form 'ns.collname')
-        :return: True if the collection name passed is well-formed, False otherwise
-        """
-
-        collection_name = to_text(collection_name)
-
-        return bool(re.match(AnsibleCollectionRef.VALID_COLLECTION_NAME_RE, collection_name))
-
-
-def get_collection_role_path(role_name, collection_list=None):
-    acr = AnsibleCollectionRef.try_parse_fqcr(role_name, 'role')
-
-    if acr:
-        # looks like a valid qualified collection ref; skip the collection_list
-        collection_list = [acr.collection]
-        subdirs = acr.subdirs
-        resource = acr.resource
-    elif not collection_list:
-        return None  # not a FQ role and no collection search list spec'd, nothing to do
-    else:
-        resource = role_name  # treat as unqualified, loop through the collection search list to try and resolve
-        subdirs = ''
-
-    for collection_name in collection_list:
-        try:
-            acr = AnsibleCollectionRef(collection_name=collection_name, subdirs=subdirs, resource=resource, ref_type='role')
-            # FIXME: error handling/logging; need to catch any import failures and move along
-
-            # FIXME: this line shouldn't be necessary, but py2 pkgutil.get_data is delegating back to built-in loader when it shouldn't
-            pkg = import_module(acr.n_python_package_name)
-
-            if pkg is not None:
-                # the package is now loaded, get the collection's package and ask where it lives
-                path = os.path.dirname(to_bytes(sys.modules[acr.n_python_package_name].__file__, errors='surrogate_or_strict'))
-                return resource, to_text(path, errors='surrogate_or_strict'), collection_name
-
-        except IOError:
-            continue
-        except Exception as ex:
-            # FIXME: pick out typical import errors first, then error logging
-            continue
-
-    return None
-
-
-_N_COLLECTION_PATH_RE = re.compile(r'/ansible_collections/([^/]+)/([^/]+)')
-
-
-def get_collection_name_from_path(path):
-    """
-    Return the containing collection name for a given path, or None if the path is not below a configured collection, or
-    the collection cannot be loaded (eg, the collection is masked by another of the same name higher in the configured
-    collection roots).
-    :param n_path: native-string path to evaluate for collection containment
-    :return: collection name or None
-    """
-    n_collection_paths = [to_native(os.path.abspath(to_bytes(p))) for p in AnsibleCollectionLoader().n_collection_paths]
-
-    b_path = os.path.abspath(to_bytes(path))
-    n_path = to_native(b_path)
-
-    for coll_path in n_collection_paths:
-        common_prefix = to_native(os.path.commonprefix([b_path, to_bytes(coll_path)]))
-        if common_prefix == coll_path:
-            # strip off the common prefix (handle weird testing cases of nested collection roots, eg)
-            collection_remnant = n_path[len(coll_path):]
-            # commonprefix may include the trailing /, prepend to the remnant if necessary (eg trailing / on root)
-            if collection_remnant and collection_remnant[0] != '/':
-                collection_remnant = '/' + collection_remnant
-            # the path lives under this collection root, see if it maps to a collection
-            found_collection = _N_COLLECTION_PATH_RE.search(collection_remnant)
-            if not found_collection:
-                continue
-            n_collection_name = '{0}.{1}'.format(*found_collection.groups())
-
-            loaded_collection_path = AnsibleCollectionLoader().get_collection_path(n_collection_name)
-
-            if not loaded_collection_path:
-                return None
-
-            # ensure we're using the canonical real path, with the bogus __synthetic__ stripped off
-            b_loaded_collection_path = os.path.dirname(os.path.abspath(to_bytes(loaded_collection_path)))
-
-            # if the collection path prefix matches the path prefix we were passed, it's the same collection that's loaded
-            if os.path.commonprefix([b_path, b_loaded_collection_path]) == b_loaded_collection_path:
-                return n_collection_name
-
-            return None  # if not, it's a collection, but not the same collection the loader sees, so ignore it
-
-
-def set_collection_playbook_paths(b_playbook_paths):
-    AnsibleCollectionLoader().set_playbook_paths(b_playbook_paths)
-
-
-def resource_from_fqcr(ref):
-    """
-    Return resource from a fully-qualified collection reference,
-    or from a simple resource name.
-
-    For fully-qualified collection references, this is equivalent to
-    ``AnsibleCollectionRef.from_fqcr(ref).resource``.
-
-    :param ref: collection reference to parse
-    :return: the resource as a unicode string
-    """
-    ref = to_text(ref, errors='strict')
-    return ref.split(u'.')[-1]
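The FQCR-to-package mapping spelled out in the from_fqcr docstring above can be exercised directly; a short illustrative session (the namespace, collection, and resource names are made up):

    from ansible.utils.collection_loader import AnsibleCollectionRef

    acr = AnsibleCollectionRef.from_fqcr('ns.coll.subdir1.mymodule', 'modules')
    print(acr.collection)             # ns.coll
    print(acr.subdirs)                # subdir1
    print(acr.resource)               # mymodule
    print(acr.n_python_package_name)  # ansible_collections.ns.coll.plugins.modules.subdir1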
23
lib/ansible/utils/collection_loader/__init__.py
Normal file
@@ -0,0 +1,23 @@
+# (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+# FIXME: decide what of this we want to actually be public/toplevel, put other stuff on a utility class?
+from ._collection_config import AnsibleCollectionConfig
+from ._collection_finder import AnsibleCollectionRef
+from ansible.module_utils.common.text.converters import to_text
+
+
+def resource_from_fqcr(ref):
+    """
+    Return resource from a fully-qualified collection reference,
+    or from a simple resource name.
+    For fully-qualified collection references, this is equivalent to
+    ``AnsibleCollectionRef.from_fqcr(ref).resource``.
+    :param ref: collection reference to parse
+    :return: the resource as a unicode string
+    """
+    ref = to_text(ref, errors='strict')
+    return ref.split(u'.')[-1]
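resource_from_fqcr is intentionally forgiving: it just takes the last dotted segment, so it works on both fully-qualified references and bare names. For example:

    from ansible.utils.collection_loader import resource_from_fqcr

    print(resource_from_fqcr('community.general.json_query'))  # json_query
    print(resource_from_fqcr('json_query'))                    # json_query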
101
lib/ansible/utils/collection_loader/_collection_config.py
Normal file
@@ -0,0 +1,101 @@
+# (c) 2019 Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.module_utils.common.text.converters import to_text
+from ansible.module_utils.six import with_metaclass
+
+
+class _EventSource:
+    def __init__(self):
+        self._handlers = set()
+
+    def __iadd__(self, handler):
+        if not callable(handler):
+            raise ValueError('handler must be callable')
+        self._handlers.add(handler)
+        return self
+
+    def __isub__(self, handler):
+        try:
+            self._handlers.remove(handler)
+        except KeyError:
+            pass
+
+        return self
+
+    def _on_exception(self, handler, exc, *args, **kwargs):
+        # if we return True, we want the caller to re-raise
+        return True
+
+    def fire(self, *args, **kwargs):
+        for h in self._handlers:
+            try:
+                h(*args, **kwargs)
+            except Exception as ex:
+                if self._on_exception(h, ex, *args, **kwargs):
+                    raise
+
+
+class _AnsibleCollectionConfig(type):
+    def __init__(cls, meta, name, bases):
+        cls._collection_finder = None
+        cls._default_collection = None
+        cls._on_collection_load = _EventSource()
+
+    @property
+    def collection_finder(cls):
+        return cls._collection_finder
+
+    @collection_finder.setter
+    def collection_finder(cls, value):
+        if cls._collection_finder:
+            raise ValueError('an AnsibleCollectionFinder has already been configured')
+
+        cls._collection_finder = value
+
+    @property
+    def collection_paths(cls):
+        cls._require_finder()
+        return [to_text(p) for p in cls._collection_finder._n_collection_paths]
+
+    @property
+    def default_collection(cls):
+        return cls._default_collection
+
+    @default_collection.setter
+    def default_collection(cls, value):
+        if cls._default_collection:
+            raise ValueError('default collection {0} has already been configured'.format(value))
+
+        cls._default_collection = value
+
+    @property
+    def on_collection_load(cls):
+        return cls._on_collection_load
+
+    @on_collection_load.setter
+    def on_collection_load(cls, value):
+        if value is not cls._on_collection_load:
+            raise ValueError('on_collection_load is not directly settable (use +=)')
+
+    @property
+    def playbook_paths(cls):
+        cls._require_finder()
+        return [to_text(p) for p in cls._collection_finder._n_playbook_paths]
+
+    @playbook_paths.setter
+    def playbook_paths(cls, value):
+        cls._require_finder()
+        cls._collection_finder.set_playbook_paths(value)
+
+    def _require_finder(cls):
+        if not cls._collection_finder:
+            raise NotImplementedError('an AnsibleCollectionFinder has not been installed in this process')
+
+
+# concrete class of our metaclass type that defines the class properties we want
+class AnsibleCollectionConfig(with_metaclass(_AnsibleCollectionConfig)):
+    pass
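The += / -= event pattern above is what lets loader.py subscribe its metadata handler without the finder having to import config or display code. A toy usage sketch (the handler is invented):

    events = _EventSource()

    def on_load(collection_name, collection_path):
        print('loaded {0} from {1}'.format(collection_name, collection_path))

    events += on_load                            # subscribe, as in AnsibleCollectionConfig.on_collection_load += handler
    events.fire('ns.coll', '/tmp/collections')   # invokes every subscribed handler
    events -= on_load                            # unsubscribe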
953
lib/ansible/utils/collection_loader/_collection_finder.py
Normal file
953
lib/ansible/utils/collection_loader/_collection_finder.py
Normal file
|
@ -0,0 +1,953 @@
|
||||||
|
# (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import os.path
import pkgutil
import re
import sys


# DO NOT add new non-stdlib import deps here, this loader is used by external tools (eg ansible-test import sanity)
# that only allow stdlib and module_utils
from ansible.module_utils.common.text.converters import to_native, to_text, to_bytes
from ansible.module_utils.six import string_types, PY3
from ._collection_config import AnsibleCollectionConfig

from contextlib import contextmanager
from types import ModuleType

try:
    from importlib import import_module
except ImportError:
    def import_module(name):
        __import__(name)
        return sys.modules[name]

try:
    from importlib import reload as reload_module
except ImportError:
    # 2.7 has a global reload function instead...
    reload_module = reload  # pylint:disable=undefined-variable

# NB: this supports the import sanity test providing a different impl
try:
    from ._collection_meta import _meta_yml_to_dict
except ImportError:
    _meta_yml_to_dict = None


class _AnsibleCollectionFinder:
    def __init__(self, paths=None, scan_sys_paths=True):
        # TODO: accept metadata loader override
        self._ansible_pkg_path = to_native(os.path.dirname(to_bytes(sys.modules['ansible'].__file__)))

        if isinstance(paths, string_types):
            paths = [paths]
        elif paths is None:
            paths = []

        # expand any placeholders in configured paths
        paths = [os.path.expanduser(to_native(p, errors='surrogate_or_strict')) for p in paths]

        if scan_sys_paths:
            # append all sys.path entries with an ansible_collections package
            for path in sys.path:
                if (
                        path not in paths and
                        os.path.isdir(to_bytes(
                            os.path.join(path, 'ansible_collections'),
                            errors='surrogate_or_strict',
                        ))
                ):
                    paths.append(path)

        self._n_configured_paths = paths
        self._n_cached_collection_paths = None
        self._n_cached_collection_qualified_paths = None

        self._n_playbook_paths = []

    @classmethod
    def _remove(cls):
        for mps in sys.meta_path:
            if isinstance(mps, _AnsibleCollectionFinder):
                sys.meta_path.remove(mps)

        # remove any path hooks that look like ours
        for ph in sys.path_hooks:
            if hasattr(ph, '__self__') and isinstance(ph.__self__, _AnsibleCollectionFinder):
                sys.path_hooks.remove(ph)

        # zap any cached path importer cache entries that might refer to us
        sys.path_importer_cache.clear()

        AnsibleCollectionConfig._collection_finder = None

        # validate via the public property that we really killed it
        if AnsibleCollectionConfig.collection_finder is not None:
            raise AssertionError('_AnsibleCollectionFinder remove did not reset AnsibleCollectionConfig.collection_finder')

    def _install(self):
        self._remove()
        sys.meta_path.insert(0, self)

        sys.path_hooks.insert(0, self._ansible_collection_path_hook)

        AnsibleCollectionConfig.collection_finder = self

    def _ansible_collection_path_hook(self, path):
        path = to_native(path)
        interesting_paths = self._n_cached_collection_qualified_paths
        if not interesting_paths:
            interesting_paths = [os.path.join(p, 'ansible_collections') for p in self._n_collection_paths]
            interesting_paths.insert(0, self._ansible_pkg_path)
            self._n_cached_collection_qualified_paths = interesting_paths

        if any(path.startswith(p) for p in interesting_paths):
            return _AnsiblePathHookFinder(self, path)

        raise ImportError('not interested')

    @property
    def _n_collection_paths(self):
        paths = self._n_cached_collection_paths
        if not paths:
            self._n_cached_collection_paths = paths = self._n_playbook_paths + self._n_configured_paths
        return paths

    def set_playbook_paths(self, playbook_paths):
        if isinstance(playbook_paths, string_types):
            playbook_paths = [playbook_paths]

        # track visited paths; we have to preserve the dir order as-passed in case there are duplicate collections (first one wins)
        added_paths = set()

        # de-dupe
        self._n_playbook_paths = [os.path.join(to_native(p), 'collections') for p in playbook_paths if not (p in added_paths or added_paths.add(p))]
        self._n_cached_collection_paths = None
        # HACK: playbook CLI sets this relatively late, so we've already loaded some packages whose paths might depend on this. Fix those up.
        # NB: this should NOT be used for late additions; ideally we'd fix the playbook dir setup earlier in Ansible init
        # to prevent this from occurring
        for pkg in ['ansible_collections', 'ansible_collections.ansible']:
            self._reload_hack(pkg)

    def _reload_hack(self, fullname):
        m = sys.modules.get(fullname)
        if not m:
            return
        reload_module(m)

    def find_module(self, fullname, path=None):
        # Figure out what's being asked for, and delegate to a special-purpose loader

        split_name = fullname.split('.')
        toplevel_pkg = split_name[0]
        module_to_find = split_name[-1]
        part_count = len(split_name)

        if toplevel_pkg not in ['ansible', 'ansible_collections']:
            # not interested in anything other than ansible_collections (and limited cases under ansible)
            return None

        # sanity check what we're getting from import, canonicalize path values
        if part_count == 1:
            if path:
                raise ValueError('path should not be specified for top-level packages (trying to find {0})'.format(fullname))
            else:
                # seed the path to the configured collection roots
                path = self._n_collection_paths

        if part_count > 1 and path is None:
            raise ValueError('path must be specified for subpackages (trying to find {0})'.format(fullname))

        # NB: actual "find"ing is delegated to the constructors on the various loaders; they'll ImportError if not found
        try:
            if toplevel_pkg == 'ansible':
                # something under the ansible package, delegate to our internal loader in case of redirections
                return _AnsibleInternalRedirectLoader(fullname=fullname, path_list=path)
            if part_count == 1:
                return _AnsibleCollectionRootPkgLoader(fullname=fullname, path_list=path)
            if part_count == 2:  # ns pkg eg, ansible_collections, ansible_collections.somens
                return _AnsibleCollectionNSPkgLoader(fullname=fullname, path_list=path)
            elif part_count == 3:  # collection pkg eg, ansible_collections.somens.somecoll
                return _AnsibleCollectionPkgLoader(fullname=fullname, path_list=path)
            # anything below the collection
            return _AnsibleCollectionLoader(fullname=fullname, path_list=path)
        except ImportError:
            # TODO: log attempt to load context
            return None
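
As a quick reference, illustrative only (the dotted names are hypothetical), the part_count dispatch above selects loaders like so:

    # requested fullname -> loader returned by find_module
    dispatch_examples = [
        ('ansible_collections', '_AnsibleCollectionRootPkgLoader'),              # part_count == 1
        ('ansible_collections.testns', '_AnsibleCollectionNSPkgLoader'),         # part_count == 2
        ('ansible_collections.testns.testcoll', '_AnsibleCollectionPkgLoader'),  # part_count == 3
        ('ansible_collections.testns.testcoll.plugins.modules.ping',
         '_AnsibleCollectionLoader'),                                            # anything deeper
        ('ansible.module_utils.formerly_core', '_AnsibleInternalRedirectLoader'),  # under the ansible package
    ]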


# Implements a path_hook finder for iter_modules (since it's only path based). This finder does not need to actually
# function as a finder in most cases, since our meta_path finder is consulted first for *almost* everything, except
# pkgutil.iter_modules, and under py2, pkgutil.get_data if the parent package passed has not been loaded yet.
class _AnsiblePathHookFinder:
    def __init__(self, collection_finder, pathctx):
        # when called from a path_hook, find_module doesn't usually get the path arg, so this provides our context
        self._pathctx = to_native(pathctx)
        self._collection_finder = collection_finder
        if PY3:
            # cache the native FileFinder (take advantage of its filesystem cache for future find/load requests)
            self._file_finder = None

    # class init is fun- this method has a self arg that won't get used
    def _get_filefinder_path_hook(self=None):
        _file_finder_hook = None
        if PY3:
            # try to find the FileFinder hook to call for fallback path-based imports in Py3
            _file_finder_hook = [ph for ph in sys.path_hooks if 'FileFinder' in repr(ph)]
            if len(_file_finder_hook) != 1:
                raise Exception('need exactly one FileFinder import hook (found {0})'.format(len(_file_finder_hook)))
            _file_finder_hook = _file_finder_hook[0]

        return _file_finder_hook

    _filefinder_path_hook = _get_filefinder_path_hook()

    def find_module(self, fullname, path=None):
        # we ignore the passed in path here- use what we got from the path hook init
        split_name = fullname.split('.')
        toplevel_pkg = split_name[0]

        if toplevel_pkg == 'ansible_collections':
            # collections content? delegate to the collection finder
            return self._collection_finder.find_module(fullname, path=[self._pathctx])
        else:
            # Something else; we'd normally restrict this to `ansible` descendent modules so that any weird loader
            # behavior that arbitrary Python modules have can be serviced by those loaders. In some dev/test
            # scenarios (eg a venv under a collection) our path_hook signs us up to load non-Ansible things, and
            # it's too late by the time we've reached this point, but also too expensive for the path_hook to figure
            # out what we *shouldn't* be loading with the limited info it has. So we'll just delegate to the
            # normal path-based loader as best we can to service it. This also allows us to take advantage of Python's
            # built-in FS caching and byte-compilation for most things.
            if PY3:
                # create or consult our cached file finder for this path
                if not self._file_finder:
                    self._file_finder = _AnsiblePathHookFinder._filefinder_path_hook(self._pathctx)

                spec = self._file_finder.find_spec(fullname)
                if not spec:
                    return None
                return spec.loader
            else:
                # call py2's internal loader
                return pkgutil.ImpImporter(self._pathctx).find_module(fullname)

    def iter_modules(self, prefix):
        # NB: this currently represents only what's on disk, and does not handle package redirection
        return _iter_modules_impl([self._pathctx], prefix)

    def __repr__(self):
        return "{0}(path='{1}')".format(self.__class__.__name__, self._pathctx)


class _AnsibleCollectionPkgLoaderBase:
    _allows_package_code = False

    def __init__(self, fullname, path_list=None):
        self._fullname = fullname
        self._redirect_module = None
        self._split_name = fullname.split('.')
        self._rpart_name = fullname.rpartition('.')
        self._parent_package_name = self._rpart_name[0]  # eg ansible_collections for ansible_collections.somens, '' for toplevel
        self._package_to_load = self._rpart_name[2]  # eg somens for ansible_collections.somens

        self._source_code_path = None
        self._decoded_source = None
        self._compiled_code = None

        self._validate_args()

        self._candidate_paths = self._get_candidate_paths([to_native(p) for p in path_list])
        self._subpackage_search_paths = self._get_subpackage_search_paths(self._candidate_paths)

        self._validate_final()

    # allow subclasses to validate args and sniff split values before we start digging around
    def _validate_args(self):
        if self._split_name[0] != 'ansible_collections':
            raise ImportError('this loader can only load packages from the ansible_collections package, not {0}'.format(self._fullname))

    # allow subclasses to customize candidate path filtering
    def _get_candidate_paths(self, path_list):
        return [os.path.join(p, self._package_to_load) for p in path_list]

    # allow subclasses to customize finding paths
    def _get_subpackage_search_paths(self, candidate_paths):
        # filter candidate paths for existence (NB: silently ignoring package init code and same-named modules)
        return [p for p in candidate_paths if os.path.isdir(to_bytes(p))]

    # allow subclasses to customize state validation/manipulation before we return the loader instance
    def _validate_final(self):
        return

    @staticmethod
    @contextmanager
    def _new_or_existing_module(name, **kwargs):
        # handle all-or-nothing sys.modules creation/use-existing/delete-on-exception-if-created behavior
        created_module = False
        module = sys.modules.get(name)
        try:
            if not module:
                module = ModuleType(name)
                created_module = True
                sys.modules[name] = module
            # always override the values passed, except name (allow reference aliasing)
            for attr, value in kwargs.items():
                setattr(module, attr, value)
            yield module
        except Exception:
            if created_module:
                if sys.modules.get(name):
                    sys.modules.pop(name)
            raise

    # basic module/package location support
    # NB: this does not support distributed packages!
    @staticmethod
    def _module_file_from_path(leaf_name, path):
        has_code = True
        package_path = os.path.join(to_native(path), to_native(leaf_name))
        module_path = None

        # if the submodule is a package, assemble valid submodule paths, but stop looking for a module
        if os.path.isdir(to_bytes(package_path)):
            # is there a package init?
            module_path = os.path.join(package_path, '__init__.py')
            if not os.path.isfile(to_bytes(module_path)):
                module_path = os.path.join(package_path, '__synthetic__')
                has_code = False
        else:
            module_path = package_path + '.py'
            package_path = None
            if not os.path.isfile(to_bytes(module_path)):
                raise ImportError('{0} not found at {1}'.format(leaf_name, path))

        return module_path, has_code, package_path

    def load_module(self, fullname):
        # short-circuit redirect; we've already imported the redirected module, so just alias it and return it
        if self._redirect_module:
            sys.modules[self._fullname] = self._redirect_module
            return self._redirect_module

        # we're actually loading a module/package
        module_attrs = dict(
            __loader__=self,
            __file__=self.get_filename(fullname),
            __package__=self._parent_package_name  # sane default for non-packages
        )

        # eg, I am a package
        if self._subpackage_search_paths is not None:  # empty is legal
            module_attrs['__path__'] = self._subpackage_search_paths
            module_attrs['__package__'] = fullname  # per PEP366

        with self._new_or_existing_module(fullname, **module_attrs) as module:
            # execute the module's code in its namespace
            exec(self.get_code(fullname), module.__dict__)

        return module

    def is_package(self, fullname):
        if fullname != self._fullname:
            raise ValueError('this loader cannot answer is_package for {0}, only {1}'.format(fullname, self._fullname))
        return self._subpackage_search_paths is not None

    def get_source(self, fullname):
        if self._decoded_source:
            return self._decoded_source
        if fullname != self._fullname:
            raise ValueError('this loader cannot load source for {0}, only {1}'.format(fullname, self._fullname))
        if not self._source_code_path:
            return None
        # FIXME: what do we want encoding/newline requirements to be?
        self._decoded_source = self.get_data(self._source_code_path)
        return self._decoded_source

    def get_data(self, path):
        if not path:
            raise ValueError('a path must be specified')

        # TODO: ensure we're being asked for a path below something we own
        # TODO: try to handle redirects internally?

        if not path[0] == '/':
            # relative to current package, search package paths if possible (this may not be necessary)
            # candidate_paths = [os.path.join(ssp, path) for ssp in self._subpackage_search_paths]
            raise ValueError('relative resource paths not supported')
        else:
            candidate_paths = [path]

        for p in candidate_paths:
            b_path = to_bytes(p)
            if os.path.isfile(b_path):
                with open(b_path, 'rb') as fd:
                    return fd.read()

        return None

    def _synthetic_filename(self, fullname):
        return '<ansible_synthetic_collection_package>'

    def get_filename(self, fullname):
        if fullname != self._fullname:
            raise ValueError('this loader cannot find files for {0}, only {1}'.format(fullname, self._fullname))

        filename = self._source_code_path

        if not filename and self.is_package(fullname):
            if len(self._subpackage_search_paths) == 1:
                filename = os.path.join(self._subpackage_search_paths[0], '__synthetic__')
            else:
                filename = self._synthetic_filename(fullname)

        return filename

    def get_code(self, fullname):
        if self._compiled_code:
            return self._compiled_code

        # this may or may not be an actual filename, but it's the value we'll use for __file__
        filename = self.get_filename(fullname)
        if not filename:
            filename = '<string>'

        source_code = self.get_source(fullname)
        if not source_code:
            source_code = ''

        self._compiled_code = compile(source=source_code, filename=filename, mode='exec', flags=0, dont_inherit=True)

        return self._compiled_code

    def iter_modules(self, prefix):
        return _iter_modules_impl(self._subpackage_search_paths, prefix)

    def __repr__(self):
        return '{0}(path={1})'.format(self.__class__.__name__, self._subpackage_search_paths or self._source_code_path)


class _AnsibleCollectionRootPkgLoader(_AnsibleCollectionPkgLoaderBase):
    def _validate_args(self):
        super(_AnsibleCollectionRootPkgLoader, self)._validate_args()
        if len(self._split_name) != 1:
            raise ImportError('this loader can only load the ansible_collections toplevel package, not {0}'.format(self._fullname))


# Implements Ansible's custom namespace package support.
# The ansible_collections package and one level down (collections namespaces) are Python namespace packages
# that search across all configured collection roots. The collection package (two levels down) is the first one found
# on the configured collection root path, and Python namespace package aggregation is not allowed at or below
# the collection. Implements implicit package (package dir) support for both Py2/3. Package init code is ignored
# by this loader.
class _AnsibleCollectionNSPkgLoader(_AnsibleCollectionPkgLoaderBase):
    def _validate_args(self):
        super(_AnsibleCollectionNSPkgLoader, self)._validate_args()
        if len(self._split_name) != 2:
            raise ImportError('this loader can only load collections namespace packages, not {0}'.format(self._fullname))

    def _validate_final(self):
        # special-case the `ansible` namespace, since `ansible.builtin` is magical
        if not self._subpackage_search_paths and self._package_to_load != 'ansible':
            raise ImportError('no {0} found in {1}'.format(self._package_to_load, self._candidate_paths))


# handles locating the actual collection package and associated metadata
class _AnsibleCollectionPkgLoader(_AnsibleCollectionPkgLoaderBase):
    def _validate_args(self):
        super(_AnsibleCollectionPkgLoader, self)._validate_args()
        if len(self._split_name) != 3:
            raise ImportError('this loader can only load collection packages, not {0}'.format(self._fullname))

    def _validate_final(self):
        if self._split_name[1:3] == ['ansible', 'builtin']:
            # we don't want to allow this one to have on-disk search capability
            self._subpackage_search_paths = []
        elif not self._subpackage_search_paths:
            raise ImportError('no {0} found in {1}'.format(self._package_to_load, self._candidate_paths))
        else:
            # only search within the first collection we found
            self._subpackage_search_paths = [self._subpackage_search_paths[0]]

    def load_module(self, fullname):
        if not _meta_yml_to_dict:
            raise ValueError('ansible.utils.collection_loader._meta_yml_to_dict is not set')

        module = super(_AnsibleCollectionPkgLoader, self).load_module(fullname)

        module._collection_meta = {}
        # TODO: load collection metadata, cache in __loader__ state

        collection_name = '.'.join(self._split_name[1:3])

        if collection_name == 'ansible.builtin':
            # ansible.builtin is a synthetic collection, get its routing config from the Ansible distro
            raw_routing = pkgutil.get_data('ansible.config', 'ansible_builtin_runtime.yml')
        else:
            b_routing_meta_path = to_bytes(os.path.join(module.__path__[0], 'meta/runtime.yml'))
            if os.path.isfile(b_routing_meta_path):
                with open(b_routing_meta_path, 'rb') as fd:
                    raw_routing = fd.read()
            else:
                raw_routing = ''
        try:
            if raw_routing:
                routing_dict = _meta_yml_to_dict(raw_routing, (collection_name, 'runtime.yml'))
                module._collection_meta = self._canonicalize_meta(routing_dict)
        except Exception as ex:
            raise ValueError('error parsing collection metadata: {0}'.format(to_native(ex)))

        AnsibleCollectionConfig.on_collection_load.fire(collection_name=collection_name, collection_path=os.path.dirname(module.__file__))

        return module

    def _canonicalize_meta(self, meta_dict):
        # TODO: rewrite import keys and all redirect targets that start with .. (current namespace) and . (current collection)
        # OR we could do it all on the fly?
        # if not meta_dict:
        #     return {}
        #
        # ns_name = '.'.join(self._split_name[0:2])
        # collection_name = '.'.join(self._split_name[0:3])
        #
        # for routing_type, routing_type_dict in iteritems(meta_dict.get('plugin_routing', {})):
        #     for plugin_key, plugin_dict in iteritems(routing_type_dict):
        #         redirect = plugin_dict.get('redirect', '')
        #         if redirect.startswith('..'):
        #             redirect = redirect[2:]

        return meta_dict


# loads everything under a collection, including handling redirections defined by the collection
class _AnsibleCollectionLoader(_AnsibleCollectionPkgLoaderBase):
    # HACK: stash this in a better place
    _redirected_package_map = {}
    _allows_package_code = True

    def _validate_args(self):
        super(_AnsibleCollectionLoader, self)._validate_args()
        if len(self._split_name) < 4:
            raise ValueError('this loader is only for sub-collection modules/packages, not {0}'.format(self._fullname))

    def _get_candidate_paths(self, path_list):
        if len(path_list) != 1 and self._split_name[1:3] != ['ansible', 'builtin']:
            raise ValueError('this loader requires exactly one path to search')

        return path_list

    def _get_subpackage_search_paths(self, candidate_paths):
        collection_meta = _get_collection_metadata('.'.join(self._split_name[1:3]))

        # check for explicit redirection, as well as ancestor package-level redirection (only load the actual code once!)
        redirect = None
        explicit_redirect = False

        routing_entry = _nested_dict_get(collection_meta, ['import_redirection', self._fullname])
        if routing_entry:
            redirect = routing_entry.get('redirect')

        if redirect:
            explicit_redirect = True
        else:
            redirect = _get_ancestor_redirect(self._redirected_package_map, self._fullname)

        # NB: package level redirection requires hooking all future imports beneath the redirected source package
        # in order to ensure sanity on future relative imports. We always import everything under its "real" name,
        # then add a sys.modules entry with the redirected name using the same module instance. If we naively imported
        # the source for each redirection, most submodules would import OK, but we'd have N runtime copies of the module
        # (one for each name), and relative imports that ascend above the redirected package would break (since they'd
        # see the redirected ancestor package contents instead of the package where they actually live).
        if redirect:
            # FIXME: wrap this so we can be explicit about a failed redirection
            self._redirect_module = import_module(redirect)
            if explicit_redirect and hasattr(self._redirect_module, '__path__') and self._redirect_module.__path__:
                # if the import target looks like a package, store its name so we can rewrite future descendent loads
                # FIXME: shouldn't this be in a shared location? This is currently per loader instance, so
                self._redirected_package_map[self._fullname] = redirect

            # if we redirected, don't do any further custom package logic
            return None

        # we're not doing a redirect- try to find what we need to actually load a module/package

        # this will raise ImportError if we can't find the requested module/package at all
        if not candidate_paths:
            # noplace to look, just ImportError
            raise ImportError('package has no paths')

        found_path, has_code, package_path = self._module_file_from_path(self._package_to_load, candidate_paths[0])

        # still here? we found something to load...
        if has_code:
            self._source_code_path = found_path

        if package_path:
            return [package_path]  # always needs to be a list

        return None
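
The NB comment above is the heart of the redirection design; here is a minimal standalone sketch of the "import once, alias the other name" pattern it describes (module paths borrowed from the test fixtures below; treat them as assumptions):

    import sys
    from importlib import import_module

    # import the real module once, under its real name...
    real_mod = import_module('ansible_collections.testns.content_adj.plugins.module_utils.sub1.foomodule')
    # ...then alias the redirected name to the same module object, so relative imports still resolve correctly
    sys.modules['ansible_collections.testns.testcoll.plugins.module_utils.moved_out_root'] = real_mod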


# This loader only answers for intercepted Ansible Python modules. Normal imports will fail here and be picked up later
# by our path_hook importer (which proxies the built-in import mechanisms, allowing normal caching etc to occur)
class _AnsibleInternalRedirectLoader:
    def __init__(self, fullname, path_list):
        self._redirect = None

        split_name = fullname.split('.')
        toplevel_pkg = split_name[0]
        module_to_load = split_name[-1]

        if toplevel_pkg != 'ansible':
            raise ImportError('not interested')

        builtin_meta = _get_collection_metadata('ansible.builtin')

        routing_entry = _nested_dict_get(builtin_meta, ['import_redirection', fullname])
        if routing_entry:
            self._redirect = routing_entry.get('redirect')

        if not self._redirect:
            raise ImportError('not redirected, go ask path_hook')

    def load_module(self, fullname):
        # since we're delegating to other loaders, this should only be called for internal redirects where we answered
        # find_module with this loader, in which case we'll just directly import the redirection target, insert it into
        # sys.modules under the name it was requested by, and return the original module.

        # should never see this
        if not self._redirect:
            raise ValueError('no redirect found for {0}'.format(fullname))

        # FIXME: smuggle redirection context, provide warning/error that we tried and failed to redirect
        mod = import_module(self._redirect)
        sys.modules[fullname] = mod
        return mod


class AnsibleCollectionRef:
    # FUTURE: introspect plugin loaders to get these dynamically?
    VALID_REF_TYPES = frozenset(to_text(r) for r in ['action', 'become', 'cache', 'callback', 'cliconf', 'connection',
                                                     'doc_fragments', 'filter', 'httpapi', 'inventory', 'lookup',
                                                     'module_utils', 'modules', 'netconf', 'role', 'shell', 'strategy',
                                                     'terminal', 'test', 'vars'])

    # FIXME: tighten this up to match Python identifier reqs, etc
    VALID_COLLECTION_NAME_RE = re.compile(to_text(r'^(\w+)\.(\w+)$'))
    VALID_SUBDIRS_RE = re.compile(to_text(r'^\w+(\.\w+)*$'))
    VALID_FQCR_RE = re.compile(to_text(r'^\w+\.\w+\.\w+(\.\w+)*$'))  # can have 0-N included subdirs as well

    def __init__(self, collection_name, subdirs, resource, ref_type):
        """
        Create an AnsibleCollectionRef from components
        :param collection_name: a collection name of the form 'namespace.collectionname'
        :param subdirs: optional subdir segments to be appended below the plugin type (eg, 'subdir1.subdir2')
        :param resource: the name of the resource being referenced (eg, 'mymodule', 'someaction', 'a_role')
        :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
        """
        collection_name = to_text(collection_name, errors='strict')
        if subdirs is not None:
            subdirs = to_text(subdirs, errors='strict')
        resource = to_text(resource, errors='strict')
        ref_type = to_text(ref_type, errors='strict')

        if not self.is_valid_collection_name(collection_name):
            raise ValueError('invalid collection name (must be of the form namespace.collection): {0}'.format(to_native(collection_name)))

        if ref_type not in self.VALID_REF_TYPES:
            raise ValueError('invalid collection ref_type: {0}'.format(ref_type))

        self.collection = collection_name
        if subdirs:
            if not re.match(self.VALID_SUBDIRS_RE, subdirs):
                raise ValueError('invalid subdirs entry: {0} (must be empty/None or of the form subdir1.subdir2)'.format(to_native(subdirs)))
            self.subdirs = subdirs
        else:
            self.subdirs = u''

        self.resource = resource
        self.ref_type = ref_type

        package_components = [u'ansible_collections', self.collection]
        fqcr_components = [self.collection]

        self.n_python_collection_package_name = to_native('.'.join(package_components))

        if self.ref_type == u'role':
            package_components.append(u'roles')
        else:
            # we assume it's a plugin
            package_components += [u'plugins', self.ref_type]

        if self.subdirs:
            package_components.append(self.subdirs)
            fqcr_components.append(self.subdirs)

        if self.ref_type == u'role':
            # roles are their own resource
            package_components.append(self.resource)

        fqcr_components.append(self.resource)

        self.n_python_package_name = to_native('.'.join(package_components))
        self._fqcr = u'.'.join(fqcr_components)

    def __repr__(self):
        return 'AnsibleCollectionRef(collection={0!r}, subdirs={1!r}, resource={2!r})'.format(self.collection, self.subdirs, self.resource)

    @property
    def fqcr(self):
        return self._fqcr

    @staticmethod
    def from_fqcr(ref, ref_type):
        """
        Parse a string as a fully-qualified collection reference, raises ValueError if invalid
        :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
        :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
        :return: a populated AnsibleCollectionRef object
        """
        # assuming the fq_name is of the form (ns).(coll).(optional_subdir_N).(resource_name),
        # we split the resource name off the right, split ns and coll off the left, and we're left with any optional
        # subdirs that need to be added back below the plugin-specific subdir we'll add. So:
        # ns.coll.resource -> ansible_collections.ns.coll.plugins.(plugintype).resource
        # ns.coll.subdir1.resource -> ansible_collections.ns.coll.plugins.(plugintype).subdir1.resource
        # ns.coll.rolename -> ansible_collections.ns.coll.roles.rolename
        if not AnsibleCollectionRef.is_valid_fqcr(ref):
            raise ValueError('{0} is not a valid collection reference'.format(to_native(ref)))

        ref = to_text(ref, errors='strict')
        ref_type = to_text(ref_type, errors='strict')

        resource_splitname = ref.rsplit(u'.', 1)
        package_remnant = resource_splitname[0]
        resource = resource_splitname[1]

        # split the left two components of the collection package name off, anything remaining is plugin-type
        # specific subdirs to be added back on below the plugin type
        package_splitname = package_remnant.split(u'.', 2)
        if len(package_splitname) == 3:
            subdirs = package_splitname[2]
        else:
            subdirs = u''

        collection_name = u'.'.join(package_splitname[0:2])

        return AnsibleCollectionRef(collection_name, subdirs, resource, ref_type)

    @staticmethod
    def try_parse_fqcr(ref, ref_type):
        """
        Attempt to parse a string as a fully-qualified collection reference, returning None on failure (instead of raising an error)
        :param ref: collection reference to parse (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
        :param ref_type: the type of the reference, eg 'module', 'role', 'doc_fragment'
        :return: a populated AnsibleCollectionRef object on successful parsing, else None
        """
        try:
            return AnsibleCollectionRef.from_fqcr(ref, ref_type)
        except ValueError:
            pass

    @staticmethod
    def legacy_plugin_dir_to_plugin_type(legacy_plugin_dir_name):
        """
        Utility method to convert from a PluginLoader dir name to a plugin ref_type
        :param legacy_plugin_dir_name: PluginLoader dir name (eg, 'action_plugins', 'library')
        :return: the corresponding plugin ref_type (eg, 'action', 'role')
        """
        legacy_plugin_dir_name = to_text(legacy_plugin_dir_name)

        plugin_type = legacy_plugin_dir_name.replace(u'_plugins', u'')

        if plugin_type == u'library':
            plugin_type = u'modules'

        if plugin_type not in AnsibleCollectionRef.VALID_REF_TYPES:
            raise ValueError('{0} cannot be mapped to a valid collection ref type'.format(to_native(legacy_plugin_dir_name)))

        return plugin_type

    @staticmethod
    def is_valid_fqcr(ref, ref_type=None):
        """
        Validates if a string is a well-formed fully-qualified collection reference (does not look up the collection itself)
        :param ref: candidate collection reference to validate (a valid ref is of the form 'ns.coll.resource' or 'ns.coll.subdir1.subdir2.resource')
        :param ref_type: optional reference type to enable deeper validation, eg 'module', 'role', 'doc_fragment'
        :return: True if the collection ref passed is well-formed, False otherwise
        """

        ref = to_text(ref)

        if not ref_type:
            return bool(re.match(AnsibleCollectionRef.VALID_FQCR_RE, ref))

        return bool(AnsibleCollectionRef.try_parse_fqcr(ref, ref_type))

    @staticmethod
    def is_valid_collection_name(collection_name):
        """
        Validates if the given string is a well-formed collection name (does not look up the collection itself)
        :param collection_name: candidate collection name to validate (a valid name is of the form 'ns.collname')
        :return: True if the collection name passed is well-formed, False otherwise
        """

        collection_name = to_text(collection_name)

        return bool(re.match(AnsibleCollectionRef.VALID_COLLECTION_NAME_RE, collection_name))
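
A worked sketch of the FQCR-to-package mapping implemented above ('testns.testcoll' comes from this commit's test fixtures; 'subdir1' and 'mymodule' are hypothetical, and the import assumes the package __init__ re-exports the class, as the CLI does for AnsibleCollectionConfig):

    from ansible.utils.collection_loader import AnsibleCollectionRef

    acr = AnsibleCollectionRef.from_fqcr('testns.testcoll.subdir1.mymodule', 'modules')
    assert acr.collection == 'testns.testcoll'
    assert acr.subdirs == 'subdir1'
    assert acr.resource == 'mymodule'
    # subdirs are appended below the plugin-type package:
    assert acr.n_python_package_name == 'ansible_collections.testns.testcoll.plugins.modules.subdir1'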


def _get_collection_role_path(role_name, collection_list=None):
    acr = AnsibleCollectionRef.try_parse_fqcr(role_name, 'role')

    if acr:
        # looks like a valid qualified collection ref; skip the collection_list
        collection_list = [acr.collection]
        subdirs = acr.subdirs
        resource = acr.resource
    elif not collection_list:
        return None  # not a FQ role and no collection search list spec'd, nothing to do
    else:
        resource = role_name  # treat as unqualified, loop through the collection search list to try and resolve
        subdirs = ''

    for collection_name in collection_list:
        try:
            acr = AnsibleCollectionRef(collection_name=collection_name, subdirs=subdirs, resource=resource, ref_type='role')
            # FIXME: error handling/logging; need to catch any import failures and move along
            pkg = import_module(acr.n_python_package_name)

            if pkg is not None:
                # the package is now loaded, get the collection's package and ask where it lives
                path = os.path.dirname(to_bytes(sys.modules[acr.n_python_package_name].__file__, errors='surrogate_or_strict'))
                return resource, to_text(path, errors='surrogate_or_strict'), collection_name

        except IOError:
            continue
        except Exception as ex:
            # FIXME: pick out typical import errors first, then error logging
            continue

    return None


def _get_collection_name_from_path(path):
    """
    Return the containing collection name for a given path, or None if the path is not below a configured collection, or
    the collection cannot be loaded (eg, the collection is masked by another of the same name higher in the configured
    collection roots).
    :param path: path to evaluate for collection containment
    :return: collection name or None
    """

    # FIXME: mess with realpath canonicalization or not?
    path = to_native(path)

    path_parts = path.split('/')
    if path_parts.count('ansible_collections') != 1:
        return None

    ac_pos = path_parts.index('ansible_collections')

    # make sure it's followed by at least a namespace and collection name
    if len(path_parts) < ac_pos + 3:
        return None

    candidate_collection_name = '.'.join(path_parts[ac_pos + 1:ac_pos + 3])

    try:
        # we've got a name for it, now see if the path prefix matches what the loader sees
        imported_pkg_path = to_native(os.path.dirname(to_bytes(import_module('ansible_collections.' + candidate_collection_name).__file__)))
    except ImportError:
        return None

    # reassemble the original path prefix up to the collection name, and it should match what we just imported. If not,
    # this is probably a collection root that's not configured.

    original_path_prefix = os.path.join('/', *path_parts[0:ac_pos + 3])

    if original_path_prefix != imported_pkg_path:
        return None

    return candidate_collection_name
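
For example (a sketch; the path is hypothetical and must sit under a configured, importable collection root):

    p = '/home/user/.ansible/collections/ansible_collections/testns/testcoll/plugins/modules'
    print(_get_collection_name_from_path(p))  # 'testns.testcoll', or None if the loader imported it from elsewhere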


def _get_import_redirect(collection_meta_dict, fullname):
    if not collection_meta_dict:
        return None

    return _nested_dict_get(collection_meta_dict, ['import_redirection', fullname, 'redirect'])


def _get_ancestor_redirect(redirected_package_map, fullname):
    # walk the requested module's ancestor packages to see if any have been previously redirected
    cur_pkg = fullname
    while cur_pkg:
        cur_pkg = cur_pkg.rpartition('.')[0]
        ancestor_redirect = redirected_package_map.get(cur_pkg)
        if ancestor_redirect:
            # rewrite the prefix on fullname so we import the target first, then alias it
            redirect = ancestor_redirect + fullname[len(cur_pkg):]
            return redirect
    return None


def _nested_dict_get(root_dict, key_list):
    cur_value = root_dict
    for key in key_list:
        cur_value = cur_value.get(key)
        if not cur_value:
            return None

    return cur_value


def _iter_modules_impl(paths, prefix=''):
    # NB: this currently only iterates what's on disk- redirected modules are not considered
    if not prefix:
        prefix = ''
    else:
        prefix = to_native(prefix)
    # yield (module_loader, name, ispkg) for each module/pkg under path
    # TODO: implement ignore/silent catch for unreadable?
    for b_path in map(to_bytes, paths):
        if not os.path.isdir(b_path):
            continue
        for b_basename in sorted(os.listdir(b_path)):
            b_candidate_module_path = os.path.join(b_path, b_basename)
            if os.path.isdir(b_candidate_module_path):
                # exclude things that obviously aren't Python package dirs
                # FIXME: this dir is adjustable in py3.8+, check for it
                if b'.' in b_basename or b_basename == b'__pycache__':
                    continue

                # TODO: proper string handling?
                yield prefix + to_native(b_basename), True
            else:
                # FIXME: match builtin ordering for package/dir/file, support compiled?
                if b_basename.endswith(b'.py') and b_basename != b'__init__.py':
                    yield prefix + to_native(os.path.splitext(b_basename)[0]), False


def _get_collection_metadata(collection_name):
    collection_name = to_native(collection_name)
    if not collection_name or not isinstance(collection_name, string_types) or len(collection_name.split('.')) != 2:
        raise ValueError('collection_name must be a non-empty string of the form namespace.collection')

    try:
        collection_pkg = import_module('ansible_collections.' + collection_name)
    except ImportError:
        raise ValueError('unable to locate collection {0}'.format(collection_name))

    _collection_meta = getattr(collection_pkg, '_collection_meta', None)

    if _collection_meta is None:
        raise ValueError('collection metadata was not loaded for collection {0}'.format(collection_name))

    return _collection_meta
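
End to end, a host process only needs to install the finder once; a minimal bootstrap sketch (private API; the path and fixture module are assumptions):

    finder = _AnsibleCollectionFinder(paths=['/home/user/.ansible/collections'])
    finder._install()  # registers on sys.meta_path and sys.path_hooks

    # collection content is now importable by its Python package name
    import ansible_collections.testns.testcoll.plugins.module_utils.leaf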
17
lib/ansible/utils/collection_loader/_collection_meta.py
Normal file
@@ -0,0 +1,17 @@
# (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from yaml import safe_load


def _meta_yml_to_dict(yaml_string_data, content_id):
    routing_dict = safe_load(yaml_string_data)
    if not routing_dict:
        routing_dict = {}
    # TODO: change this to Mapping abc?
    if not isinstance(routing_dict, dict):
        raise ValueError('collection metadata must be a dictionary')
    return routing_dict
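
A quick sketch of the round trip (the routing keys mirror the meta/runtime.yml fixtures below; note this impl ignores content_id, which exists for alternative implementations such as the import sanity shim):

    raw = b'plugin_routing:\n  modules:\n    old_ping:\n      redirect: testns.testcoll.ping\n'
    meta = _meta_yml_to_dict(raw, ('testns.testcoll', 'runtime.yml'))
    assert meta['plugin_routing']['modules']['old_ping']['redirect'] == 'testns.testcoll.ping'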
@@ -6,3 +6,4 @@
 jinja2
 PyYAML
 cryptography
+packaging

@@ -1 +1,2 @@
 shippable/posix/group1
+disabled
@@ -0,0 +1,39 @@
plugin_routing:
  action:
    uses_redirected_action:
      redirect: testns.testcoll.subclassed_normal
  connection:
    redirected_local:
      redirect: ansible.builtin.local
  modules:
    multilevel1:
      redirect: testns.testcoll.multilevel2
    multilevel2:
      redirect: testns.testcoll.multilevel3
    multilevel3:
      redirect: testns.testcoll.ping
    uses_redirected_action:
      redirect: ansible.builtin.ping
    setup.ps1: ansible.windows.setup
    looped_ping:
      redirect: testns.testcoll.looped_ping2
    looped_ping2:
      redirect: testns.testcoll.looped_ping
    bogus_redirect:
      redirect: bogus.collection.shouldbomb
    deprecated_ping:
      deprecation:
        removal_date: 2020-12-31
        warning_text: old_ping will be removed in a future release of this collection. Use new_ping instead.
    foobar_facts:
      redirect: foobar_info
    aliased_ping:
      redirect: ansible.builtin.ping
    dead_ping:
      tombstone:
        removal_date: 2019-12-31
        warning_text: dead_ping has been removed
  module_utils:
    moved_out_root:
      redirect: testns.content_adj.sub1.foomodule
requires_ansible: '>=2.11'
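
Given this fixture, redirects resolve one hop at a time; a sketch of walking the multilevel chain with the loader's _nested_dict_get helper (assumes meta is the parsed runtime.yml dict for testns.testcoll):

    name = 'multilevel1'
    seen = set()
    while True:
        entry = _nested_dict_get(meta, ['plugin_routing', 'modules', name]) or {}
        target = entry.get('redirect')
        if not target:
            break
        if target in seen:  # the looped_ping/looped_ping2 pair above would trip this guard
            raise RuntimeError('redirect loop detected at {0}'.format(target))
        seen.add(target)
        name = target.rsplit('.', 1)[-1]  # this sketch only follows hops within the same collection
    print(name)  # 'ping'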
@@ -0,0 +1,11 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.plugins.action.normal import ActionModule as NormalAction


class ActionModule(NormalAction):
    def run(self, *args, **kwargs):
        result = super(ActionModule, self).run(*args, **kwargs)
        result['hacked'] = 'I got run under a subclassed normal, yay'
        return result
@@ -0,0 +1,20 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.plugins.action import ActionBase
from ansible.module_utils.formerly_core import thingtocall


class ActionModule(ActionBase):
    TRANSFERS_FILES = False
    _VALID_ARGS = frozenset()

    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(None, task_vars)

        result = dict(changed=False, ttc_res=thingtocall())

        return result
@@ -0,0 +1,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


def nested_same():
    return 'hello from nested_same'
@@ -0,0 +1,11 @@
# NB: this module should never be loaded, since we'll see the subpkg_with_init package dir first
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


def thingtocall():
    raise Exception('this should never be called (loaded discrete module instead of package module)')


def anotherthingtocall():
    raise Exception('this should never be called (loaded discrete module instead of package module)')
@@ -0,0 +1,13 @@
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json


def main():
    print(json.dumps(dict(changed=False, source='user', is_deprecated=True)))


if __name__ == '__main__':
    main()
@@ -0,0 +1,19 @@
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json
import sys

from ansible_collections.testns.testcoll.plugins.module_utils.moved_out_root import importme


def main():
    mu_result = importme()
    print(json.dumps(dict(changed=False, source='user', mu_result=mu_result)))

    sys.exit()


if __name__ == '__main__':
    main()
@@ -0,0 +1,19 @@
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json
import sys

from ansible.module_utils.formerly_core import thingtocall


def main():
    mu_result = thingtocall()
    print(json.dumps(dict(changed=False, source='user', mu_result=mu_result)))

    sys.exit()


if __name__ == '__main__':
    main()
@@ -0,0 +1,19 @@
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json
import sys

from ansible_collections.testns.testcoll.plugins.module_utils.nested_same.nested_same.nested_same import nested_same


def main():
    mu_result = nested_same()
    print(json.dumps(dict(changed=False, source='user', mu_result=mu_result)))

    sys.exit()


if __name__ == '__main__':
    main()
@@ -0,0 +1,19 @@
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json
import sys

from ansible_collections.testns.testcoll.plugins.module_utils.nested_same.nested_same import nested_same


def main():
    mu_result = nested_same.nested_same()
    print(json.dumps(dict(changed=False, source='user', mu_result=mu_result)))

    sys.exit()


if __name__ == '__main__':
    main()
@@ -0,0 +1,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


def importme():
    return "hello from {0}".format(__name__)
@@ -0,0 +1,13 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


def override_formerly_core_masked_filter(*args, **kwargs):
    return 'hello from overridden formerly_core_masked_filter'


class FilterModule(object):
    def filters(self):
        return {
            'formerly_core_masked_filter': override_formerly_core_masked_filter
        }
26
test/integration/targets/collections/inventory_test.yml
Normal file
@@ -0,0 +1,26 @@
- name: test a collection-hosted connection plugin against hosts from collection-hosted inventory plugins
  hosts: dynamic_host_a, dynamic_host_redirected
  gather_facts: no
  vars:
    ansible_connection: testns.testcoll.localconn
    ansible_localconn_connectionvar: from_play
  tasks:
    - raw: echo 'hello world'
      register: connection_out

    - assert:
        that:
          - connection_out.stdout == "localconn ran echo 'hello world'"
          # ensure that the connection var we overrode above made it into the running config
          - connection_out.stderr == "connectionvar is from_play"


- hosts: localhost
  gather_facts: no
  tasks:
    - assert:
        that:
          - hostvars['dynamic_host_a'] is defined
          - hostvars['dynamic_host_a'].connection_out.stdout == "localconn ran echo 'hello world'"
          - hostvars['dynamic_host_redirected'] is defined
          - hostvars['dynamic_host_redirected'].connection_out.stdout == "localconn ran echo 'hello world'"
@@ -63,6 +63,16 @@
       testns.testcoll.uses_leaf_mu_module_import_from:
       register: from_out

+    # module with multiple levels of the same nested package name and imported as a function
+    - name: exec module with multiple levels of the same nested package name imported as a function
+      testns.testcoll.uses_nested_same_as_func:
+      register: from_nested_func
+
+    # module with multiple levels of the same nested package name and imported as a module
+    - name: exec module with multiple levels of the same nested package name imported as a module
+      testns.testcoll.uses_nested_same_as_module:
+      register: from_nested_module
+
     - assert:
         that:
           - testmodule_out.source == 'user'

@@ -79,6 +89,8 @@
           - flat_out.mu_result == 'thingtocall in leaf'
           - from_out.mu_result == 'thingtocall in leaf'
           - from_out.mu2_result == 'thingtocall in secondary'
+          - from_nested_func.mu_result == 'hello from nested_same'
+          - from_nested_module.mu_result == 'hello from nested_same'

 - hosts: testhost
   tasks:
@@ -373,28 +385,7 @@
    - include_tasks: includeme.yml


-- name: test a collection-hosted connection plugin against a host from a collection-hosted inventory plugin
-  hosts: dynamic_host_a
-  vars:
-    ansible_connection: testns.testcoll.localconn
-    ansible_localconn_connectionvar: from_play
-  tasks:
-    - raw: echo 'hello world'
-      register: connection_out
-
-    - assert:
-        that:
-          - connection_out.stdout == "localconn ran echo 'hello world'"
-          # ensure that the connection var we overrode above made it into the running config
-          - connection_out.stderr == "connectionvar is from_play"
-
-- hosts: testhost
-  tasks:
-    - assert:
-        that:
-          - hostvars['dynamic_host_a'] is defined
-          - hostvars['dynamic_host_a'].connection_out.stdout == "localconn ran echo 'hello world'"
-
+- import_playbook: test_collection_meta.yml

- name: Test FQCN handlers
  hosts: testhost
  vars:
@@ -0,0 +1,3 @@
# use a plugin redirected by core to a collection to ensure inventory redirection and redirected config names are working
plugin: formerly_core_inventory  # this is defined in the ansible-base runtime.yml routing to point at testns.content_adj.statichost
hostname: dynamic_host_redirected
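This source names the plugin by its legacy core name; resolution to testns.content_adj.statichost happens through the builtin routing table shipped with ansible-base. A toy sketch of that lookup (the data shape is assumed for illustration, not the loader's actual code):

    # follow one redirect hop through routing data shaped like ansible_builtin_runtime.yml
    routing = {
        'plugin_routing': {
            'inventory': {
                'formerly_core_inventory': {'redirect': 'testns.content_adj.statichost'},
            }
        }
    }

    def resolve(plugin_type, name, routing_data):
        entry = routing_data.get('plugin_routing', {}).get(plugin_type, {}).get(name, {})
        return entry.get('redirect', name)  # unchanged when no redirect is defined

    assert resolve('inventory', 'formerly_core_inventory', routing) == 'testns.content_adj.statichost'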
@@ -38,19 +38,22 @@ else
    export TEST_PLAYBOOK=posix.yml

    echo "testing default collection support"
-   ansible-playbook -i "${INVENTORY_PATH}" collection_root_user/ansible_collections/testns/testcoll/playbooks/default_collection_playbook.yml
+   ansible-playbook -i "${INVENTORY_PATH}" collection_root_user/ansible_collections/testns/testcoll/playbooks/default_collection_playbook.yml "$@"
fi

# run test playbooks
-ansible-playbook -i "${INVENTORY_PATH}" -i ./a.statichost.yml -v "${TEST_PLAYBOOK}" "$@"
+ansible-playbook -i "${INVENTORY_PATH}" -v "${TEST_PLAYBOOK}" "$@"

if [[ ${INVENTORY_PATH} != *.winrm ]]; then
-   ansible-playbook -i "${INVENTORY_PATH}" -i ./a.statichost.yml -v invocation_tests.yml "$@"
+   ansible-playbook -i "${INVENTORY_PATH}" -v invocation_tests.yml "$@"
fi

+# test collection inventories
+ansible-playbook inventory_test.yml -i a.statichost.yml -i redirected.statichost.yml "$@"
+
# test adjacent with --playbook-dir
export ANSIBLE_COLLECTIONS_PATHS=''
-ANSIBLE_INVENTORY_ANY_UNPARSED_IS_FAILED=1 ansible-inventory -i a.statichost.yml --list --export --playbook-dir=. -v "$@"
+ANSIBLE_INVENTORY_ANY_UNPARSED_IS_FAILED=1 ansible-inventory --list --export --playbook-dir=. -v "$@"

# use an inventory source with caching enabled
ansible-playbook -i a.statichost.yml -i ./cache.statichost.yml -v check_populated_inventory.yml
@@ -0,0 +1,46 @@
- hosts: localhost
  gather_facts: no
  collections:
    - testns.testcoll
  vars:
    # redirect connection
    ansible_connection: testns.testcoll.redirected_local
  tasks:
    - assert:
        that: ('data' | testns.testcoll.testfilter) == 'data_via_testfilter_from_userdir'

    # redirect module (multiple levels)
    - multilevel1:
    # redirect action
    - uses_redirected_action:
    # redirect import (consumed via action)
    - uses_redirected_import:
    # redirect lookup
    - assert:
        that: lookup('formerly_core_lookup') == 'mylookup_from_user_dir'
    # redirect filter
    - assert:
        that: ('yes' | formerly_core_filter) == True
    # legacy filter should mask redirected
    - assert:
        that: ('' | formerly_core_masked_filter) == 'hello from overridden formerly_core_masked_filter'
    # redirect test
    - assert:
        that:
          - "'stuff' is formerly_core_test('tuf')"
          - "'hello override' is formerly_core_masked_test"
    # redirect module (formerly internal)
    - formerly_core_ping:
    # redirect module from collection (with subdir)
    - testns.testcoll.module_subdir.subdir_ping_module:
    # redirect module_utils plugin (consumed via module)
    - uses_core_redirected_mu:
    # deprecated module (issues warning)
    - deprecated_ping:
    # redirect module (internal alias)
    - aliased_ping:
    # redirect module (cycle detection, fatal)
    # - looped_ping:

    # removed module (fatal)
    # - dead_ping:
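Each case above maps to an entry in the test collection's meta/runtime.yml. A hedged sketch of what such entries look like (the key names follow the runtime metadata introduced by this change; the specific targets and texts are illustrative, not the fixture's contents):

    import yaml

    RUNTIME_YML_SKETCH = """
    plugin_routing:
      modules:
        multilevel1:
          redirect: testns.testcoll.multilevel2   # assumed intermediate hop
        deprecated_ping:
          deprecation:
            warning_text: deprecated_ping is deprecated; use ping instead
        dead_ping:
          tombstone:
            warning_text: dead_ping was removed
    """

    routing = yaml.safe_load(RUNTIME_YML_SKETCH)
    assert routing['plugin_routing']['modules']['multilevel1']['redirect'] == 'testns.testcoll.multilevel2'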
@@ -0,0 +1,16 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


def override_formerly_core_masked_test(value, *args, **kwargs):
    if value != 'hello override':
        raise Exception('expected "hello override" only...')

    return True


class TestModule(object):
    def tests(self):
        return {
            'formerly_core_masked_test': override_formerly_core_masked_test
        }
@@ -26,10 +26,11 @@ def pytest_configure():
    except AttributeError:
        pytest_configure.executed = True

-   from ansible.utils.collection_loader import AnsibleCollectionLoader
+   from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder

    # allow unit tests to import code from collections
-   sys.meta_path.insert(0, AnsibleCollectionLoader())
+   _AnsibleCollectionFinder(paths=[os.path.dirname(ANSIBLE_COLLECTIONS_PATH)])._install()  # pylint: disable=protected-access

    # noinspection PyProtectedMember
    import py._path.local
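For context, the new unit tests in this change (test_loader_install/test_loader_remove below) pin down what _install() does. A conceptual sketch, not the actual implementation:

    import sys

    class FinderSketch:
        def _install(self):
            # drop any stale instances of ourselves, then take the front slot
            sys.meta_path[:] = [f for f in sys.meta_path if not isinstance(f, FinderSketch)]
            sys.meta_path.insert(0, self)
            sys.path_hooks.insert(0, self._path_hook)  # first crack at new path entries

        def _path_hook(self, path):
            # decline non-collection paths so the stock FileFinder handles them
            raise ImportError('not a collection path')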
@@ -2,4 +2,5 @@ cryptography
jinja2
junit-xml
ordereddict ; python_version < '2.7'
+packaging
pyyaml
@@ -2,5 +2,6 @@ cryptography
jinja2
junit-xml
ordereddict ; python_version < '2.7' # ansible-test junit callback plugin requirement
+packaging
paramiko
pyyaml
@@ -5,6 +5,7 @@ ntlm-auth
ordereddict ; python_version < '2.7' # ansible-test junit callback plugin requirement
requests-ntlm
requests-credssp
+packaging
pypsrp
pywinrm[credssp]
pyyaml
@@ -9,17 +9,20 @@ def main():
    Main program function used to isolate globals from imported code.
    Changes to globals in imported modules on Python 2.x will overwrite our own globals.
    """
+   import ansible
    import contextlib
+   import json
    import os
    import re
    import runpy
+   import subprocess
    import sys
    import traceback
-   import types
    import warnings

-   ansible_path = os.environ['PYTHONPATH']
+   ansible_path = os.path.dirname(os.path.dirname(ansible.__file__))
    temp_path = os.environ['SANITY_TEMP_PATH'] + os.path.sep
+   external_python = os.environ.get('SANITY_EXTERNAL_PYTHON')
    collection_full_name = os.environ.get('SANITY_COLLECTION_FULL_NAME')
    collection_root = os.environ.get('ANSIBLE_COLLECTIONS_PATHS')
@@ -37,63 +40,45 @@
        except ImportError:
            from io import StringIO

-       # pre-load an empty ansible package to prevent unwanted code in __init__.py from loading
-       # without this the ansible.release import there would pull in many Python modules which Ansible modules should not have access to
-       ansible_module = types.ModuleType('ansible')
-       ansible_module.__file__ = os.path.join(os.environ['PYTHONPATH'], 'ansible', '__init__.py')
-       ansible_module.__path__ = [os.path.dirname(ansible_module.__file__)]
-       ansible_module.__package__ = 'ansible'
-
-       sys.modules['ansible'] = ansible_module
-
        if collection_full_name:
            # allow importing code from collections when testing a collection
-           from ansible.utils.collection_loader import AnsibleCollectionLoader
-           from ansible.module_utils._text import to_bytes
-
-           def get_source(self, fullname):
-               with open(to_bytes(self.get_filename(fullname)), 'rb') as mod_file:
-                   return mod_file.read()
-
-           def get_code(self, fullname):
-               return compile(source=self.get_source(fullname), filename=self.get_filename(fullname), mode='exec', flags=0, dont_inherit=True)
-
-           def is_package(self, fullname):
-               return os.path.basename(self.get_filename(fullname)) in ('__init__.py', '__synthetic__')
-
-           def get_filename(self, fullname):
-               if fullname in sys.modules:
-                   return sys.modules[fullname].__file__
-
-               # find the module without importing it
-               # otherwise an ImportError during module load will prevent us from getting the filename of the module
-               loader = self.find_module(fullname)
-
-               if not loader:
-                   raise ImportError('module {0} not found'.format(fullname))
-
-               # determine the filename of the module that was found
-               filename = os.path.join(collection_root, fullname.replace('.', os.path.sep))
-
-               if os.path.isdir(filename):
-                   init_filename = os.path.join(filename, '__init__.py')
-                   filename = init_filename if os.path.exists(init_filename) else os.path.join(filename, '__synthetic__')
-               else:
-                   filename += '.py'
-
-               return filename
-
-           # monkeypatch collection loader to work with runpy
-           # remove this (and the associated code above) once implemented natively in the collection loader
-           AnsibleCollectionLoader.get_source = get_source
-           AnsibleCollectionLoader.get_code = get_code
-           AnsibleCollectionLoader.is_package = is_package
-           AnsibleCollectionLoader.get_filename = get_filename
-
-           collection_loader = AnsibleCollectionLoader()
-
-           # noinspection PyCallingNonCallable
-           sys.meta_path.insert(0, collection_loader)
+           from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
+           from ansible.utils.collection_loader import AnsibleCollectionConfig
+           from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder
+           from ansible.utils.collection_loader import _collection_finder
+
+           yaml_to_json_path = os.path.join(os.path.dirname(__file__), 'yaml_to_json.py')
+           yaml_to_dict_cache = {}
+
+           def yaml_to_dict(yaml, content_id):
+               """
+               Return a Python dict version of the provided YAML.
+               Conversion is done in a subprocess since the current Python interpreter does not have access to PyYAML.
+               """
+               if content_id in yaml_to_dict_cache:
+                   return yaml_to_dict_cache[content_id]
+
+               try:
+                   cmd = [external_python, yaml_to_json_path]
+                   proc = subprocess.Popen([to_bytes(c) for c in cmd], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+                   stdout_bytes, stderr_bytes = proc.communicate(to_bytes(yaml))
+
+                   if proc.returncode != 0:
+                       raise Exception('command %s failed with return code %d: %s' % ([to_native(c) for c in cmd], proc.returncode, to_native(stderr_bytes)))
+
+                   data = yaml_to_dict_cache[content_id] = json.loads(to_text(stdout_bytes))
+
+                   return data
+               except Exception as ex:
+                   raise Exception('internal importer error - failed to parse yaml: %s' % to_native(ex))
+
+           _collection_finder._meta_yml_to_dict = yaml_to_dict  # pylint: disable=protected-access
+
+           collection_loader = _AnsibleCollectionFinder(paths=[collection_root])
+           collection_loader._install()  # pylint: disable=protected-access
+           nuke_modules = list(m for m in sys.modules if m.partition('.')[0] == 'ansible')
+           map(sys.modules.pop, nuke_modules)
        else:
            # do not support collection loading when not testing a collection
            collection_loader = None
16
test/lib/ansible_test/_data/sanity/import/yaml_to_json.py
Normal file
@@ -0,0 +1,16 @@
"""Read YAML from stdin and write JSON to stdout."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json
import sys

from yaml import load

try:
    from yaml import CSafeLoader as SafeLoader
except ImportError:
    from yaml import SafeLoader


json.dump(load(sys.stdin, Loader=SafeLoader), sys.stdout)
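A quick usage sketch of this helper, mirroring how importer.py's yaml_to_dict() drives it over a pipe (the interpreter path is an assumption; in the sanity test it is the external Python that has PyYAML installed):

    import json
    import subprocess

    proc = subprocess.Popen(['python', 'yaml_to_json.py'],  # assumed interpreter with PyYAML
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    out, _ = proc.communicate(b'requires_ansible: ">=2.9"\n')
    print(json.loads(out))  # {'requires_ansible': '>=2.9'}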
@@ -42,7 +42,7 @@ from ansible.executor.module_common import REPLACER_WINDOWS
from ansible.module_utils.common._collections_compat import Mapping
from ansible.module_utils._text import to_bytes
from ansible.plugins.loader import fragment_loader
-from ansible.utils.collection_loader import AnsibleCollectionLoader
+from ansible.utils.collection_loader._collection_finder import _AnsibleCollectionFinder
from ansible.utils.plugin_docs import BLACKLIST, add_fragments, get_docstring
from ansible.utils.version import SemanticVersion
@@ -942,7 +942,7 @@ class ModuleValidator(Validator):
        else:
            # We are testing a collection
            if self.routing and self.routing.get('plugin_routing', {}).get('modules', {}).get(self.name, {}).get('deprecation', {}):
-               # meta/routing.yml says this is deprecated
+               # meta/runtime.yml says this is deprecated
                routing_says_deprecated = True
                deprecated = True
@@ -1118,15 +1118,15 @@ class ModuleValidator(Validator):
            self.reporter.error(
                path=self.object_path,
                code='collections-no-underscore-on-deprecation',
-               msg='Deprecated content in collections MUST NOT start with "_", update meta/routing.yml instead',
+               msg='Deprecated content in collections MUST NOT start with "_", update meta/runtime.yml instead',
            )

        if not (doc_deprecated == routing_says_deprecated):
-           # DOCUMENTATION.deprecated and meta/routing.yml disagree
+           # DOCUMENTATION.deprecated and meta/runtime.yml disagree
            self.reporter.error(
                path=self.object_path,
                code='deprecation-mismatch',
-               msg='"meta/routing.yml" and DOCUMENTATION.deprecation do not agree.'
+               msg='"meta/runtime.yml" and DOCUMENTATION.deprecation do not agree.'
            )

        # In the future we should error if ANSIBLE_METADATA exists in a collection
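The rule enforced above reduces to a simple agreement check between the two deprecation sources. A hedged distillation (names are illustrative, not validate-modules internals):

    def deprecation_agrees(doc, routing, module_name):
        # DOCUMENTATION.deprecated and meta/runtime.yml must both say deprecated, or neither
        doc_deprecated = bool(doc.get('deprecated'))
        routing_deprecated = bool(
            routing.get('plugin_routing', {}).get('modules', {}).get(module_name, {}).get('deprecation'))
        return doc_deprecated == routing_deprecated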
@@ -2140,39 +2140,8 @@ class PythonPackageValidator(Validator):


def setup_collection_loader():
-   def get_source(self, fullname):
-       mod = sys.modules.get(fullname)
-       if not mod:
-           mod = self.load_module(fullname)
-
-       with open(to_bytes(mod.__file__), 'rb') as mod_file:
-           source = mod_file.read()
-
-       return source
-
-   def get_code(self, fullname):
-       return compile(source=self.get_source(fullname), filename=self.get_filename(fullname), mode='exec', flags=0, dont_inherit=True)
-
-   def is_package(self, fullname):
-       return self.get_filename(fullname).endswith('__init__.py')
-
-   def get_filename(self, fullname):
-       mod = sys.modules.get(fullname) or self.load_module(fullname)
-
-       return mod.__file__
-
-   # monkeypatch collection loader to work with runpy
-   # remove this (and the associated code above) once implemented natively in the collection loader
-   AnsibleCollectionLoader.get_source = get_source
-   AnsibleCollectionLoader.get_code = get_code
-   AnsibleCollectionLoader.is_package = is_package
-   AnsibleCollectionLoader.get_filename = get_filename
-
-   collection_loader = AnsibleCollectionLoader()
-
-   # allow importing code from collections when testing a collection
-   # noinspection PyCallingNonCallable
-   sys.meta_path.insert(0, collection_loader)
+   collections_paths = os.environ.get('ANSIBLE_COLLECTIONS_PATHS', '').split(os.pathsep)
+   _AnsibleCollectionFinder(collections_paths)


def re_compile(value):
@@ -2228,8 +2197,8 @@ def run():
    routing = None
    if args.collection:
        setup_collection_loader()
-       routing_file = 'meta/routing.yml'
-       # Load meta/routing.yml if it exists, as it may contain deprecation information
+       routing_file = 'meta/runtime.yml'
+       # Load meta/runtime.yml if it exists, as it may contain deprecation information
        if os.path.isfile(routing_file):
            try:
                with open(routing_file) as f:
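The loading pattern set up above amounts to: read meta/runtime.yml from the collection root if present, otherwise proceed with no routing data. A minimal sketch (assuming PyYAML, which the real validator has available):

    import os
    import yaml

    def load_routing(routing_file='meta/runtime.yml'):
        if not os.path.isfile(routing_file):
            return None  # collection ships no runtime metadata
        with open(routing_file) as f:
            return yaml.safe_load(f)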
@@ -76,10 +76,12 @@ class ImportTest(SanityMultipleVersion):
        """
        capture_pip = args.verbosity < 2

+       python = find_python(python_version)
+
        if python_version.startswith('2.') and args.requirements:
            # hack to make sure that virtualenv is available under Python 2.x
            # on Python 3.x we can use the built-in venv
-           pip = generate_pip_command(find_python(python_version))
+           pip = generate_pip_command(python)
            run_command(args, generate_pip_install(pip, 'sanity.import', packages=['virtualenv']), capture=capture_pip)

        settings = self.load_processor(args, python_version)
@@ -102,8 +104,10 @@ class ImportTest(SanityMultipleVersion):

        # add the importer to our virtual environment so it can be accessed through the coverage injector
        importer_path = os.path.join(virtual_environment_bin, 'importer.py')
+       yaml_to_json_path = os.path.join(virtual_environment_bin, 'yaml_to_json.py')
        if not args.explain:
            os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'importer.py')), importer_path)
+           os.symlink(os.path.abspath(os.path.join(SANITY_ROOT, 'import', 'yaml_to_json.py')), yaml_to_json_path)

        # activate the virtual environment
        env['PATH'] = '%s:%s' % (virtual_environment_bin, env['PATH'])
@@ -115,6 +119,7 @@ class ImportTest(SanityMultipleVersion):
        if data_context().content.collection:
            env.update(
                SANITY_COLLECTION_FULL_NAME=data_context().content.collection.full_name,
+               SANITY_EXTERNAL_PYTHON=python,
            )

        virtualenv_python = os.path.join(virtual_environment_bin, 'python')
@@ -19,7 +19,7 @@ def main():
    )

    skip_paths = set([
-       'lib/ansible/config/routing.yml',  # not included in the sanity ignore file since it won't exist until after migration
+       'lib/ansible/config/ansible_builtin_runtime.yml',  # not included in the sanity ignore file since it won't exist until after migration
    ])

    skip_directories = (
@@ -550,12 +550,19 @@ test/units/plugins/test_plugins.py pylint:blacklisted-name
test/units/template/test_templar.py pylint:blacklisted-name
test/units/test_constants.py future-import-boilerplate
test/units/test_context.py future-import-boilerplate
-test/units/utils/fixtures/collections/ansible_collections/my_namespace/my_collection/plugins/action/my_action.py future-import-boilerplate
-test/units/utils/fixtures/collections/ansible_collections/my_namespace/my_collection/plugins/action/my_action.py metaclass-boilerplate
-test/units/utils/fixtures/collections/ansible_collections/my_namespace/my_collection/plugins/module_utils/my_other_util.py future-import-boilerplate
-test/units/utils/fixtures/collections/ansible_collections/my_namespace/my_collection/plugins/module_utils/my_other_util.py metaclass-boilerplate
-test/units/utils/fixtures/collections/ansible_collections/my_namespace/my_collection/plugins/module_utils/my_util.py future-import-boilerplate
-test/units/utils/fixtures/collections/ansible_collections/my_namespace/my_collection/plugins/module_utils/my_util.py metaclass-boilerplate
+test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py future-import-boilerplate
+test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py metaclass-boilerplate
+test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/action/my_action.py pylint:relative-beyond-top-level
+test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_other_util.py future-import-boilerplate
+test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_other_util.py metaclass-boilerplate
+test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py future-import-boilerplate
+test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/module_utils/my_util.py metaclass-boilerplate
+test/units/utils/collection_loader/fixtures/collections/ansible_collections/testns/testcoll/plugins/modules/__init__.py empty-init # testing that collections don't need inits
+test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/__init__.py empty-init # testing that collections don't need inits
+test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/ansible/__init__.py empty-init # testing that collections don't need inits
+test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/__init__.py empty-init # testing that collections don't need inits
+test/units/utils/collection_loader/fixtures/collections_masked/ansible_collections/testns/testcoll/__init__.py empty-init # testing that collections don't need inits
+test/units/utils/collection_loader/test_collection_loader.py pylint:undefined-variable # magic runtime local var splatting
test/units/utils/test_cleanup_tmp_file.py future-import-boilerplate
test/units/utils/test_encrypt.py future-import-boilerplate
test/units/utils/test_encrypt.py metaclass-boilerplate
@@ -0,0 +1,4 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

raise Exception('this module should never be loaded')
@@ -0,0 +1,4 @@
plugin_routing:
  modules:
    rerouted_module:
      redirect: ansible.builtin.ping
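The unit tests below read this fixture back through the private metadata helper; an illustrative use (it assumes the collection finder has already been installed pointing at the fixture root):

    from ansible.utils.collection_loader._collection_finder import _get_collection_metadata

    meta = _get_collection_metadata('testns.testcoll')
    assert meta['plugin_routing']['modules']['rerouted_module']['redirect'] == 'ansible.builtin.ping'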
@@ -0,0 +1,5 @@
from ..module_utils.my_util import question


def action_code():
    return "hello from my_action.py"
@@ -0,0 +1,5 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


raise Exception('this should never run')
@@ -0,0 +1,6 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


def module_code():
    return "hello from amodule.py"
@@ -0,0 +1,5 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


raise Exception('this code should never execute')

@@ -0,0 +1,5 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


raise Exception('this code should never execute')

@@ -0,0 +1,5 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


raise Exception('this code should never execute')

@@ -0,0 +1,5 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type


raise Exception('this code should never execute')
809
test/units/utils/collection_loader/test_collection_loader.py
Normal file
@@ -0,0 +1,809 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import pkgutil
import pytest
import re
import sys

from ansible.module_utils.six import PY3, string_types
from ansible.module_utils.compat.importlib import import_module
from ansible.utils.collection_loader import AnsibleCollectionConfig, AnsibleCollectionRef
from ansible.utils.collection_loader._collection_finder import (
    _AnsibleCollectionFinder, _AnsibleCollectionLoader, _AnsibleCollectionNSPkgLoader, _AnsibleCollectionPkgLoader,
    _AnsibleCollectionPkgLoaderBase, _AnsibleCollectionRootPkgLoader, _AnsiblePathHookFinder,
    _get_collection_name_from_path, _get_collection_role_path, _get_collection_metadata, _iter_modules_impl
)
from ansible.utils.collection_loader._collection_config import _EventSource
from units.compat.mock import MagicMock, NonCallableMagicMock, patch


# fixture to ensure we always clean up the import stuff when we're done
@pytest.fixture(autouse=True, scope='function')
def teardown(*args, **kwargs):
    yield
    reset_collections_loader_state()

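# NOTE: helpers used throughout this excerpt (get_default_finder,
# reset_collections_loader_state, default_test_collection_paths, extend_paths,
# nuke_module_prefix) are module-level utilities defined later in this file,
# beyond what is shown here. A hedged sketch of the likely shape of one:
#
#     def extend_paths(path_list, suffix):
#         # join a dotted-or-plain suffix onto every base path
#         suffix = suffix.replace('.', os.path.sep)
#         return [os.path.join(p, suffix) for p in path_list]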
# BEGIN STANDALONE TESTS - these exercise behaviors of the individual components without the import machinery


def test_finder_setup():
    # ensure scalar path is listified
    f = _AnsibleCollectionFinder(paths='/bogus/bogus')
    assert isinstance(f._n_collection_paths, list)

    # ensure sys.path paths that have an ansible_collections dir are added to the end of the collections paths
    with patch.object(sys, 'path', ['/bogus', default_test_collection_paths[1], '/morebogus', default_test_collection_paths[0]]):
        f = _AnsibleCollectionFinder(paths=['/explicit', '/other'])
        assert f._n_collection_paths == ['/explicit', '/other', default_test_collection_paths[1], default_test_collection_paths[0]]

    configured_paths = ['/bogus']
    playbook_paths = ['/playbookdir']
    f = _AnsibleCollectionFinder(paths=configured_paths)
    assert f._n_collection_paths == configured_paths
    f.set_playbook_paths(playbook_paths)
    assert f._n_collection_paths == extend_paths(playbook_paths, 'collections') + configured_paths

    # ensure scalar playbook_paths gets listified
    f.set_playbook_paths(playbook_paths[0])
    assert f._n_collection_paths == extend_paths(playbook_paths, 'collections') + configured_paths


def test_finder_not_interested():
    f = get_default_finder()
    assert f.find_module('nothanks') is None
    assert f.find_module('nothanks.sub', path=['/bogus/dir']) is None


def test_finder_ns():
    # ensure we can still load ansible_collections and ansible_collections.ansible when they don't exist on disk
    f = _AnsibleCollectionFinder(paths=['/bogus/bogus'])
    loader = f.find_module('ansible_collections')
    assert isinstance(loader, _AnsibleCollectionRootPkgLoader)

    loader = f.find_module('ansible_collections.ansible', path=['/bogus/bogus'])
    assert isinstance(loader, _AnsibleCollectionNSPkgLoader)

    f = get_default_finder()
    loader = f.find_module('ansible_collections')
    assert isinstance(loader, _AnsibleCollectionRootPkgLoader)

    # path is not allowed for top-level
    with pytest.raises(ValueError):
        f.find_module('ansible_collections', path=['whatever'])

    # path is required for subpackages
    with pytest.raises(ValueError):
        f.find_module('ansible_collections.whatever', path=None)

    paths = [os.path.join(p, 'ansible_collections/nonexistns') for p in default_test_collection_paths]

    # test missing
    loader = f.find_module('ansible_collections.nonexistns', paths)
    assert loader is None

# keep these up top to make sure the loader install/remove are working, since we rely on them heavily in the tests
def test_loader_remove():
    fake_mp = [MagicMock(), _AnsibleCollectionFinder(), MagicMock(), _AnsibleCollectionFinder()]
    fake_ph = [MagicMock().m1, MagicMock().m2, _AnsibleCollectionFinder()._ansible_collection_path_hook, NonCallableMagicMock]
    # must nest until 2.6 compilation is totally donezo
    with patch.object(sys, 'meta_path', fake_mp):
        with patch.object(sys, 'path_hooks', fake_ph):
            _AnsibleCollectionFinder()._remove()
            assert len(sys.meta_path) == 2
            # no AnsibleCollectionFinders on the meta path after remove is called
            assert all((not isinstance(mpf, _AnsibleCollectionFinder) for mpf in sys.meta_path))
            assert len(sys.path_hooks) == 3
            # none of the remaining path hooks should point at an AnsibleCollectionFinder
            assert all((not isinstance(ph.__self__, _AnsibleCollectionFinder) for ph in sys.path_hooks if hasattr(ph, '__self__')))
            assert AnsibleCollectionConfig.collection_finder is None


def test_loader_install():
    fake_mp = [MagicMock(), _AnsibleCollectionFinder(), MagicMock(), _AnsibleCollectionFinder()]
    fake_ph = [MagicMock().m1, MagicMock().m2, _AnsibleCollectionFinder()._ansible_collection_path_hook, NonCallableMagicMock]
    # must nest until 2.6 compilation is totally donezo
    with patch.object(sys, 'meta_path', fake_mp):
        with patch.object(sys, 'path_hooks', fake_ph):
            f = _AnsibleCollectionFinder()
            f._install()
            assert len(sys.meta_path) == 3  # should have removed the existing ACFs and installed a new one
            assert sys.meta_path[0] is f  # at the front
            # the rest of the meta_path should not be AnsibleCollectionFinders
            assert all((not isinstance(mpf, _AnsibleCollectionFinder) for mpf in sys.meta_path[1:]))
            assert len(sys.path_hooks) == 4  # should have removed the existing ACF path hooks and installed a new one
            # the first path hook should be ours, make sure it's pointing at the right instance
            assert hasattr(sys.path_hooks[0], '__self__') and sys.path_hooks[0].__self__ is f
            # the rest of the path_hooks should not point at an AnsibleCollectionFinder
            assert all((not isinstance(ph.__self__, _AnsibleCollectionFinder) for ph in sys.path_hooks[1:] if hasattr(ph, '__self__')))
            assert AnsibleCollectionConfig.collection_finder is f
            with pytest.raises(ValueError):
                AnsibleCollectionConfig.collection_finder = f

def test_finder_coll():
    f = get_default_finder()

    tests = [
        {'name': 'ansible_collections.testns.testcoll', 'test_paths': [default_test_collection_paths]},
        {'name': 'ansible_collections.ansible.builtin', 'test_paths': [['/bogus'], default_test_collection_paths]},
    ]
    # ensure finder works for legit paths and bogus paths
    for test_dict in tests:
        # splat the dict values to our locals
        globals().update(test_dict)
        parent_pkg = name.rpartition('.')[0]
        for paths in test_paths:
            paths = [os.path.join(p, parent_pkg.replace('.', '/')) for p in paths]
            loader = f.find_module(name, path=paths)
            assert isinstance(loader, _AnsibleCollectionPkgLoader)


def test_root_loader_not_interested():
    with pytest.raises(ImportError):
        _AnsibleCollectionRootPkgLoader('not_ansible_collections_toplevel', path_list=[])

    with pytest.raises(ImportError):
        _AnsibleCollectionRootPkgLoader('ansible_collections.somens', path_list=['/bogus'])


def test_root_loader():
    name = 'ansible_collections'
    # ensure this works even when ansible_collections doesn't exist on disk
    for paths in [], default_test_collection_paths:
        if name in sys.modules:
            del sys.modules[name]
        loader = _AnsibleCollectionRootPkgLoader(name, paths)
        assert repr(loader).startswith('_AnsibleCollectionRootPkgLoader(path=')
        module = loader.load_module(name)
        assert module.__name__ == name
        assert module.__path__ == [p for p in extend_paths(paths, name) if os.path.isdir(p)]
        # even if the dir exists somewhere, this loader doesn't support get_data, so make __file__ a non-file
        assert module.__file__ == '<ansible_synthetic_collection_package>'
        assert module.__package__ == name
        assert sys.modules.get(name) == module


def test_nspkg_loader_not_interested():
    with pytest.raises(ImportError):
        _AnsibleCollectionNSPkgLoader('not_ansible_collections_toplevel.something', path_list=[])

    with pytest.raises(ImportError):
        _AnsibleCollectionNSPkgLoader('ansible_collections.somens.somecoll', path_list=[])


def test_nspkg_loader_load_module():
    # ensure the loader behaves on the toplevel and ansible packages for both legit and missing/bogus paths
    for name in ['ansible_collections.ansible', 'ansible_collections.testns']:
        parent_pkg = name.partition('.')[0]
        module_to_load = name.rpartition('.')[2]
        paths = extend_paths(default_test_collection_paths, parent_pkg)
        existing_child_paths = [p for p in extend_paths(paths, module_to_load) if os.path.exists(p)]
        if name in sys.modules:
            del sys.modules[name]
        loader = _AnsibleCollectionNSPkgLoader(name, path_list=paths)
        assert repr(loader).startswith('_AnsibleCollectionNSPkgLoader(path=')
        module = loader.load_module(name)
        assert module.__name__ == name
        assert isinstance(module.__loader__, _AnsibleCollectionNSPkgLoader)
        assert module.__path__ == existing_child_paths
        assert module.__package__ == name
        assert module.__file__ == '<ansible_synthetic_collection_package>'
        assert sys.modules.get(name) == module


def test_collpkg_loader_not_interested():
    with pytest.raises(ImportError):
        _AnsibleCollectionPkgLoader('not_ansible_collections', path_list=[])

    with pytest.raises(ImportError):
        _AnsibleCollectionPkgLoader('ansible_collections.ns', path_list=['/bogus/bogus'])

def test_collpkg_loader_load_module():
    reset_collections_loader_state()
    with patch('ansible.utils.collection_loader.AnsibleCollectionConfig') as p:
        for name in ['ansible_collections.ansible.builtin', 'ansible_collections.testns.testcoll']:
            parent_pkg = name.rpartition('.')[0]
            module_to_load = name.rpartition('.')[2]
            paths = extend_paths(default_test_collection_paths, parent_pkg)
            existing_child_paths = [p for p in extend_paths(paths, module_to_load) if os.path.exists(p)]
            is_builtin = 'ansible.builtin' in name
            if name in sys.modules:
                del sys.modules[name]
            loader = _AnsibleCollectionPkgLoader(name, path_list=paths)
            assert repr(loader).startswith('_AnsibleCollectionPkgLoader(path=')
            module = loader.load_module(name)
            assert module.__name__ == name
            assert isinstance(module.__loader__, _AnsibleCollectionPkgLoader)
            if is_builtin:
                assert module.__path__ == []
            else:
                assert module.__path__ == [existing_child_paths[0]]

            assert module.__package__ == name
            if is_builtin:
                assert module.__file__ == '<ansible_synthetic_collection_package>'
            else:
                assert module.__file__.endswith('__synthetic__') and os.path.isdir(os.path.dirname(module.__file__))
            assert sys.modules.get(name) == module

            assert hasattr(module, '_collection_meta') and isinstance(module._collection_meta, dict)

            # FIXME: validate _collection_meta contents match what's on disk (or not)

            # if the module has metadata, try loading it with busted metadata
            if module._collection_meta:
                _collection_finder = import_module('ansible.utils.collection_loader._collection_finder')
                with patch.object(_collection_finder, '_meta_yml_to_dict', side_effect=Exception('bang')):
                    with pytest.raises(Exception) as ex:
                        _AnsibleCollectionPkgLoader(name, path_list=paths).load_module(name)
                    assert 'error parsing collection metadata' in str(ex.value)


def test_coll_loader():
    with patch('ansible.utils.collection_loader.AnsibleCollectionConfig'):
        with pytest.raises(ValueError):
            # not a collection
            _AnsibleCollectionLoader('ansible_collections')

        with pytest.raises(ValueError):
            # bogus paths
            _AnsibleCollectionLoader('ansible_collections.testns.testcoll', path_list=[])

    # FIXME: more


def test_path_hook_setup():
    with patch.object(sys, 'path_hooks', []):
        found_hook = None
        pathhook_exc = None
        try:
            found_hook = _AnsiblePathHookFinder._get_filefinder_path_hook()
        except Exception as phe:
            pathhook_exc = phe

        if PY3:
            assert str(pathhook_exc) == 'need exactly one FileFinder import hook (found 0)'
        else:
            assert found_hook is None

    assert repr(_AnsiblePathHookFinder(object(), '/bogus/path')) == "_AnsiblePathHookFinder(path='/bogus/path')"

def test_new_or_existing_module():
    module_name = 'blar.test.module'
    pkg_name = module_name.rpartition('.')[0]

    # create new module case
    nuke_module_prefix(module_name)
    with _AnsibleCollectionPkgLoaderBase._new_or_existing_module(module_name, __package__=pkg_name) as new_module:
        # the module we just created should now exist in sys.modules
        assert sys.modules.get(module_name) is new_module
        assert new_module.__name__ == module_name

    # the module should stick since we didn't raise an exception in the contextmgr
    assert sys.modules.get(module_name) is new_module

    # reuse existing module case
    with _AnsibleCollectionPkgLoaderBase._new_or_existing_module(module_name, __attr1__=42, blar='yo') as existing_module:
        assert sys.modules.get(module_name) is new_module  # should be the same module we created earlier
        assert hasattr(existing_module, '__package__') and existing_module.__package__ == pkg_name
        assert hasattr(existing_module, '__attr1__') and existing_module.__attr1__ == 42
        assert hasattr(existing_module, 'blar') and existing_module.blar == 'yo'

    # exception during update existing shouldn't zap existing module from sys.modules
    with pytest.raises(ValueError) as ve:
        with _AnsibleCollectionPkgLoaderBase._new_or_existing_module(module_name) as existing_module:
            err_to_raise = ValueError('bang')
            raise err_to_raise
    # make sure we got our error
    assert ve.value is err_to_raise
    # and that the module still exists
    assert sys.modules.get(module_name) is existing_module

    # test module removal after exception during creation
    nuke_module_prefix(module_name)
    with pytest.raises(ValueError) as ve:
        with _AnsibleCollectionPkgLoaderBase._new_or_existing_module(module_name) as new_module:
            err_to_raise = ValueError('bang')
            raise err_to_raise
    # make sure we got our error
    assert ve.value is err_to_raise
    # and that the module was removed
    assert sys.modules.get(module_name) is None


def test_iter_modules_impl():
    modules_trailer = 'ansible_collections/testns/testcoll/plugins'
    modules_pkg_prefix = modules_trailer.replace('/', '.') + '.'
    modules_path = os.path.join(default_test_collection_paths[0], modules_trailer)
    modules = list(_iter_modules_impl([modules_path], modules_pkg_prefix))

    assert modules
    assert set([('ansible_collections.testns.testcoll.plugins.action', True),
                ('ansible_collections.testns.testcoll.plugins.module_utils', True),
                ('ansible_collections.testns.testcoll.plugins.modules', True)]) == set(modules)

    modules_trailer = 'ansible_collections/testns/testcoll/plugins/modules'
    modules_pkg_prefix = modules_trailer.replace('/', '.') + '.'
    modules_path = os.path.join(default_test_collection_paths[0], modules_trailer)
    modules = list(_iter_modules_impl([modules_path], modules_pkg_prefix))

    assert modules
    assert len(modules) == 1
    assert modules[0][0] == 'ansible_collections.testns.testcoll.plugins.modules.amodule'  # name
    assert modules[0][1] is False  # is_pkg

    # FIXME: more


# BEGIN IN-CIRCUIT TESTS - these exercise behaviors of the loader when wired up to the import machinery

def test_import_from_collection(monkeypatch):
    collection_root = os.path.join(os.path.dirname(__file__), 'fixtures', 'collections')
    collection_path = os.path.join(collection_root, 'ansible_collections/testns/testcoll/plugins/module_utils/my_util.py')

    # THIS IS UNSTABLE UNDER A DEBUGGER
    # the trace we're expecting to be generated when running the code below:
    # answer = question()
    expected_trace_log = [
        (collection_path, 5, 'call'),
        (collection_path, 6, 'line'),
        (collection_path, 6, 'return'),
    ]

    # define the collection root before any ansible code has been loaded
    # otherwise config will have already been loaded and changing the environment will have no effect
    monkeypatch.setenv('ANSIBLE_COLLECTIONS_PATHS', collection_root)

    finder = _AnsibleCollectionFinder(paths=[collection_root])
    reset_collections_loader_state(finder)

    from ansible_collections.testns.testcoll.plugins.module_utils.my_util import question

    original_trace_function = sys.gettrace()
    trace_log = []

    if original_trace_function:
        # enable tracing while preserving the existing trace function (coverage)
        def my_trace_function(frame, event, arg):
            trace_log.append((frame.f_code.co_filename, frame.f_lineno, event))

            # the original trace function expects to have itself set as the trace function
            sys.settrace(original_trace_function)
            # call the original trace function
            original_trace_function(frame, event, arg)
            # restore our trace function
            sys.settrace(my_trace_function)

            return my_trace_function
    else:
        # no existing trace function, so our trace function is much simpler
        def my_trace_function(frame, event, arg):
            trace_log.append((frame.f_code.co_filename, frame.f_lineno, event))

            return my_trace_function

    sys.settrace(my_trace_function)

    try:
        # run a minimal amount of code while the trace is running
        # adding more code here, including use of a context manager, will add more to our trace
        answer = question()
    finally:
        sys.settrace(original_trace_function)

    # make sure 'import ... as ...' works on builtin synthetic collections
    # the following import is not supported (it tries to find module_utils in ansible.plugins)
    # import ansible_collections.ansible.builtin.plugins.module_utils as c1
    import ansible_collections.ansible.builtin.plugins.action as c2
    import ansible_collections.ansible.builtin.plugins as c3
    import ansible_collections.ansible.builtin as c4
    import ansible_collections.ansible as c5
    import ansible_collections as c6

    # make sure 'import ...' works on builtin synthetic collections
    import ansible_collections.ansible.builtin.plugins.module_utils

    import ansible_collections.ansible.builtin.plugins.action
    assert ansible_collections.ansible.builtin.plugins.action == c3.action == c2

    import ansible_collections.ansible.builtin.plugins
    assert ansible_collections.ansible.builtin.plugins == c4.plugins == c3

    import ansible_collections.ansible.builtin
    assert ansible_collections.ansible.builtin == c5.builtin == c4

    import ansible_collections.ansible
    assert ansible_collections.ansible == c6.ansible == c5

    import ansible_collections
    assert ansible_collections == c6

    # make sure 'from ... import ...' works on builtin synthetic collections
    from ansible_collections.ansible import builtin
    from ansible_collections.ansible.builtin import plugins
    assert builtin.plugins == plugins

    from ansible_collections.ansible.builtin.plugins import action
    from ansible_collections.ansible.builtin.plugins.action import command
    assert action.command == command

    from ansible_collections.ansible.builtin.plugins.module_utils import basic
    from ansible_collections.ansible.builtin.plugins.module_utils.basic import AnsibleModule
    assert basic.AnsibleModule == AnsibleModule

    # make sure relative imports work from collections code
    # these require __package__ to be set correctly
    import ansible_collections.testns.testcoll.plugins.module_utils.my_other_util
    import ansible_collections.testns.testcoll.plugins.action.my_action

    # verify that code loaded from a collection does not inherit __future__ statements from the collection loader
    if sys.version_info[0] == 2:
        # if the collection code inherits the division future feature from the collection loader this will fail
        assert answer == 1
    else:
        assert answer == 1.5

    # verify that the filename and line number reported by the trace is correct
    # this makes sure that collection loading preserves file paths and line numbers
    assert trace_log == expected_trace_log

def test_eventsource():
    es = _EventSource()
    # fire when empty should succeed
    es.fire(42)
    handler1 = MagicMock()
    handler2 = MagicMock()
    es += handler1
    es.fire(99, my_kwarg='blah')
    handler1.assert_called_with(99, my_kwarg='blah')
    es += handler2
    es.fire(123, foo='bar')
    handler1.assert_called_with(123, foo='bar')
    handler2.assert_called_with(123, foo='bar')
    es -= handler2
    handler1.reset_mock()
    handler2.reset_mock()
    es.fire(123, foo='bar')
    handler1.assert_called_with(123, foo='bar')
    handler2.assert_not_called()
    es -= handler1
    handler1.reset_mock()
    es.fire('blah', kwarg=None)
    handler1.assert_not_called()
    handler2.assert_not_called()
    es -= handler1  # should succeed silently
    handler_bang = MagicMock(side_effect=Exception('bang'))
    es += handler_bang
    with pytest.raises(Exception) as ex:
        es.fire(123)
    assert 'bang' in str(ex.value)
    handler_bang.assert_called_with(123)
    with pytest.raises(ValueError):
        es += 42

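# A hedged sketch of the _EventSource contract exercised above (an
# illustration implied by the assertions, not the actual implementation
# in _collection_config.py):
#
#     class _EventSourceSketch:
#         def __init__(self):
#             self._handlers = set()
#
#         def __iadd__(self, handler):
#             if not callable(handler):
#                 raise ValueError('handler must be callable')
#             self._handlers.add(handler)
#             return self
#
#         def __isub__(self, handler):
#             self._handlers.discard(handler)  # removing a missing handler is silent
#             return self
#
#         def fire(self, *args, **kwargs):
#             for h in self._handlers:
#                 h(*args, **kwargs)  # a handler exception propagates to the caller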
def test_on_collection_load():
|
||||||
|
finder = get_default_finder()
|
||||||
|
reset_collections_loader_state(finder)
|
||||||
|
|
||||||
|
load_handler = MagicMock()
|
||||||
|
AnsibleCollectionConfig.on_collection_load += load_handler
|
||||||
|
|
||||||
|
m = import_module('ansible_collections.testns.testcoll')
|
||||||
|
load_handler.assert_called_once_with(collection_name='testns.testcoll', collection_path=os.path.dirname(m.__file__))
|
||||||
|
|
||||||
|
_meta = _get_collection_metadata('testns.testcoll')
|
||||||
|
assert _meta
|
||||||
|
# FIXME: compare to disk
|
||||||
|
|
||||||
|
finder = get_default_finder()
|
||||||
|
reset_collections_loader_state(finder)
|
||||||
|
|
||||||
|
AnsibleCollectionConfig.on_collection_load += MagicMock(side_effect=Exception('bang'))
|
||||||
|
with pytest.raises(Exception) as ex:
|
||||||
|
import_module('ansible_collections.testns.testcoll')
|
||||||
|
assert 'bang' in str(ex.value)
|
||||||
|
|
||||||
|
|
||||||
|
def test_default_collection_config():
|
||||||
|
finder = get_default_finder()
|
||||||
|
reset_collections_loader_state(finder)
|
||||||
|
assert AnsibleCollectionConfig.default_collection is None
|
||||||
|
AnsibleCollectionConfig.default_collection = 'foo.bar'
|
||||||
|
assert AnsibleCollectionConfig.default_collection == 'foo.bar'
|
||||||
|
with pytest.raises(ValueError):
|
||||||
|
AnsibleCollectionConfig.default_collection = 'bar.baz'
|
||||||
|
|
||||||
|
|
||||||
|
def test_default_collection_detection():
|
||||||
|
finder = get_default_finder()
|
||||||
|
reset_collections_loader_state(finder)
|
||||||
|
|
||||||
|
# we're clearly not under a collection path
|
||||||
|
assert _get_collection_name_from_path('/') is None
|
||||||
|
|
||||||
|
# something that looks like a collection path but isn't importable by our finder
|
||||||
|
assert _get_collection_name_from_path('/foo/ansible_collections/bogusns/boguscoll/bar') is None
|
||||||
|
|
||||||
|
# legit, at the top of the collection
|
||||||
|
live_collection_path = os.path.join(os.path.dirname(__file__), 'fixtures/collections/ansible_collections/testns/testcoll')
|
||||||
|
assert _get_collection_name_from_path(live_collection_path) == 'testns.testcoll'
|
||||||
|
|
||||||
|
# legit, deeper inside the collection
|
||||||
|
live_collection_deep_path = os.path.join(live_collection_path, 'plugins/modules')
|
||||||
|
assert _get_collection_name_from_path(live_collection_deep_path) == 'testns.testcoll'
|
||||||
|
|
||||||
|
# this one should be hidden by the real testns.testcoll, so should not resolve
|
||||||
|
masked_collection_path = os.path.join(os.path.dirname(__file__), 'fixtures/collections_masked/ansible_collections/testns/testcoll')
|
||||||
|
assert _get_collection_name_from_path(masked_collection_path) is None
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
    'role_name,collection_list,expected_collection_name,expected_path_suffix',
    [
        ('some_role', ['testns.testcoll', 'ansible.bogus'], 'testns.testcoll', 'testns/testcoll/roles/some_role'),
        ('testns.testcoll.some_role', ['ansible.bogus', 'testns.testcoll'], 'testns.testcoll', 'testns/testcoll/roles/some_role'),
        ('testns.testcoll.some_role', [], 'testns.testcoll', 'testns/testcoll/roles/some_role'),
        ('testns.testcoll.some_role', None, 'testns.testcoll', 'testns/testcoll/roles/some_role'),
        ('some_role', [], None, None),
        ('some_role', None, None, None),
    ])
def test_collection_role_name_location(role_name, collection_list, expected_collection_name, expected_path_suffix):
    finder = get_default_finder()
    reset_collections_loader_state(finder)

    expected_path = None
    if expected_path_suffix:
        expected_path = os.path.join(os.path.dirname(__file__), 'fixtures/collections/ansible_collections', expected_path_suffix)

    found = _get_collection_role_path(role_name, collection_list)

    if found:
        assert found[0] == role_name.rpartition('.')[2]
        assert found[1] == expected_path
        assert found[2] == expected_collection_name
    else:
        assert expected_collection_name is None and expected_path_suffix is None


def test_bogus_imports():
    finder = get_default_finder()
    reset_collections_loader_state(finder)

    # ensure ImportError on known-bogus imports
    bogus_imports = ['bogus_toplevel', 'ansible_collections.bogusns', 'ansible_collections.testns.boguscoll',
                     'ansible_collections.testns.testcoll.bogussub', 'ansible_collections.ansible.builtin.bogussub']
    for bogus_import in bogus_imports:
        with pytest.raises(ImportError):
            import_module(bogus_import)


def test_finder_playbook_paths():
    finder = get_default_finder()
    reset_collections_loader_state(finder)

    import ansible_collections
    import ansible_collections.ansible
    import ansible_collections.testns

    # ensure the package modules look like we expect
    assert hasattr(ansible_collections, '__path__') and len(ansible_collections.__path__) > 0
    assert hasattr(ansible_collections.ansible, '__path__') and len(ansible_collections.ansible.__path__) > 0
    assert hasattr(ansible_collections.testns, '__path__') and len(ansible_collections.testns.__path__) > 0

    # these shouldn't be visible yet, since we haven't added the playbook dir
    with pytest.raises(ImportError):
        import ansible_collections.ansible.playbook_adj_other

    with pytest.raises(ImportError):
        import ansible_collections.testns.playbook_adj_other

    assert AnsibleCollectionConfig.playbook_paths == []
    playbook_path_fixture_dir = os.path.join(os.path.dirname(__file__), 'fixtures/playbook_path')

    # configure the playbook paths
    AnsibleCollectionConfig.playbook_paths = [playbook_path_fixture_dir]

    # playbook paths go to the front of the line
    assert AnsibleCollectionConfig.collection_paths[0] == os.path.join(playbook_path_fixture_dir, 'collections')

    # playbook paths should be updated on the existing root ansible_collections path, as well as on the 'ansible' namespace (but no others!)
    assert ansible_collections.__path__[0] == os.path.join(playbook_path_fixture_dir, 'collections/ansible_collections')
    assert ansible_collections.ansible.__path__[0] == os.path.join(playbook_path_fixture_dir, 'collections/ansible_collections/ansible')
    assert all('playbook_path' not in p for p in ansible_collections.testns.__path__)

    # should succeed since we fixed up the package path
    import ansible_collections.ansible.playbook_adj_other
    # should succeed since we didn't import freshns before hacking in the path
    import ansible_collections.freshns.playbook_adj_other
    # should fail since we've already imported something from this path and didn't fix up its package path
    with pytest.raises(ImportError):
        import ansible_collections.testns.playbook_adj_other


def test_toplevel_iter_modules():
    finder = get_default_finder()
    reset_collections_loader_state(finder)

    modules = list(pkgutil.iter_modules(default_test_collection_paths, ''))
    assert len(modules) == 1
    assert modules[0][1] == 'ansible_collections'


def test_iter_modules_namespaces():
    finder = get_default_finder()
    reset_collections_loader_state(finder)

    paths = extend_paths(default_test_collection_paths, 'ansible_collections')
    modules = list(pkgutil.iter_modules(paths, 'ansible_collections.'))
    assert len(modules) == 2
    assert all(m[2] is True for m in modules)
    assert all(isinstance(m[0], _AnsiblePathHookFinder) for m in modules)
    assert set(['ansible_collections.testns', 'ansible_collections.ansible']) == set(m[1] for m in modules)


def test_collection_get_data():
    finder = get_default_finder()
    reset_collections_loader_state(finder)

    # something that's there
    d = pkgutil.get_data('ansible_collections.testns.testcoll', 'plugins/action/my_action.py')
    assert b'hello from my_action.py' in d

    # something that's not there
    d = pkgutil.get_data('ansible_collections.testns.testcoll', 'bogus/bogus')
    assert d is None

    with pytest.raises(ValueError):
        plugins_pkg = import_module('ansible_collections.ansible.builtin')
        assert not os.path.exists(os.path.dirname(plugins_pkg.__file__))
        d = pkgutil.get_data('ansible_collections.ansible.builtin', 'plugins/connection/local.py')


@pytest.mark.parametrize(
    'ref,ref_type,expected_collection,expected_subdirs,expected_resource,expected_python_pkg_name',
    [
        ('ns.coll.myaction', 'action', 'ns.coll', '', 'myaction', 'ansible_collections.ns.coll.plugins.action'),
        ('ns.coll.subdir1.subdir2.myaction', 'action', 'ns.coll', 'subdir1.subdir2', 'myaction', 'ansible_collections.ns.coll.plugins.action.subdir1.subdir2'),
        ('ns.coll.myrole', 'role', 'ns.coll', '', 'myrole', 'ansible_collections.ns.coll.roles.myrole'),
        ('ns.coll.subdir1.subdir2.myrole', 'role', 'ns.coll', 'subdir1.subdir2', 'myrole', 'ansible_collections.ns.coll.roles.subdir1.subdir2.myrole'),
    ])
def test_fqcr_parsing_valid(ref, ref_type, expected_collection,
                            expected_subdirs, expected_resource, expected_python_pkg_name):
    assert AnsibleCollectionRef.is_valid_fqcr(ref, ref_type)

    r = AnsibleCollectionRef.from_fqcr(ref, ref_type)
    assert r.collection == expected_collection
    assert r.subdirs == expected_subdirs
    assert r.resource == expected_resource
    assert r.n_python_package_name == expected_python_pkg_name

    r = AnsibleCollectionRef.try_parse_fqcr(ref, ref_type)
    assert r.collection == expected_collection
    assert r.subdirs == expected_subdirs
    assert r.resource == expected_resource
    assert r.n_python_package_name == expected_python_pkg_name


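# Worked example of the valid-FQCR parsing covered by the parametrize cases
# above (illustrative duplicate of the first case):
def test_fqcr_parsing_worked_example():
    r = AnsibleCollectionRef.from_fqcr('ns.coll.myaction', 'action')
    assert r.collection == 'ns.coll'
    assert r.subdirs == ''
    assert r.resource == 'myaction'
    assert r.n_python_package_name == 'ansible_collections.ns.coll.plugins.action'

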
@pytest.mark.parametrize(
    'ref,ref_type,expected_error_type,expected_error_expression',
    [
        ('no_dots_at_all_action', 'action', ValueError, 'is not a valid collection reference'),
        ('no_nscoll.myaction', 'action', ValueError, 'is not a valid collection reference'),
        ('ns.coll.myaction', 'bogus', ValueError, 'invalid collection ref_type'),
    ])
def test_fqcr_parsing_invalid(ref, ref_type, expected_error_type, expected_error_expression):
    assert not AnsibleCollectionRef.is_valid_fqcr(ref, ref_type)

    with pytest.raises(expected_error_type) as curerr:
        AnsibleCollectionRef.from_fqcr(ref, ref_type)

    assert re.search(expected_error_expression, str(curerr.value))

    r = AnsibleCollectionRef.try_parse_fqcr(ref, ref_type)
    assert r is None


@pytest.mark.parametrize(
    'name,subdirs,resource,ref_type,python_pkg_name',
    [
        ('ns.coll', None, 'res', 'doc_fragments', 'ansible_collections.ns.coll.plugins.doc_fragments'),
        ('ns.coll', 'subdir1', 'res', 'doc_fragments', 'ansible_collections.ns.coll.plugins.doc_fragments.subdir1'),
        ('ns.coll', 'subdir1.subdir2', 'res', 'action', 'ansible_collections.ns.coll.plugins.action.subdir1.subdir2'),
    ])
def test_collectionref_components_valid(name, subdirs, resource, ref_type, python_pkg_name):
    x = AnsibleCollectionRef(name, subdirs, resource, ref_type)

    assert x.collection == name
    if subdirs:
        assert x.subdirs == subdirs
    else:
        assert x.subdirs == ''

    assert x.resource == resource
    assert x.ref_type == ref_type
    assert x.n_python_package_name == python_pkg_name


@pytest.mark.parametrize(
    'dirname,expected_result',
    [
        ('become_plugins', 'become'),
        ('cache_plugins', 'cache'),
        ('connection_plugins', 'connection'),
        ('library', 'modules'),
        ('filter_plugins', 'filter'),
        ('bogus_plugins', ValueError),
        (None, ValueError)
    ]
)
def test_legacy_plugin_dir_to_plugin_type(dirname, expected_result):
    if isinstance(expected_result, string_types):
        assert AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type(dirname) == expected_result
    else:
        with pytest.raises(expected_result):
            AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type(dirname)


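# Worked example of the directory-name mapping above (illustrative duplicate):
# the legacy 'library' directory holds modules, so it maps to the 'modules'
# plugin type, while '*_plugins' directories map to their bare plugin type.
def test_legacy_plugin_dir_worked_example():
    assert AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type('library') == 'modules'
    assert AnsibleCollectionRef.legacy_plugin_dir_to_plugin_type('cache_plugins') == 'cache'

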
@pytest.mark.parametrize(
    'name,subdirs,resource,ref_type,expected_error_type,expected_error_expression',
    [
        ('bad_ns', '', 'resource', 'action', ValueError, 'invalid collection name'),
        ('ns.coll.', '', 'resource', 'action', ValueError, 'invalid collection name'),
        ('ns.coll', 'badsubdir#', 'resource', 'action', ValueError, 'invalid subdirs entry'),
        ('ns.coll', 'badsubdir.', 'resource', 'action', ValueError, 'invalid subdirs entry'),
        ('ns.coll', '.badsubdir', 'resource', 'action', ValueError, 'invalid subdirs entry'),
        ('ns.coll', '', 'resource', 'bogus', ValueError, 'invalid collection ref_type'),
    ])
def test_collectionref_components_invalid(name, subdirs, resource, ref_type, expected_error_type, expected_error_expression):
    with pytest.raises(expected_error_type) as curerr:
        AnsibleCollectionRef(name, subdirs, resource, ref_type)

    assert re.search(expected_error_expression, str(curerr.value))


# BEGIN TEST SUPPORT

default_test_collection_paths = [
    os.path.join(os.path.dirname(__file__), 'fixtures', 'collections'),
    os.path.join(os.path.dirname(__file__), 'fixtures', 'collections_masked'),
    '/bogus/bogussub'
]


def get_default_finder():
    return _AnsibleCollectionFinder(paths=default_test_collection_paths)


def extend_paths(path_list, suffix):
    suffix = suffix.replace('.', '/')
    return [os.path.join(p, suffix) for p in path_list]
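# e.g. extend_paths(['/x'], 'ansible_collections.testns')
# returns ['/x/ansible_collections/testns']

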
def nuke_module_prefix(prefix):
    for module_to_nuke in [m for m in sys.modules if m.startswith(prefix)]:
        sys.modules.pop(module_to_nuke)


def reset_collections_loader_state(metapath_finder=None):
    _AnsibleCollectionFinder._remove()

    nuke_module_prefix('ansible_collections')
    nuke_module_prefix('ansible.modules')
    nuke_module_prefix('ansible.plugins')

    # FIXME: better to move this someplace else that gets cleaned up automatically?
    _AnsibleCollectionLoader._redirected_package_map = {}

    AnsibleCollectionConfig._default_collection = None
    AnsibleCollectionConfig._on_collection_load = _EventSource()

    if metapath_finder:
        metapath_finder._install()

@@ -1 +0,0 @@
from ..module_utils.my_util import question

@@ -1,215 +0,0 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import pytest
import re
import sys

from ansible.utils.collection_loader import AnsibleCollectionRef


def test_import_from_collection(monkeypatch):
    collection_root = os.path.join(os.path.dirname(__file__), 'fixtures', 'collections')
    collection_path = os.path.join(collection_root, 'ansible_collections/my_namespace/my_collection/plugins/module_utils/my_util.py')

    # the trace we're expecting to be generated when running the code below:
    # answer = question()
    expected_trace_log = [
        (collection_path, 5, 'call'),
        (collection_path, 6, 'line'),
        (collection_path, 6, 'return'),
    ]

    # define the collection root before any ansible code has been loaded
    # otherwise config will have already been loaded and changing the environment will have no effect
    monkeypatch.setenv('ANSIBLE_COLLECTIONS_PATHS', collection_root)

    from ansible.utils.collection_loader import AnsibleCollectionLoader

    # zap the singleton collection loader instance if it exists
    AnsibleCollectionLoader._Singleton__instance = None

    for index in [idx for idx, obj in enumerate(sys.meta_path) if isinstance(obj, AnsibleCollectionLoader)]:
        # replace any existing collection loaders that may exist
        # since these were loaded during unit test collection
        # they will not have the correct configuration
        sys.meta_path[index] = AnsibleCollectionLoader()

    # make sure the collection loader is installed
    # this will be a no-op if the collection loader is already installed
    # which will depend on whether or not any tests being run imported ansible.plugins.loader during unit test collection
    from ansible.plugins.loader import _configure_collection_loader
    _configure_collection_loader()  # currently redundant, the import above already calls this

    from ansible_collections.my_namespace.my_collection.plugins.module_utils.my_util import question

    original_trace_function = sys.gettrace()
    trace_log = []

    if original_trace_function:
        # enable tracing while preserving the existing trace function (coverage)
        def my_trace_function(frame, event, arg):
            trace_log.append((frame.f_code.co_filename, frame.f_lineno, event))

            # the original trace function expects to have itself set as the trace function
            sys.settrace(original_trace_function)
            # call the original trace function
            original_trace_function(frame, event, arg)
            # restore our trace function
            sys.settrace(my_trace_function)

            return my_trace_function
    else:
        # no existing trace function, so our trace function is much simpler
        def my_trace_function(frame, event, arg):
            trace_log.append((frame.f_code.co_filename, frame.f_lineno, event))

            return my_trace_function

    sys.settrace(my_trace_function)

    try:
        # run a minimal amount of code while the trace is running
        # adding more code here, including use of a context manager, will add more to our trace
        answer = question()
    finally:
        sys.settrace(original_trace_function)

    # make sure 'import ... as ...' works on builtin synthetic collections
    # the following import is not supported (it tries to find module_utils in ansible.plugins)
    # import ansible_collections.ansible.builtin.plugins.module_utils as c1
    import ansible_collections.ansible.builtin.plugins.action as c2
    import ansible_collections.ansible.builtin.plugins as c3
    import ansible_collections.ansible.builtin as c4
    import ansible_collections.ansible as c5
    import ansible_collections as c6

    # make sure 'import ...' works on builtin synthetic collections
    import ansible_collections.ansible.builtin.plugins.module_utils

    import ansible_collections.ansible.builtin.plugins.action
    assert ansible_collections.ansible.builtin.plugins.action == c3.action == c2

    import ansible_collections.ansible.builtin.plugins
    assert ansible_collections.ansible.builtin.plugins == c4.plugins == c3

    import ansible_collections.ansible.builtin
    assert ansible_collections.ansible.builtin == c5.builtin == c4

    import ansible_collections.ansible
    assert ansible_collections.ansible == c6.ansible == c5

    import ansible_collections
    assert ansible_collections == c6

    # make sure 'from ... import ...' works on builtin synthetic collections
    from ansible_collections.ansible import builtin
    from ansible_collections.ansible.builtin import plugins
    assert builtin.plugins == plugins

    from ansible_collections.ansible.builtin.plugins import action
    from ansible_collections.ansible.builtin.plugins.action import command
    assert action.command == command

    from ansible_collections.ansible.builtin.plugins.module_utils import basic
    from ansible_collections.ansible.builtin.plugins.module_utils.basic import AnsibleModule
    assert basic.AnsibleModule == AnsibleModule

    # make sure relative imports work from collections code
    # these require __package__ to be set correctly
    import ansible_collections.my_namespace.my_collection.plugins.module_utils.my_other_util
    import ansible_collections.my_namespace.my_collection.plugins.action.my_action

    # verify that code loaded from a collection does not inherit __future__ statements from the collection loader
    if sys.version_info[0] == 2:
        # if the collection code inherits the division future feature from the collection loader this will fail
        assert answer == 1
    else:
        assert answer == 1.5

    # verify that the filename and line number reported by the trace is correct
    # this makes sure that collection loading preserves file paths and line numbers
    assert trace_log == expected_trace_log


@pytest.mark.parametrize(
    'ref,ref_type,expected_collection,expected_subdirs,expected_resource,expected_python_pkg_name',
    [
        ('ns.coll.myaction', 'action', 'ns.coll', '', 'myaction', 'ansible_collections.ns.coll.plugins.action'),
        ('ns.coll.subdir1.subdir2.myaction', 'action', 'ns.coll', 'subdir1.subdir2', 'myaction', 'ansible_collections.ns.coll.plugins.action.subdir1.subdir2'),
        ('ns.coll.myrole', 'role', 'ns.coll', '', 'myrole', 'ansible_collections.ns.coll.roles.myrole'),
        ('ns.coll.subdir1.subdir2.myrole', 'role', 'ns.coll', 'subdir1.subdir2', 'myrole', 'ansible_collections.ns.coll.roles.subdir1.subdir2.myrole'),
    ])
def test_fqcr_parsing_valid(ref, ref_type, expected_collection,
                            expected_subdirs, expected_resource, expected_python_pkg_name):
    assert AnsibleCollectionRef.is_valid_fqcr(ref, ref_type)

    r = AnsibleCollectionRef.from_fqcr(ref, ref_type)
    assert r.collection == expected_collection
    assert r.subdirs == expected_subdirs
    assert r.resource == expected_resource
    assert r.n_python_package_name == expected_python_pkg_name

    r = AnsibleCollectionRef.try_parse_fqcr(ref, ref_type)
    assert r.collection == expected_collection
    assert r.subdirs == expected_subdirs
    assert r.resource == expected_resource
    assert r.n_python_package_name == expected_python_pkg_name


@pytest.mark.parametrize(
    'ref,ref_type,expected_error_type,expected_error_expression',
    [
        ('no_dots_at_all_action', 'action', ValueError, 'is not a valid collection reference'),
        ('no_nscoll.myaction', 'action', ValueError, 'is not a valid collection reference'),
        ('ns.coll.myaction', 'bogus', ValueError, 'invalid collection ref_type'),
    ])
def test_fqcr_parsing_invalid(ref, ref_type, expected_error_type, expected_error_expression):
    assert not AnsibleCollectionRef.is_valid_fqcr(ref, ref_type)

    with pytest.raises(expected_error_type) as curerr:
        AnsibleCollectionRef.from_fqcr(ref, ref_type)

    assert re.search(expected_error_expression, str(curerr.value))

    r = AnsibleCollectionRef.try_parse_fqcr(ref, ref_type)
    assert r is None


@pytest.mark.parametrize(
    'name,subdirs,resource,ref_type,python_pkg_name',
    [
        ('ns.coll', None, 'res', 'doc_fragments', 'ansible_collections.ns.coll.plugins.doc_fragments'),
        ('ns.coll', 'subdir1', 'res', 'doc_fragments', 'ansible_collections.ns.coll.plugins.doc_fragments.subdir1'),
        ('ns.coll', 'subdir1.subdir2', 'res', 'action', 'ansible_collections.ns.coll.plugins.action.subdir1.subdir2'),
    ])
def test_collectionref_components_valid(name, subdirs, resource, ref_type, python_pkg_name):
    x = AnsibleCollectionRef(name, subdirs, resource, ref_type)

    assert x.collection == name
    if subdirs:
        assert x.subdirs == subdirs
    else:
        assert x.subdirs == ''

    assert x.resource == resource
    assert x.ref_type == ref_type
    assert x.n_python_package_name == python_pkg_name


@pytest.mark.parametrize(
    'name,subdirs,resource,ref_type,expected_error_type,expected_error_expression',
    [
        ('bad_ns', '', 'resource', 'action', ValueError, 'invalid collection name'),
        ('ns.coll.', '', 'resource', 'action', ValueError, 'invalid collection name'),
        ('ns.coll', 'badsubdir#', 'resource', 'action', ValueError, 'invalid subdirs entry'),
        ('ns.coll', 'badsubdir.', 'resource', 'action', ValueError, 'invalid subdirs entry'),
        ('ns.coll', '.badsubdir', 'resource', 'action', ValueError, 'invalid subdirs entry'),
        ('ns.coll', '', 'resource', 'bogus', ValueError, 'invalid collection ref_type'),
    ])
def test_collectionref_components_invalid(name, subdirs, resource, ref_type, expected_error_type, expected_error_expression):
    with pytest.raises(expected_error_type) as curerr:
        AnsibleCollectionRef(name, subdirs, resource, ref_type)

    assert re.search(expected_error_expression, str(curerr.value))