Collection content loading (#52194)

* basic plugin loading working (with many hacks)

* task collections working

* play/block-level collection module/action working

* implement PEP302 loader

* implicit package support (no need for __init__.py in collections)
* provides future options for secure loading of content that shouldn't execute inside controller (eg, actively ignore __init__.py on content/module paths)
* provides hook for synthetic collection setup (eg ansible.core pseudo-collection for specifying built-in plugins without legacy path, etc); a minimal sketch of the loader pattern follows below
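A minimal sketch of that PEP 302 meta_path pattern (illustrative only; the class name, namespace, and paths here are hypothetical, not this commit's API):

import os
import sys
from types import ModuleType

class NamespaceOnlyLoader(object):
    # PEP 302 finder/loader: claims one package prefix and constructs its modules by
    # hand, so no on-disk __init__.py is ever executed implicitly
    def __init__(self, prefix, search_paths):
        self._prefix = prefix
        self._paths = search_paths

    def find_module(self, fullname, path=None):
        # return self to claim the import, None to let other loaders try
        if fullname == self._prefix or fullname.startswith(self._prefix + '.'):
            return self
        return None

    def load_module(self, fullname):
        if fullname in sys.modules:
            return sys.modules[fullname]
        mod = ModuleType(fullname)
        mod.__loader__ = self
        # implicit package support: a bare directory is enough, no __init__.py needed
        mod.__path__ = [os.path.join(p, *fullname.split('.')) for p in self._paths]
        sys.modules[fullname] = mod
        return mod

sys.meta_path.insert(0, NamespaceOnlyLoader('my_namespace', ['/tmp/content']))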

* synthetic package support

* ansible.core.plugins mapping works, others don't

* synthetic collections working for modules/actions

* fix direct-load legacy

* change base package name to ansible_collections

* note

* collection role loading

* expand paths from installed content root vars

* feature complete?

* rename ansible.core to ansible.builtin

* and various sanity fixes

* sanity tweaks

* unittest fixes

* less grabby error handler on has_plugin

* probably need to replace with a narrower handler or harden callers

* fix win_ping test

* disable module test with explicit file extension; might be able to support in some scenarios, but can't see any other tests that verify that behavior...

* fix unicode conversion issues on py2

* attempt to keep things working-ish on py2.6

* python2.6 test fun round 2

* rename dirs/configs to "collections"

* add wrapper dir for content-adjacent

* fix pythoncheck to use localhost

* unicode tweaks, native/bytes string prefixing

* rename COLLECTION_PATHS to COLLECTIONS_PATHS

* switch to pathspec

* path handling cleanup

* change expensive `all` back to or chain

* unused import cleanup

* quotes tweak

* use wrapped iter/len in Jinja proxy

* var name expansion

* comment seemingly overcomplicated playbook_paths resolution

* drop unnecessary conditional nesting

* eliminate extraneous local

* zap superfluous validation function

* use slice for rolespec NS assembly

* misc naming/unicode fixes

* collection callback loader asks if valid FQ name instead of just '.'
* switch collection role resolution behavior to be internally `text` as much as possible

* misc fixmes

* to_native in exception constructor
* (slightly) detangle tuple accumulation mess in module_utils __init__ walker

* more misc fixmes

* tighten up action dispatch, add unqualified action test

* rename Collection mixin to CollectionSearch

* (attempt to) avoid potential confusion/conflict with builtin collections, etc

* stale fixmes

* tighten up pluginloader collections determination

* sanity test fixes

* ditch regex escape

* clarify comment

* update default collections paths config entry

* use PATH format instead of list

* skip integration tests on Python 2.6

ci_complete
Matt Davis 2019-03-28 10:41:39 -07:00 committed by GitHub
parent 5173548a9f
commit f86345f777
56 changed files with 1512 additions and 109 deletions

@@ -0,0 +1,4 @@
major_changes:
- Experimental support for Ansible Collections and content namespacing - Ansible content can now be packaged in a
collection and addressed via namespaces. This allows for easier sharing, distribution, and installation of bundled
modules/roles/plugins, and consistent rules for accessing specific content via namespaces.

@@ -16,8 +16,10 @@ from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.module_utils._text import to_bytes
from ansible.playbook.block import Block
from ansible.utils.display import Display
from ansible.utils.collection_loader import set_collection_playbook_paths
from ansible.plugins.loader import add_all_plugin_dirs
display = Display()
@@ -76,19 +78,21 @@ class PlaybookCLI(CLI):
# initial error check, to make sure all specified playbooks are accessible
# before we start running anything through the playbook executor
b_playbook_dirs = []
for playbook in context.CLIARGS['args']:
if not os.path.exists(playbook):
raise AnsibleError("the playbook: %s could not be found" % playbook)
if not (os.path.isfile(playbook) or stat.S_ISFIFO(os.stat(playbook).st_mode)):
raise AnsibleError("the playbook: %s does not appear to be a file" % playbook)
b_playbook_dir = os.path.dirname(os.path.abspath(to_bytes(playbook, errors='surrogate_or_strict')))
# load plugins from all playbooks in case they add callbacks/inventory/etc
add_all_plugin_dirs(
os.path.dirname(
os.path.abspath(
to_bytes(playbook, errors='surrogate_or_strict')
)
)
)
add_all_plugin_dirs(b_playbook_dir)
b_playbook_dirs.append(b_playbook_dir)
set_collection_playbook_paths(b_playbook_dirs)
# don't deal with privilege escalation or passwords when we don't need to
if not (context.CLIARGS['listhosts'] or context.CLIARGS['listtasks'] or

@@ -215,6 +215,14 @@ CACHE_PLUGIN_TIMEOUT:
- {key: fact_caching_timeout, section: defaults}
type: integer
yaml: {key: facts.cache.timeout}
COLLECTIONS_PATHS:
name: ordered list of root paths for loading installed Ansible collections content
default: ~/.ansible/collections:/usr/share/ansible/collections
type: pathspec
env:
- {name: ANSIBLE_COLLECTIONS_PATHS}
ini:
- {key: collections_paths, section: defaults}
COLOR_CHANGED:
name: Color for 'changed' task status
default: yellow
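The new pathspec config type behaves like a PATH-style variable. A rough sketch of the expansion (assumed behavior for illustration, not the config machinery itself):

import os

def expand_pathspec(value):
    # split on the platform path separator (':' on POSIX) and expand '~'
    return [os.path.expanduser(p) for p in value.split(os.pathsep)]

print(expand_pathspec('~/.ansible/collections:/usr/share/ansible/collections'))
# eg ['/home/user/.ansible/collections', '/usr/share/ansible/collections']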

@@ -29,6 +29,7 @@ import os
import shlex
import zipfile
import re
import pkgutil
from io import BytesIO
from ansible.release import __version__, __author__
@@ -45,6 +46,18 @@ from ansible.executor import action_write_locks
from ansible.utils.display import Display
# HACK: keep Python 2.6 controller tests happy in CI until they're properly split
try:
from importlib import import_module
except ImportError:
import_module = __import__
# if we're on a Python that doesn't have FNFError, redefine it as IOError (since that's what we'll see)
try:
FileNotFoundError
except NameError:
FileNotFoundError = IOError
display = Display()
REPLACER = b"#<<INCLUDE_ANSIBLE_MODULE_COMMON>>"
@@ -429,10 +442,14 @@ class ModuleDepFinder(ast.NodeVisitor):
def visit_Import(self, node):
# import ansible.module_utils.MODLIB[.MODLIBn] [as asname]
for alias in (a for a in node.names if a.name.startswith('ansible.module_utils.')):
py_mod = alias.name[self.IMPORT_PREFIX_SIZE:]
py_mod = tuple(py_mod.split('.'))
self.submodules.add(py_mod)
for alias in node.names:
if alias.name.startswith('ansible.module_utils.'):
py_mod = alias.name[self.IMPORT_PREFIX_SIZE:]
py_mod = tuple(py_mod.split('.'))
self.submodules.add(py_mod)
elif alias.name.startswith('ansible_collections.'):
# keep 'ansible_collections.' as a sentinel prefix to trigger collection-loaded MU path
self.submodules.add(tuple(alias.name.split('.')))
self.generic_visit(node)
def visit_ImportFrom(self, node):
@@ -453,6 +470,10 @@ class ModuleDepFinder(ast.NodeVisitor):
# from ansible.module_utils import MODLIB [,MODLIB2] [as asname]
for alias in node.names:
self.submodules.add((alias.name,))
elif node.module.startswith('ansible_collections.'):
# TODO: finish out the subpackage et al cases
self.submodules.add(tuple(node.module.split('.')))
self.generic_visit(node)
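# Illustrative note (comment only, not part of the commit): with these changes,
# 'import ansible_collections.testns.testcoll.plugins.module_utils.base' records the tuple
# ('ansible_collections', 'testns', 'testcoll', 'plugins', 'module_utils', 'base'); the leading
# 'ansible_collections' element is the sentinel that recursive_finder keys on below.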
@@ -555,6 +576,20 @@ def recursive_finder(name, data, py_module_names, py_module_cache, zf):
module_info = imp.find_module('_six', [os.path.join(p, 'six') for p in module_utils_paths])
py_module_name = ('six', '_six')
idx = 0
elif py_module_name[0] == 'ansible_collections':
# FIXME: replicate module name resolution like below for granular imports
# this is a collection-hosted MU; look it up with get_data
package_name = '.'.join(py_module_name[:-1])
resource_name = py_module_name[-1] + '.py'
try:
# FIXME: need this in py2 for some reason TBD, but we shouldn't (get_data delegates to wrong loader without it)
pkg = import_module(package_name)
module_info = pkgutil.get_data(package_name, resource_name)
except FileNotFoundError:
# FIXME: implement package fallback code
raise AnsibleError('unable to load collection-hosted module_util {0}.{1}'.format(to_native(package_name),
to_native(resource_name)))
idx = 0
else:
# Check whether either the last or the second to last identifier is
# a module name
@@ -577,56 +612,78 @@ def recursive_finder(name, data, py_module_names, py_module_cache, zf):
msg.append(py_module_name[-1])
raise AnsibleError(' '.join(msg))
# Found a byte compiled file rather than source. We cannot send byte
# compiled over the wire as the python version might be different.
# imp.find_module seems to prefer to return source packages so we just
# error out if imp.find_module returns byte compiled files (This is
# fragile as it depends on undocumented imp.find_module behaviour)
if module_info[2][2] not in (imp.PY_SOURCE, imp.PKG_DIRECTORY):
msg = ['Could not find python source for imported module support code for %s. Looked for' % name]
if isinstance(module_info, bytes): # collection-hosted, just the code
# HACK: maybe surface collection dirs in here and use existing find_module code?
normalized_name = py_module_name
normalized_data = module_info
normalized_path = os.path.join(*py_module_name)
py_module_cache[normalized_name] = (normalized_data, normalized_path)
normalized_modules.add(normalized_name)
# HACK: walk back up the package hierarchy to pick up package inits; this won't do the right thing
# for actual packages yet...
accumulated_pkg_name = []
for pkg in py_module_name[:-1]:
accumulated_pkg_name.append(pkg) # we're accumulating this across iterations
normalized_name = tuple(accumulated_pkg_name[:] + ['__init__']) # extra machinations to get a hashable type (list is not)
if normalized_name not in py_module_cache:
normalized_path = os.path.join(*accumulated_pkg_name)
# HACK: possibly preserve some of the actual package file contents; problematic for extend_paths and others though?
normalized_data = ''
py_module_cache[normalized_name] = (normalized_data, normalized_path)
normalized_modules.add(normalized_name)
else:
# Found a byte compiled file rather than source. We cannot send byte
# compiled over the wire as the python version might be different.
# imp.find_module seems to prefer to return source packages so we just
# error out if imp.find_module returns byte compiled files (This is
# fragile as it depends on undocumented imp.find_module behaviour)
if module_info[2][2] not in (imp.PY_SOURCE, imp.PKG_DIRECTORY):
msg = ['Could not find python source for imported module support code for %s. Looked for' % name]
if idx == 2:
msg.append('either %s.py or %s.py' % (py_module_name[-1], py_module_name[-2]))
else:
msg.append(py_module_name[-1])
raise AnsibleError(' '.join(msg))
if idx == 2:
msg.append('either %s.py or %s.py' % (py_module_name[-1], py_module_name[-2]))
else:
msg.append(py_module_name[-1])
raise AnsibleError(' '.join(msg))
# We've determined that the last portion was an identifier and
# thus, not part of the module name
py_module_name = py_module_name[:-1]
if idx == 2:
# We've determined that the last portion was an identifier and
# thus, not part of the module name
py_module_name = py_module_name[:-1]
# If not already processed then we've got work to do
# If not in the cache, then read the file into the cache
# We already have a file handle for the module open so it makes
# sense to read it now
if py_module_name not in py_module_cache:
if module_info[2][2] == imp.PKG_DIRECTORY:
# Read the __init__.py instead of the module file as this is
# a python package
normalized_name = py_module_name + ('__init__',)
if normalized_name not in py_module_names:
normalized_path = os.path.join(module_info[1], '__init__.py')
normalized_data = _slurp(normalized_path)
py_module_cache[normalized_name] = (normalized_data, normalized_path)
normalized_modules.add(normalized_name)
else:
normalized_name = py_module_name
if normalized_name not in py_module_names:
normalized_path = module_info[1]
normalized_data = module_info[0].read()
module_info[0].close()
py_module_cache[normalized_name] = (normalized_data, normalized_path)
normalized_modules.add(normalized_name)
# If not already processed then we've got work to do
# If not in the cache, then read the file into the cache
# We already have a file handle for the module open so it makes
# sense to read it now
if py_module_name not in py_module_cache:
if module_info[2][2] == imp.PKG_DIRECTORY:
# Read the __init__.py instead of the module file as this is
# a python package
normalized_name = py_module_name + ('__init__',)
if normalized_name not in py_module_names:
normalized_path = os.path.join(os.path.join(module_info[1], '__init__.py'))
normalized_data = _slurp(normalized_path)
py_module_cache[normalized_name] = (normalized_data, normalized_path)
normalized_modules.add(normalized_name)
else:
normalized_name = py_module_name
if normalized_name not in py_module_names:
normalized_path = module_info[1]
normalized_data = module_info[0].read()
module_info[0].close()
py_module_cache[normalized_name] = (normalized_data, normalized_path)
normalized_modules.add(normalized_name)
# Make sure that all the packages that this module is a part of
# are also added
for i in range(1, len(py_module_name)):
py_pkg_name = py_module_name[:-i] + ('__init__',)
if py_pkg_name not in py_module_names:
pkg_dir_info = imp.find_module(py_pkg_name[-1],
[os.path.join(p, *py_pkg_name[:-1]) for p in module_utils_paths])
normalized_modules.add(py_pkg_name)
py_module_cache[py_pkg_name] = (_slurp(pkg_dir_info[1]), pkg_dir_info[1])
# Make sure that all the packages that this module is a part of
# are also added
for i in range(1, len(py_module_name)):
py_pkg_name = py_module_name[:-i] + ('__init__',)
if py_pkg_name not in py_module_names:
pkg_dir_info = imp.find_module(py_pkg_name[-1],
[os.path.join(p, *py_pkg_name[:-1]) for p in module_utils_paths])
normalized_modules.add(py_pkg_name)
py_module_cache[py_pkg_name] = (_slurp(pkg_dir_info[1]), pkg_dir_info[1])
# FIXME: Currently the AnsiBallZ wrapper monkeypatches module args into a global
# variable in basic.py. If a module doesn't import basic.py, then the AnsiBallZ wrapper will
@@ -653,10 +710,16 @@ def recursive_finder(name, data, py_module_names, py_module_cache, zf):
unprocessed_py_module_names = normalized_modules.difference(py_module_names)
for py_module_name in unprocessed_py_module_names:
# HACK: this seems to work as a way to identify a collections-based import, but a stronger identifier would be better
if not py_module_cache[py_module_name][1].startswith('/'):
dir_prefix = ''
else:
dir_prefix = 'ansible/module_utils'
py_module_path = os.path.join(*py_module_name)
py_module_file_name = '%s.py' % py_module_path
zf.writestr(os.path.join("ansible/module_utils",
zf.writestr(os.path.join(dir_prefix,
py_module_file_name), py_module_cache[py_module_name][0])
display.vvvvv("Using module_utils file %s" % py_module_cache[py_module_name][1])
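# Illustrative note (comment only, not part of the commit): legacy module_utils land in the
# payload zip under 'ansible/module_utils/...', while collection-hosted ones keep their full
# 'ansible_collections/...' path so the payload's own absolute imports still resolve.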

@@ -1013,13 +1013,18 @@ class TaskExecutor:
module_prefix = self._task.action.split('_')[0]
collections = self._task.collections
# let action plugin override module, fallback to 'normal' action plugin otherwise
if self._task.action in self._shared_loader_obj.action_loader:
if self._shared_loader_obj.action_loader.has_plugin(self._task.action, collection_list=collections):
handler_name = self._task.action
# FIXME: is this code path even live anymore? check w/ networking folks; it trips sometimes when it shouldn't
elif all((module_prefix in C.NETWORK_GROUP_MODULES, module_prefix in self._shared_loader_obj.action_loader)):
handler_name = module_prefix
else:
# FUTURE: once we're comfortable with collections impl, preface this action with ansible.builtin so it can't be hijacked
handler_name = 'normal'
collections = None # until then, we don't want the task's collection list to be consulted; use the builtin
handler = self._shared_loader_obj.action_loader.get(
handler_name,
@@ -1029,6 +1034,7 @@ class TaskExecutor:
loader=self._loader,
templar=templar,
shared_loader_obj=self._shared_loader_obj,
collection_list=collections
)
if not handler:

@@ -36,6 +36,7 @@ from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import callback_loader, strategy_loader, module_loader
from ansible.plugins.callback import CallbackBase
from ansible.template import Templar
from ansible.utils.collection_loader import is_collection_ref
from ansible.utils.helpers import pct_to_int
from ansible.vars.hostvars import HostVars
from ansible.vars.reserved import warn_if_reserved
@@ -167,6 +168,10 @@ class TaskQueueManager:
" see the 2.4 porting guide for details." % callback_obj._load_name, version="2.9")
self._callback_plugins.append(callback_obj)
for callback_plugin_name in (c for c in C.DEFAULT_CALLBACK_WHITELIST if is_collection_ref(c)):
callback_obj = callback_loader.get(callback_plugin_name)
self._callback_plugins.append(callback_obj)
self._callbacks_loaded = True
def run(self, play):

@@ -108,12 +108,13 @@ class ModuleArgsParser:
Args may also be munged for certain shell command parameters.
"""
def __init__(self, task_ds=None):
def __init__(self, task_ds=None, collection_list=None):
task_ds = {} if task_ds is None else task_ds
if not isinstance(task_ds, dict):
raise AnsibleAssertionError("the type of 'task_ds' should be a dict, but is a %s" % type(task_ds))
self._task_ds = task_ds
self._collection_list = collection_list
def _split_module_string(self, module_string):
'''
@@ -287,7 +288,8 @@ class ModuleArgsParser:
# walk the input dictionary to see we recognize a module name
for (item, value) in iteritems(self._task_ds):
if item in BUILTIN_TASKS or item in action_loader or item in module_loader:
if item in BUILTIN_TASKS or action_loader.has_plugin(item, collection_list=self._collection_list) or \
module_loader.has_plugin(item, collection_list=self._collection_list):
# finding more than one module name is a problem
if action is not None:
raise AnsibleParserError("conflicting action statements: %s, %s" % (action, item), obj=self._task_ds)

@@ -23,7 +23,7 @@ import os
from ansible import constants as C
from ansible.errors import AnsibleParserError
from ansible.module_utils._text import to_bytes, to_text, to_native
from ansible.module_utils._text import to_text, to_native
from ansible.playbook.play import Play
from ansible.playbook.playbook_include import PlaybookInclude
from ansible.utils.display import Display

@@ -24,13 +24,14 @@ from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
from ansible.playbook.conditional import Conditional
from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.helpers import load_list_of_tasks
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
from ansible.utils.sentinel import Sentinel
class Block(Base, Become, Conditional, Taggable):
class Block(Base, Become, Conditional, CollectionSearch, Taggable):
# main block fields containing the task lists
_block = FieldAttribute(isa='list', default=list, inherit=False)

@@ -0,0 +1,26 @@
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.six import string_types
from ansible.playbook.attribute import FieldAttribute
class CollectionSearch:
# this needs to be populated before we can resolve tasks/roles/etc
_collections = FieldAttribute(isa='list', listof=string_types, priority=100)
def _load_collections(self, attr, ds):
if not ds:
# if empty/None, just return whatever was there; legacy behavior will do the right thing
return ds
if not isinstance(ds, list):
ds = [ds]
if 'ansible.builtin' not in ds and 'ansible.legacy' not in ds:
ds.append('ansible.legacy')
return ds
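A standalone illustration of the normalization _load_collections applies (assumed-equivalent logic for demonstration, not the class itself):

def normalize_collections(ds):
    if not ds:
        return ds                    # unset/empty: legacy behavior takes over
    if not isinstance(ds, list):
        ds = [ds]                    # a scalar like 'testns.testcoll' becomes a one-item list
    if 'ansible.builtin' not in ds and 'ansible.legacy' not in ds:
        ds.append('ansible.legacy')  # keep legacy search available unless explicitly pinned
    return ds

print(normalize_collections('testns.testcoll'))
# ['testns.testcoll', 'ansible.legacy']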

@@ -117,7 +117,10 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
)
task_list.append(t)
else:
args_parser = ModuleArgsParser(task_ds)
collection_list = task_ds.get('collections')
if collection_list is None and block is not None and block.collections:
collection_list = block.collections
args_parser = ModuleArgsParser(task_ds, collection_list=collection_list)
try:
(action, args, delegate_to) = args_parser.parse()
except AnsibleParserError as e:
@@ -382,7 +385,8 @@ def load_list_of_roles(ds, play, current_role_path=None, variable_manager=None,
roles = []
for role_def in ds:
i = RoleInclude.load(role_def, play=play, current_role_path=current_role_path, variable_manager=variable_manager, loader=loader)
i = RoleInclude.load(role_def, play=play, current_role_path=current_role_path, variable_manager=variable_manager,
loader=loader, collection_list=play.collections)
roles.append(i)
return roles

@@ -27,6 +27,7 @@ from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
from ansible.playbook.block import Block
from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.helpers import load_list_of_blocks, load_list_of_roles
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
@@ -39,7 +40,7 @@ display = Display()
__all__ = ['Play']
class Play(Base, Taggable, Become):
class Play(Base, Taggable, Become, CollectionSearch):
"""
A play is a language feature that represents a list of roles and/or

@@ -27,6 +27,7 @@ from ansible.module_utils.common._collections_compat import Container, Mapping,
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.conditional import Conditional
from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.role.metadata import RoleMetadata
@@ -91,7 +92,7 @@ def hash_params(params):
return frozenset((params,))
class Role(Base, Become, Conditional, Taggable):
class Role(Base, Become, Conditional, Taggable, CollectionSearch):
_delegate_to = FieldAttribute(isa='string')
_delegate_facts = FieldAttribute(isa='bool')
@@ -99,6 +100,7 @@ class Role(Base, Become, Conditional, Taggable):
def __init__(self, play=None, from_files=None, from_include=False):
self._role_name = None
self._role_path = None
self._role_collection = None
self._role_params = dict()
self._loader = None
@@ -166,6 +168,7 @@ class Role(Base, Become, Conditional, Taggable):
if role_include.role not in play.ROLE_CACHE:
play.ROLE_CACHE[role_include.role] = dict()
# FIXME: how to handle cache keys for collection-based roles, since they're technically adjustable per task?
play.ROLE_CACHE[role_include.role][hashed_params] = r
return r
@@ -176,6 +179,7 @@ class Role(Base, Become, Conditional, Taggable):
def _load_role_data(self, role_include, parent_role=None):
self._role_name = role_include.role
self._role_path = role_include.get_role_path()
self._role_collection = role_include._role_collection
self._role_params = role_include.get_role_params()
self._variable_manager = role_include.get_variable_manager()
self._loader = role_include.get_loader()
@@ -194,9 +198,6 @@ class Role(Base, Become, Conditional, Taggable):
else:
self._attributes[attr_name] = role_include._attributes[attr_name]
# ensure all plugins dirs for this role are added to plugin search path
add_all_plugin_dirs(self._role_path)
# vars and default vars are regular dictionaries
self._role_vars = self._load_role_yaml('vars', main=self._from_files.get('vars'), allow_dir=True)
if self._role_vars is None:
@@ -218,6 +219,29 @@ class Role(Base, Become, Conditional, Taggable):
else:
self._metadata = RoleMetadata()
# reset collections list; roles do not inherit collections from parents, just use the defaults
# FUTURE: use a private config default for this so we can allow it to be overridden later
self.collections = []
# configure plugin/collection loading; either prepend the current role's collection or configure legacy plugin loading
# FIXME: need exception for explicit ansible.legacy?
if self._role_collection:
self.collections.insert(0, self._role_collection)
else:
# legacy role, ensure all plugin dirs under the role are added to plugin search path
add_all_plugin_dirs(self._role_path)
# collections can be specified in metadata for legacy or collection-hosted roles
if self._metadata.collections:
self.collections.extend(self._metadata.collections)
# if any collections were specified, ensure that core or legacy synthetic collections are always included
if self.collections:
# default append collection is core for collection-hosted roles, legacy for others
default_append_collection = 'ansible.builtin' if self._role_collection else 'ansible.legacy'
if 'ansible.builtin' not in self.collections and 'ansible.legacy' not in self.collections:
self.collections.append(default_append_collection)
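# Illustrative note (comment only, not part of the commit): a role hosted in collection
# testns.testcoll with no metadata collections ends up with
# self.collections == ['testns.testcoll', 'ansible.builtin'], while a legacy role listing
# collections in meta/main.yml gets 'ansible.legacy' appended instead.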
task_data = self._load_role_yaml('tasks', main=self._from_files.get('tasks'))
if task_data:
try:

@@ -28,9 +28,11 @@ from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.conditional import Conditional
from ansible.playbook.taggable import Taggable
from ansible.template import Templar
from ansible.utils.collection_loader import get_collection_role_path, is_collection_ref
from ansible.utils.path import unfrackpath
from ansible.utils.display import Display
@@ -39,11 +41,11 @@ __all__ = ['RoleDefinition']
display = Display()
class RoleDefinition(Base, Become, Conditional, Taggable):
class RoleDefinition(Base, Become, Conditional, Taggable, CollectionSearch):
_role = FieldAttribute(isa='string')
def __init__(self, play=None, role_basedir=None, variable_manager=None, loader=None):
def __init__(self, play=None, role_basedir=None, variable_manager=None, loader=None, collection_list=None):
super(RoleDefinition, self).__init__()
@@ -52,8 +54,10 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
self._loader = loader
self._role_path = None
self._role_collection = None
self._role_basedir = role_basedir
self._role_params = dict()
self._collection_list = collection_list
# def __repr__(self):
# return 'ROLEDEF: ' + self._attributes.get('role', '<no name set>')
@@ -139,6 +143,31 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
append it to the default role path
'''
# create a templar class to template the dependency names, in
# case they contain variables
if self._variable_manager is not None:
all_vars = self._variable_manager.get_vars(play=self._play)
else:
all_vars = dict()
templar = Templar(loader=self._loader, variables=all_vars)
role_name = templar.template(role_name)
role_tuple = None
# try to load as a collection-based role first
if self._collection_list or is_collection_ref(role_name):
role_tuple = get_collection_role_path(role_name, self._collection_list)
if role_tuple:
# we found it, stash collection data and return the name/path tuple
self._role_collection = role_tuple[2]
return role_tuple[0:2]
# FUTURE: refactor this to be callable from internal so we can properly order ansible.legacy searches with the collections keyword
if self._collection_list and 'ansible.legacy' not in self._collection_list:
raise AnsibleError("the role '%s' was not found in %s" % (role_name, ":".join(self._collection_list)), obj=self._ds)
# we always start the search for roles in the base directory of the playbook
role_search_paths = [
os.path.join(self._loader.get_basedir(), u'roles'),
@@ -158,16 +187,6 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
# the roles/ dir appended
role_search_paths.append(self._loader.get_basedir())
# create a templar class to template the dependency names, in
# case they contain variables
if self._variable_manager is not None:
all_vars = self._variable_manager.get_vars(play=self._play)
else:
all_vars = dict()
templar = Templar(loader=self._loader, variables=all_vars)
role_name = templar.template(role_name)
# now iterate through the possible paths and return the first one we find
for path in role_search_paths:
path = templar.template(path)

@@ -43,11 +43,12 @@ class RoleInclude(RoleDefinition):
_delegate_to = FieldAttribute(isa='string')
_delegate_facts = FieldAttribute(isa='bool', default=False)
def __init__(self, play=None, role_basedir=None, variable_manager=None, loader=None):
super(RoleInclude, self).__init__(play=play, role_basedir=role_basedir, variable_manager=variable_manager, loader=loader)
def __init__(self, play=None, role_basedir=None, variable_manager=None, loader=None, collection_list=None):
super(RoleInclude, self).__init__(play=play, role_basedir=role_basedir, variable_manager=variable_manager,
loader=loader, collection_list=collection_list)
@staticmethod
def load(data, play, current_role_path=None, parent_role=None, variable_manager=None, loader=None):
def load(data, play, current_role_path=None, parent_role=None, variable_manager=None, loader=None, collection_list=None):
if not (isinstance(data, string_types) or isinstance(data, dict) or isinstance(data, AnsibleBaseYAMLObject)):
raise AnsibleParserError("Invalid role definition: %s" % to_native(data))
@@ -55,5 +56,5 @@ class RoleInclude(RoleDefinition):
if isinstance(data, string_types) and ',' in data:
raise AnsibleError("Invalid old style role requirement: %s" % data)
ri = RoleInclude(play=play, role_basedir=current_role_path, variable_manager=variable_manager, loader=loader)
ri = RoleInclude(play=play, role_basedir=current_role_path, variable_manager=variable_manager, loader=loader, collection_list=collection_list)
return ri.load_data(data, variable_manager=variable_manager, loader=loader)

@@ -23,17 +23,17 @@ import os
from ansible.errors import AnsibleParserError, AnsibleError
from ansible.module_utils._text import to_native
from ansible.module_utils.six import iteritems, string_types
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.module_utils.six import string_types
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.helpers import load_list_of_roles
from ansible.playbook.role.include import RoleInclude
from ansible.playbook.role.requirement import RoleRequirement
__all__ = ['RoleMetadata']
class RoleMetadata(Base):
class RoleMetadata(Base, CollectionSearch):
'''
This class wraps the parsing and validation of the optional metadata
within each Role (meta/main.yml).
@@ -105,7 +105,7 @@ class RoleMetadata(Base):
def serialize(self):
return dict(
allow_duplicates=self._allow_duplicates,
dependencies=self._dependencies,
dependencies=self._dependencies
)
def deserialize(self, data):

@@ -73,7 +73,7 @@ class IncludeRole(TaskInclude):
else:
myplay = play
ri = RoleInclude.load(self._role_name, play=myplay, variable_manager=variable_manager, loader=loader)
ri = RoleInclude.load(self._role_name, play=myplay, variable_manager=variable_manager, loader=loader, collection_list=self.collections)
ri.vars.update(self.vars)
# build role
@@ -97,9 +97,14 @@ class IncludeRole(TaskInclude):
p_block = self.build_parent_block()
# collections value is not inherited; override with the value we calculated during role setup
p_block.collections = actual_role.collections
blocks = actual_role.compile(play=myplay, dep_chain=dep_chain)
for b in blocks:
b._parent = p_block
# HACK: parent inheritance doesn't seem to have a way to handle this intermediate override until squashed/finalized
b.collections = actual_role.collections
# updated available handlers in play
handlers = actual_role.get_handler_blocks(play=myplay)

@@ -32,6 +32,7 @@ from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.become import Become
from ansible.playbook.block import Block
from ansible.playbook.collectionsearch import CollectionSearch
from ansible.playbook.conditional import Conditional
from ansible.playbook.loop_control import LoopControl
from ansible.playbook.role import Role
@@ -44,7 +45,7 @@ __all__ = ['Task']
display = Display()
class Task(Base, Conditional, Taggable, Become):
class Task(Base, Conditional, Taggable, Become, CollectionSearch):
"""
A task is a language feature that represents a call to a module, with given arguments and other parameters.
@@ -180,7 +181,7 @@ class Task(Base, Conditional, Taggable, Become):
# use the args parsing class to determine the action, args,
# and the delegate_to value from the various possible forms
# supported as legacy
args_parser = ModuleArgsParser(task_ds=ds)
args_parser = ModuleArgsParser(task_ds=ds, collection_list=self.collections)
try:
(action, args, delegate_to) = args_parser.parse()
except AnsibleParserError as e:

@@ -165,7 +165,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
if key in module_args:
module_args[key] = self._connection._shell._unquote(module_args[key])
module_path = self._shared_loader_obj.module_loader.find_plugin(module_name, mod_type)
module_path = self._shared_loader_obj.module_loader.find_plugin(module_name, mod_type, collection_list=self._task.collections)
if module_path:
break
else: # This is a for-else: http://bit.ly/1ElPkyg

@@ -11,6 +11,7 @@ import glob
import imp
import os
import os.path
import pkgutil
import sys
import warnings
@@ -23,9 +24,15 @@ from ansible.module_utils.six import string_types
from ansible.parsing.utils.yaml import from_yaml
from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.plugins import get_plugin_class, MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE
from ansible.utils.collection_loader import AnsibleCollectionLoader, AnsibleFlatMapLoader, is_collection_ref
from ansible.utils.display import Display
from ansible.utils.plugin_docs import add_fragments
# HACK: keep Python 2.6 controller tests happy in CI until they're properly split
try:
from importlib import import_module
except ImportError:
import_module = __import__
display = Display()
@@ -298,7 +305,69 @@ class PluginLoader:
self._clear_caches()
display.debug('Added %s to loader search path' % (directory))
def _find_plugin(self, name, mod_type='', ignore_deprecated=False, check_aliases=False):
def _find_fq_plugin(self, fq_name, extension):
fq_name = to_native(fq_name)
# prefix our extension Python namespace if it isn't already there
if not fq_name.startswith('ansible_collections.'):
fq_name = 'ansible_collections.' + fq_name
splitname = fq_name.rsplit('.', 1)
if len(splitname) != 2:
raise ValueError('{0} is not a valid namespace-qualified plugin name'.format(to_native(fq_name)))
package = splitname[0]
resource = splitname[1]
append_plugin_type = self.class_name or self.subdir
if append_plugin_type:
# only current non-class special case, module_utils don't use this loader method
if append_plugin_type == 'library':
append_plugin_type = 'modules'
else:
append_plugin_type = get_plugin_class(append_plugin_type)
package += '.plugins.{0}'.format(append_plugin_type)
if extension:
resource += extension
pkg = sys.modules.get(package)
if not pkg:
# FIXME: there must be cheaper/safer way to do this
pkg = import_module(package)
# if the package is one of our flatmaps, we need to consult its loader to find the path, since the file could be
# anywhere in the tree
if hasattr(pkg, '__loader__') and isinstance(pkg.__loader__, AnsibleFlatMapLoader):
try:
file_path = pkg.__loader__.find_file(resource)
return to_text(file_path)
except IOError:
# this loader already takes care of extensionless files, so if we didn't find it, just bail
return None
pkg_path = os.path.dirname(pkg.__file__)
resource_path = os.path.join(pkg_path, resource)
# FIXME: and is file or file link or ...
if os.path.exists(resource_path):
return to_text(resource_path)
# look for any matching extension in the package location (sans filter)
ext_blacklist = ['.pyc', '.pyo']
found_files = [f for f in glob.iglob(os.path.join(pkg_path, resource) + '.*') if os.path.isfile(f) and os.path.splitext(f)[1] not in ext_blacklist]
if not found_files:
return None
if len(found_files) > 1:
# TODO: warn?
pass
return to_text(found_files[0])
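# Illustrative example (comment only, not part of the commit): for the action loader
# (class_name 'ActionModule'), _find_fq_plugin('testns.testcoll.myaction', '.py') builds the
# package name 'ansible_collections.testns.testcoll.plugins.action' and returns the path of
# 'myaction.py' (or any non-blacklisted extension match) within that package's directory.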
def _find_plugin(self, name, mod_type='', ignore_deprecated=False, check_aliases=False, collection_list=None):
''' Find a plugin named name '''
global _PLUGIN_FILTERS
@@ -315,6 +384,38 @@ class PluginLoader:
# they can have any suffix
suffix = ''
# HACK: need this right now so we can still load shipped PS module_utils
if (is_collection_ref(name) or collection_list) and not name.startswith('Ansible'):
if '.' in name or not collection_list:
candidates = [name]
else:
candidates = ['{0}.{1}'.format(c, name) for c in collection_list]
# TODO: keep actual errors, not just assembled messages
errors = []
for candidate_name in candidates:
try:
# HACK: refactor this properly
if candidate_name.startswith('ansible.legacy'):
# just pass the raw name to the old lookup function to check in all the usual locations
p = self._find_plugin_legacy(name.replace('ansible.legacy.', '', 1), ignore_deprecated, check_aliases, suffix)
else:
p = self._find_fq_plugin(candidate_name, suffix)
if p:
return p
except Exception as ex:
errors.append(to_native(ex))
if errors:
display.debug(msg='plugin lookup for {0} failed; errors: {1}'.format(name, '; '.join(errors)))
return None
# if we got here, there's no collection list and it's not an FQ name, so do legacy lookup
return self._find_plugin_legacy(name, ignore_deprecated, check_aliases, suffix)
def _find_plugin_legacy(self, name, ignore_deprecated=False, check_aliases=False, suffix=None):
if check_aliases:
name = self.aliases.get(name, name)
@@ -388,13 +489,13 @@ class PluginLoader:
return None
def find_plugin(self, name, mod_type='', ignore_deprecated=False, check_aliases=False):
def find_plugin(self, name, mod_type='', ignore_deprecated=False, check_aliases=False, collection_list=None):
''' Find a plugin named name '''
# Import here to avoid circular import
from ansible.vars.reserved import is_reserved_name
plugin = self._find_plugin(name, mod_type=mod_type, ignore_deprecated=ignore_deprecated, check_aliases=check_aliases)
plugin = self._find_plugin(name, mod_type=mod_type, ignore_deprecated=ignore_deprecated, check_aliases=check_aliases, collection_list=collection_list)
if plugin and self.package == 'ansible.modules' and name not in ('gather_facts',) and is_reserved_name(name):
raise AnsibleError(
'Module "%s" shadows the name of a reserved keyword. Please rename or remove this module. Found at %s' % (name, plugin)
@@ -402,10 +503,16 @@ class PluginLoader:
return plugin
def has_plugin(self, name):
def has_plugin(self, name, collection_list=None):
''' Checks if a plugin named name exists '''
return self.find_plugin(name) is not None
try:
return self.find_plugin(name, collection_list=collection_list) is not None
except Exception as ex:
if isinstance(ex, AnsibleError):
raise
# log and continue, likely an innocuous type/package loading failure in collections import
display.debug('has_plugin error: {0}'.format(to_native(ex)))
__contains__ = has_plugin
@@ -436,9 +543,10 @@ class PluginLoader:
found_in_cache = True
class_only = kwargs.pop('class_only', False)
collection_list = kwargs.pop('collection_list', None)
if name in self.aliases:
name = self.aliases[name]
path = self.find_plugin(name)
path = self.find_plugin(name, collection_list=collection_list)
if path is None:
return None
@@ -600,14 +708,20 @@ class Jinja2Loader(PluginLoader):
The filter and test plugins are Jinja2 plugins encapsulated inside of our plugin format.
The way the calling code is setup, we need to do a few things differently in the all() method
"""
def find_plugin(self, name):
def find_plugin(self, name, collection_list=None):
# Nothing using Jinja2Loader use this method. We can't use the base class version because
# we deduplicate differently than the base class
if '.' in name:
return super(Jinja2Loader, self).find_plugin(name, collection_list=collection_list)
raise AnsibleError('No code should call find_plugin for Jinja2Loaders (Not implemented)')
def get(self, name, *args, **kwargs):
# Nothing using Jinja2Loader use this method. We can't use the base class version because
# we deduplicate differently than the base class
if '.' in name:
return super(Jinja2Loader, self).get(name, *args, **kwargs)
raise AnsibleError('No code should call find_plugin for Jinja2Loaders (Not implemented)')
def all(self, *args, **kwargs):
@@ -695,11 +809,18 @@ def _load_plugin_filter():
return filters
def _configure_collection_loader():
if not any((isinstance(l, AnsibleCollectionLoader) for l in sys.meta_path)):
sys.meta_path.insert(0, AnsibleCollectionLoader())
# TODO: All of the following is initialization code It should be moved inside of an initialization
# function which is called at some point early in the ansible and ansible-playbook CLI startup.
_PLUGIN_FILTERS = _load_plugin_filter()
_configure_collection_loader()
# doc fragments first
fragment_loader = PluginLoader(
'ModuleDocFragment',

@@ -20,15 +20,14 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ast
import contextlib
import datetime
import os
import pkgutil
import pwd
import re
import time
from functools import wraps
from io import StringIO
from numbers import Number
try:
@@ -42,9 +41,9 @@ from jinja2.runtime import Context, StrictUndefined
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFilterError, AnsibleUndefinedVariable, AnsibleAssertionError
from ansible.module_utils.six import string_types, text_type
from ansible.module_utils.six import iteritems, string_types, text_type
from ansible.module_utils._text import to_native, to_text, to_bytes
from ansible.module_utils.common._collections_compat import Sequence, Mapping
from ansible.module_utils.common._collections_compat import Sequence, Mapping, MutableMapping
from ansible.plugins.loader import filter_loader, lookup_loader, test_loader
from ansible.template.safe_eval import safe_eval
from ansible.template.template import AnsibleJ2Template
@@ -52,6 +51,12 @@ from ansible.template.vars import AnsibleJ2Vars
from ansible.utils.display import Display
from ansible.utils.unsafe_proxy import UnsafeProxy, wrap_var
# HACK: keep Python 2.6 controller tests happy in CI until they're properly split
try:
from importlib import import_module
except ImportError:
import_module = __import__
display = Display()
@@ -255,6 +260,83 @@ class AnsibleContext(Context):
return val
class JinjaPluginIntercept(MutableMapping):
def __init__(self, delegatee, pluginloader, *args, **kwargs):
super(JinjaPluginIntercept, self).__init__(*args, **kwargs)
self._delegatee = delegatee
self._pluginloader = pluginloader
if self._pluginloader.class_name == 'FilterModule':
self._method_map_name = 'filters'
self._dirname = 'filter'
elif self._pluginloader.class_name == 'TestModule':
self._method_map_name = 'tests'
self._dirname = 'test'
self._collection_jinja_func_cache = {}
# FUTURE: we can cache FQ filter/test calls for the entire duration of a run, since a given collection's impl's
# aren't supposed to change during a run
def __getitem__(self, key):
if not isinstance(key, string_types):
raise ValueError('key must be a string')
key = to_native(key)
if '.' not in key: # might be a built-in value, delegate to base dict
return self._delegatee.__getitem__(key)
func = self._collection_jinja_func_cache.get(key)
if func:
return func
components = key.split('.')
if len(components) != 3:
raise KeyError('invalid plugin name: {0}'.format(key))
collection_name = '.'.join(components[0:2])
collection_pkg = 'ansible_collections.{0}.plugins.{1}'.format(collection_name, self._dirname)
# FIXME: error handling for bogus plugin name, bogus impl, bogus filter/test
# FIXME: move this capability into the Jinja plugin loader
pkg = import_module(collection_pkg)
for dummy, module_name, ispkg in pkgutil.iter_modules(pkg.__path__, prefix=collection_name + '.'):
if ispkg:
continue
plugin_impl = self._pluginloader.get(module_name)
method_map = getattr(plugin_impl, self._method_map_name)
for f in iteritems(method_map()):
fq_name = '.'.join((collection_name, f[0]))
self._collection_jinja_func_cache[fq_name] = f[1]
function_impl = self._collection_jinja_func_cache[key]
# FIXME: detect/warn on intra-collection function name collisions
return function_impl
def __setitem__(self, key, value):
return self._delegatee.__setitem__(key, value)
def __delitem__(self, key):
raise NotImplementedError()
def __iter__(self):
# not strictly accurate since we're not counting dynamically-loaded values
return iter(self._delegatee)
def __len__(self):
# not strictly accurate since we're not counting dynamically-loaded values
return len(self._delegatee)
class AnsibleEnvironment(Environment):
'''
Our custom environment, which simply allows us to override the class-level
@@ -263,6 +345,12 @@ class AnsibleEnvironment(Environment):
context_class = AnsibleContext
template_class = AnsibleJ2Template
def __init__(self, *args, **kwargs):
super(AnsibleEnvironment, self).__init__(*args, **kwargs)
self.filters = JinjaPluginIntercept(self.filters, filter_loader)
self.tests = JinjaPluginIntercept(self.tests, test_loader)
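# Illustrative note (comment only, not part of the commit): with filters/tests wrapped above, a
# dotted name like '{{ "x" | testns.testcoll.testfilter }}' bypasses the built-in table, lands in
# JinjaPluginIntercept.__getitem__, and imports ansible_collections.testns.testcoll.plugins.filter
# on first use; every function found there is then cached under its fully qualified name.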
class Templar:
'''

@@ -0,0 +1,304 @@
# (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os.path
import pkgutil
import re
import sys
from types import ModuleType
from ansible import constants as C
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.six import iteritems, string_types
# HACK: keep Python 2.6 controller tests happy in CI until they're properly split
try:
from importlib import import_module
except ImportError:
import_module = __import__
_SYNTHETIC_PACKAGES = {
'ansible_collections.ansible': dict(type='pkg_only'),
'ansible_collections.ansible.builtin': dict(type='pkg_only'),
'ansible_collections.ansible.builtin.plugins': dict(type='map', map='ansible.plugins'),
'ansible_collections.ansible.builtin.plugins.module_utils': dict(type='map', map='ansible.module_utils', graft=True),
'ansible_collections.ansible.builtin.plugins.modules': dict(type='flatmap', flatmap='ansible.modules', graft=True),
}
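# Illustrative note (comment only, not part of the commit): of the types above, 'pkg_only'
# yields an empty namespace package, 'map' aliases an existing Python package (eg ansible.plugins),
# and 'flatmap' serves a flattened file tree through AnsibleFlatMapLoader (eg ansible.modules).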
# TODO: tighten this up to subset Python identifier requirements (and however we want to restrict ns/collection names)
_collection_qualified_re = re.compile(to_text(r'^(\w+)\.(\w+)\.(\w+)$'))
# FIXME: exception handling/error logging
class AnsibleCollectionLoader(object):
def __init__(self):
self._n_configured_paths = C.config.get_config_value('COLLECTIONS_PATHS')
if isinstance(self._n_configured_paths, string_types):
self._n_configured_paths = [self._n_configured_paths]
elif self._n_configured_paths is None:
self._n_configured_paths = []
# expand any placeholders in configured paths
self._n_configured_paths = [to_native(os.path.expanduser(p), errors='surrogate_or_strict') for p in self._n_configured_paths]
self._n_playbook_paths = []
# pre-inject grafted package maps so we can force them to use the right loader instead of potentially delegating to a "normal" loader
for syn_pkg_def in (p for p in iteritems(_SYNTHETIC_PACKAGES) if p[1].get('graft')):
pkg_name = syn_pkg_def[0]
pkg_def = syn_pkg_def[1]
newmod = ModuleType(pkg_name)
newmod.__package__ = pkg_name
newmod.__file__ = '<ansible_synthetic_collection_package>'
pkg_type = pkg_def.get('type')
# TODO: need to rethink map style so we can just delegate all the loading
if pkg_type == 'flatmap':
newmod.__loader__ = AnsibleFlatMapLoader(import_module(pkg_def['flatmap']))
newmod.__path__ = []
sys.modules[pkg_name] = newmod
@property
def _n_collection_paths(self):
return self._n_playbook_paths + self._n_configured_paths
def set_playbook_paths(self, b_playbook_paths):
if isinstance(b_playbook_paths, string_types):
b_playbook_paths = [b_playbook_paths]
# track visited paths; we have to preserve the dir order as-passed in case there are duplicate collections (first one wins)
added_paths = set()
# de-dupe and ensure the paths are native strings (Python seems to do this for package paths etc, so assume it's safe)
self._n_playbook_paths = [os.path.join(to_native(p), 'collections') for p in b_playbook_paths if not (p in added_paths or added_paths.add(p))]
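# (set.add returns None, so 'p in added_paths or added_paths.add(p)' is falsy exactly once per
# path; the comprehension keeps first occurrences and filters repeats while preserving order)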
# FIXME: only allow setting this once, or handle any necessary cache/package path invalidations internally?
def find_module(self, fullname, path=None):
# this loader is only concerned with items under the Ansible Collections namespace hierarchy, ignore others
if fullname.startswith('ansible_collections.') or fullname == 'ansible_collections':
return self
return None
def load_module(self, fullname):
if sys.modules.get(fullname):
return sys.modules[fullname]
# this loader implements key functionality for Ansible collections
# * implicit distributed namespace packages for the root Ansible namespace (no pkgutil.extend_path hackery reqd)
# * implicit package support for Python 2.7 (no need for __init__.py in collections, except to use standard Py2.7 tooling)
# * preventing controller-side code injection during collection loading
# * (default loader would execute arbitrary package code from all __init__.py's)
parent_pkg_name = '.'.join(fullname.split('.')[:-1])
parent_pkg = sys.modules.get(parent_pkg_name)
if parent_pkg_name and not parent_pkg:
raise ImportError('parent package {0} not found'.format(parent_pkg_name))
# are we at or below the collection level? eg a.mynamespace.mycollection.something.else
# if so, we don't want distributed namespace behavior; first mynamespace.mycollection on the path is where
# we'll load everything from (ie, don't fall back to another mynamespace.mycollection lower on the path)
sub_collection = fullname.count('.') > 1
synpkg_def = _SYNTHETIC_PACKAGES.get(fullname)
# FIXME: collapse as much of this back to on-demand as possible (maybe stub packages that get replaced when actually loaded?)
if synpkg_def:
pkg_type = synpkg_def.get('type')
if not pkg_type:
raise KeyError('invalid synthetic package type (no package "type" specified)')
if pkg_type == 'map':
map_package = synpkg_def.get('map')
if not map_package:
raise KeyError('invalid synthetic map package definition (no target "map" defined)')
mod = import_module(map_package)
sys.modules[fullname] = mod
return mod
elif pkg_type == 'flatmap':
raise NotImplementedError()
elif pkg_type == 'pkg_only':
newmod = ModuleType(fullname)
newmod.__package__ = fullname
newmod.__file__ = '<ansible_synthetic_collection_package>'
newmod.__loader__ = self
newmod.__path__ = []
sys.modules[fullname] = newmod
return newmod
if not parent_pkg: # top-level package, look for NS subpackages on all collection paths
package_paths = [self._extend_path_with_ns(p, fullname) for p in self._n_collection_paths]
else: # subpackage; search in all subpaths (we'll limit later inside a collection)
package_paths = [self._extend_path_with_ns(p, fullname) for p in parent_pkg.__path__]
for candidate_child_path in package_paths:
source = None
is_package = True
location = None
# check for implicit sub-package first
if os.path.isdir(candidate_child_path):
# Py3.x implicit namespace packages don't have a file location, so they don't support get_data
# (which assumes the parent dir or that the loader has an internal mapping); so we have to provide
# a bogus leaf file on the __file__ attribute for pkgutil.get_data to strip off
location = os.path.join(candidate_child_path, '__synthetic__')
else:
for source_path in [os.path.join(candidate_child_path, '__init__.py'),
candidate_child_path + '.py']:
if not os.path.isfile(source_path):
continue
with open(source_path, 'rb') as fd:
source = fd.read()
location = source_path
is_package = source_path.endswith('__init__.py')
break
if not location:
continue
newmod = ModuleType(fullname)
newmod.__package__ = fullname
newmod.__file__ = location
newmod.__loader__ = self
if is_package:
if sub_collection: # we never want to search multiple instances of the same collection; use first found
newmod.__path__ = [candidate_child_path]
else:
newmod.__path__ = package_paths
if source:
# FIXME: decide cases where we don't actually want to exec the code?
exec(source, newmod.__dict__)
sys.modules[fullname] = newmod
return newmod
# FIXME: need to handle the "no dirs present" case for at least the root and synthetic internal collections like ansible.builtin
return None
@staticmethod
def _extend_path_with_ns(path, ns):
ns_path_add = ns.rsplit('.', 1)[-1]
return os.path.join(path, ns_path_add)
def get_data(self, filename):
with open(filename, 'rb') as fd:
return fd.read()
class AnsibleFlatMapLoader(object):
_extension_blacklist = ['.pyc', '.pyo']
def __init__(self, root_package):
self._root_package = root_package
self._dirtree = None
def _init_dirtree(self):
# FIXME: thread safety
root_path = os.path.dirname(self._root_package.__file__)
flat_files = []
# FIXME: make this a dict of filename->dir for faster direct lookup?
# FIXME: deal with _ prefixed deprecated files (or require another method for collections?)
# FIXME: fix overloaded filenames (eg, rename Windows setup to win_setup)
for root, dirs, files in os.walk(root_path):
# add all files in this dir that don't have a blacklisted extension
flat_files.extend(((root, f) for f in files if not any((f.endswith(ext) for ext in self._extension_blacklist))))
self._dirtree = flat_files
def find_file(self, filename):
# FIXME: thread safety
if not self._dirtree:
self._init_dirtree()
if '.' not in filename: # no extension specified, use extension regex to filter
extensionless_re = re.compile(r'^{0}(\..+)?$'.format(re.escape(filename)))
# why doesn't Python have first()?
try:
# FIXME: store extensionless in a separate direct lookup?
filepath = next(os.path.join(r, f) for r, f in self._dirtree if extensionless_re.match(f))
except StopIteration:
raise IOError("couldn't find {0}".format(filename))
else: # actual filename, just look it up
# FIXME: this case sucks; make it a lookup
try:
filepath = next(os.path.join(r, f) for r, f in self._dirtree if f == filename)
except StopIteration:
raise IOError("couldn't find {0}".format(filename))
return filepath
def get_data(self, filename):
found_file = self.find_file(filename)
with open(found_file, 'rb') as fd:
return fd.read()
# TODO: implement these for easier inline debugging?
# def get_source(self, fullname):
# def get_code(self, fullname):
# def is_package(self, fullname):
def get_collection_role_path(role_name, collection_list=None):
match = _collection_qualified_re.match(role_name)
if match:
grps = match.groups()
collection_list = ['.'.join(grps[:2])]
role = grps[2]
elif not collection_list:
return None # not a FQ role and no collection search list spec'd, nothing to do
else:
role = role_name
for collection_name in collection_list:
try:
role_package = u'ansible_collections.{0}.roles.{1}'.format(collection_name, role)
# FIXME: error handling/logging; need to catch any import failures and move along
# FIXME: this line shouldn't be necessary, but py2 pkgutil.get_data is delegating back to built-in loader when it shouldn't
pkg = import_module(role_package + u'.tasks')
# get_data input must be a native string
tasks_file = pkgutil.get_data(to_native(role_package) + '.tasks', 'main.yml')
if tasks_file is not None:
# the package is now loaded, get the collection's package and ask where it lives
path = os.path.dirname(to_bytes(sys.modules[role_package].__file__, errors='surrogate_or_strict'))
return role, to_text(path, errors='surrogate_or_strict'), collection_name
except IOError:
continue
except Exception as ex:
# FIXME: pick out typical import errors first, then error logging
continue
return None
def is_collection_ref(candidate_name):
return bool(_collection_qualified_re.match(candidate_name))
def set_collection_playbook_paths(b_playbook_paths):
# set for any/all AnsibleCollectionLoader instance(s) on meta_path
for loader in (l for l in sys.meta_path if isinstance(l, AnsibleCollectionLoader)):
loader.set_playbook_paths(b_playbook_paths)
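A quick illustration of what the qualification regex above accepts (illustrative sketch only):

import re

_collection_qualified_re = re.compile(r'^(\w+)\.(\w+)\.(\w+)$')

for name in ('testns.testcoll.myrole', 'myrole', 'ansible.builtin.ping.extra'):
    print(name, bool(_collection_qualified_re.match(name)))
# testns.testcoll.myrole True      (exactly namespace.collection.resource)
# myrole False                     (unqualified; legacy role search applies)
# ansible.builtin.ping.extra False (too many segments)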

@@ -0,0 +1,3 @@
# use a plugin defined in a content-adjacent collection to ensure we added it properly
plugin: testns.content_adj.statichost
hostname: dynamic_host_a

@@ -0,0 +1,2 @@
shippable/posix/group4
skip/python2.6

@@ -0,0 +1,11 @@
#!/usr/bin/env python
import json
def main():
print(json.dumps(dict(changed=False, source='sys')))
if __name__ == '__main__':
main()

@@ -0,0 +1,11 @@
#!/usr/bin/env python
import json
def main():
print(json.dumps(dict(changed=False, failed=True, msg='this collection should be masked by testcoll in the user content root')))
if __name__ == '__main__':
main()

@@ -0,0 +1,11 @@
#!/usr/bin/env python

import json


def main():
    print(json.dumps(dict(changed=False, source='sys')))


if __name__ == '__main__':
    main()

View file

@ -0,0 +1,2 @@
- fail:
    msg: this role should never be visible or runnable

View file

@ -0,0 +1,30 @@
from ansible.plugins.action import ActionBase
from ansible.plugins import loader


class ActionModule(ActionBase):

    TRANSFERS_FILES = False
    _VALID_ARGS = frozenset(('type', 'name'))

    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        # the base run() result is not used here; the call is kept for its setup side effects
        super(ActionModule, self).run(None, task_vars)

        type = self._task.args.get('type')
        name = self._task.args.get('name')

        result = dict(changed=False, collection_list=self._task.collections)

        if all([type, name]):
            attr_name = '{0}_loader'.format(type)

            typed_loader = getattr(loader, attr_name, None)

            if not typed_loader:
                return dict(failed=True, msg='invalid plugin type {0}'.format(type))

            result['plugin_path'] = typed_loader.find_plugin(name, collection_list=self._task.collections)

        return result
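
The getattr trick above works because the plugin loader module exposes one loader instance per plugin type. A minimal standalone sketch of the same resolution (hypothetical plugin names; the collection_list parameter on find_plugin is the one this change introduces):

from ansible.plugins import loader

# 'module' -> module_loader, 'lookup' -> lookup_loader, 'filter' -> filter_loader, etc.
typed_loader = getattr(loader, '{0}_loader'.format('lookup'), None)

if typed_loader:
    plugin_path = typed_loader.find_plugin('mylookup', collection_list=['testns.testcoll'])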

View file

@ -0,0 +1,24 @@
from ansible.plugins.callback import CallbackBase

DOCUMENTATION = '''
    callback: usercallback
    callback_type: notification
    short_description: does stuff
    description:
        - does some stuff
'''


class CallbackModule(CallbackBase):

    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'aggregate'
    CALLBACK_NAME = 'usercallback'
    CALLBACK_NEEDS_WHITELIST = True

    def __init__(self):
        super(CallbackModule, self).__init__()
        self._display.display("loaded usercallback from collection, yay")

    def v2_runner_on_ok(self, result):
        self._display.display("usercallback says ok")

View file

@ -0,0 +1,38 @@
from ansible.module_utils._text import to_native
from ansible.plugins.connection import ConnectionBase

DOCUMENTATION = """
    connection: localconn
    short_description: do stuff local
    description:
        - does stuff
    options:
      connectionvar:
        description:
            - something we set
        default: the_default
        vars:
            - name: ansible_localconn_connectionvar
"""


class Connection(ConnectionBase):

    transport = 'local'
    has_pipelining = True

    def _connect(self):
        return self

    def exec_command(self, cmd, in_data=None, sudoable=True):
        stdout = 'localconn ran {0}'.format(to_native(cmd))
        stderr = 'connectionvar is {0}'.format(to_native(self.get_option('connectionvar')))
        return (0, stdout, stderr)

    def put_file(self, in_path, out_path):
        raise NotImplementedError('just a test')

    def fetch_file(self, in_path, out_path):
        raise NotImplementedError('just a test')

    def close(self):
        self._connected = False

View file

@ -0,0 +1,10 @@
def testfilter(data):
    return "{0}_from_userdir".format(data)


class FilterModule(object):

    def filters(self):
        return {
            'testfilter': testfilter
        }
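
# exercised in play.yml as: 'data' | testns.testcoll.testfilter == 'data_from_userdir'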

View file

@ -0,0 +1,8 @@
from ansible.plugins.lookup import LookupBase


class LookupModule(LookupBase):

    def run(self, terms, variables, **kwargs):
        return ['lookup_from_user_dir']
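
# exercised in play.yml as: lookup('testns.testcoll.mylookup') == 'lookup_from_user_dir'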

View file

@ -0,0 +1,7 @@
# FIXME: this style (full module import via from) doesn't work yet from collections
# from ansible_collections.testns.testcoll.plugins.module_utils import secondary
import ansible_collections.testns.testcoll.plugins.module_utils.secondary


def thingtocall():
    return "thingtocall in base called " + ansible_collections.testns.testcoll.plugins.module_utils.secondary.thingtocall()

View file

@ -0,0 +1,2 @@
def thingtocall():
    return "thingtocall in leaf"

View file

@ -0,0 +1,2 @@
def thingtocall():
    return "thingtocall in secondary"

View file

@ -0,0 +1,11 @@
#!/usr/bin/env python

import json


def main():
    print(json.dumps(dict(changed=False, source='user')))


if __name__ == '__main__':
    main()

View file

@ -0,0 +1,11 @@
#!/usr/bin/env python

import json


def main():
    print(json.dumps(dict(changed=False, source='user')))


if __name__ == '__main__':
    main()

View file

@ -0,0 +1,19 @@
#!/usr/bin/env python

import json
import sys


# FIXME: this is only required due to a bug around "new style module detection"
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.testns.testcoll.plugins.module_utils.base import thingtocall


def main():
    mu_result = thingtocall()
    print(json.dumps(dict(changed=False, source='user', mu_result=mu_result)))
    sys.exit()


if __name__ == '__main__':
    main()

View file

@ -0,0 +1,19 @@
#!/usr/bin/env python

import json
import sys


# FIXME: this is only required due to a bug around "new style module detection"
from ansible.module_utils.basic import AnsibleModule
import ansible_collections.testns.testcoll.plugins.module_utils.leaf


def main():
    mu_result = ansible_collections.testns.testcoll.plugins.module_utils.leaf.thingtocall()
    print(json.dumps(dict(changed=False, source='user', mu_result=mu_result)))
    sys.exit()


if __name__ == '__main__':
    main()

View file

@ -0,0 +1,19 @@
#!/usr/bin/env python

import json
import sys


# FIXME: this is only required due to a bug around "new style module detection"
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.testns.testcoll.plugins.module_utils.leaf import thingtocall


def main():
    mu_result = thingtocall()
    print(json.dumps(dict(changed=False, source='user', mu_result=mu_result)))
    sys.exit()


if __name__ == '__main__':
    main()

View file

@ -0,0 +1,20 @@
#!/usr/bin/env python

import json
import sys


# FIXME: this is only required due to a bug around "new style module detection"
from ansible.module_utils.basic import AnsibleModule
# FIXME: this style doesn't work yet under collections
from ansible_collections.testns.testcoll.plugins.module_utils import leaf


def main():
    mu_result = leaf.thingtocall()
    print(json.dumps(dict(changed=False, source='user', mu_result=mu_result)))
    sys.exit()


if __name__ == '__main__':
    main()

View file

@ -0,0 +1,9 @@
def testtest(data):
    return data == 'from_user'


class TestModule(object):
    def tests(self):
        return {
            'testtest': testtest
        }

View file

@ -0,0 +1,4 @@
collections:
- ansible.builtin
- testns.coll_in_sys
- bogus.fromrolemeta

View file

@ -0,0 +1,30 @@
- name: check collections list from role meta
  plugin_lookup:
  register: pluginlookup_out

- name: call role-local ping module
  ping:
  register: ping_out

- name: call unqualified module in another collection listed in role meta (testns.coll_in_sys)
  systestmodule:
  register: systestmodule_out

# verify that pluginloader caching doesn't prevent us from explicitly calling a builtin plugin with the same name
- name: call builtin ping module explicitly
  ansible.builtin.ping:
  register: builtinping_out

- debug:
    msg: '{{ test_role_input | default("(undefined)") }}'
  register: test_role_output

# FIXME: add tests to ensure that block/task level stuff in a collection-hosted role properly inherit role default/meta values

- assert:
    that:
      - pluginlookup_out.collection_list == ['testns.testcoll', 'ansible.builtin', 'testns.coll_in_sys', 'bogus.fromrolemeta']
      - ping_out.source is defined and ping_out.source == 'user'
      - systestmodule_out.source is defined and systestmodule_out.source == 'sys'
      - builtinping_out.ping is defined and builtinping_out.ping == 'pong'
      - test_role_input is not defined or test_role_input == test_role_output.msg

View file

@ -0,0 +1,55 @@
# Copyright (c) 2018 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

DOCUMENTATION = '''
    inventory: statichost
    short_description: Add a single host
    options:
      plugin:
        description: plugin name (must be statichost)
        required: true
      hostname:
        description: hostname of the single host to add to inventory
        required: true
        type: str
'''

from ansible.errors import AnsibleParserError
from ansible.plugins.inventory import BaseInventoryPlugin


class InventoryModule(BaseInventoryPlugin):

    NAME = 'statichost'

    def __init__(self):
        super(InventoryModule, self).__init__()

        self._hosts = set()

    def verify_file(self, path):
        ''' Verify if file is usable by this plugin, base does minimal accessibility check '''

        if not path.endswith('.statichost.yml') and not path.endswith('.statichost.yaml'):
            return False

        return super(InventoryModule, self).verify_file(path)

    def parse(self, inventory, loader, path, cache=None):
        super(InventoryModule, self).parse(inventory, loader, path)

        config_data = loader.load_from_file(path, cache=False)

        host_to_add = config_data.get('hostname')

        if not host_to_add:
            raise AnsibleParserError("hostname was not specified")

        # this is where the magic happens
        self.inventory.add_host(host_to_add, 'all')

        # self.inventory.add_group()...
        # self.inventory.add_child()...
        # self.inventory.set_variable()..
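        # a hypothetical fleshed-out version of the stubs above (not exercised by this test):
        # self.inventory.add_group('static_group')
        # self.inventory.add_child('static_group', host_to_add)
        # self.inventory.set_variable(host_to_add, 'added_by', self.NAME)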

View file

@ -0,0 +1,11 @@
#!/usr/bin/env python

import json


def main():
    print(json.dumps(dict(changed=False, source='content_adj')))


if __name__ == '__main__':
    main()

View file

@ -0,0 +1,11 @@
#!/usr/bin/python

import json


def main():
    print(json.dumps(dict(changed=False, source='legacy_library_dir')))


if __name__ == '__main__':
    main()

View file

@ -0,0 +1,280 @@
- hosts: testhost
  tasks:
    # basic test of FQ module lookup and that we got the right one (user-dir hosted)
    - name: exec FQ module in a user-dir testns collection
      testns.testcoll.testmodule:
      register: testmodule_out

    # verifies that distributed collection subpackages are visible under a multi-location namespace (testns exists in user and sys locations)
    - name: exec FQ module in a sys-dir testns collection
      testns.coll_in_sys.systestmodule:
      register: systestmodule_out

    # verifies that content-adjacent collections were automatically added to the installed content roots
    - name: exec FQ module from content-adjacent collection
      testns.content_adj.contentadjmodule:
      register: contentadjmodule_out

    # content should only be loaded from the first visible instance of a collection
    - name: attempt to look up FQ module in a masked collection
      testns.testcoll.plugin_lookup:
        type: module
        name: testns.testcoll.maskedmodule
      register: maskedmodule_out

    # module with a granular module_utils import (from (this collection).module_utils.leaf import thingtocall)
    - name: exec module with granular module utils import from this collection
      testns.testcoll.uses_leaf_mu_granular_import:
      register: granular_out

    # module with a granular nested module_utils import (from (this collection).module_utils.base import thingtocall,
    # where base imports secondary from the same collection's module_utils)
    - name: exec module with nested module utils from this collection
      testns.testcoll.uses_base_mu_granular_nested_import:
      register: granular_nested_out

    # module with a flat module_utils import (import (this collection).module_utils.leaf)
    - name: exec module with flat module_utils import from this collection
      testns.testcoll.uses_leaf_mu_flat_import:
      register: flat_out

    # FIXME: this one doesn't work yet
    # module with a full-module module_utils import using 'from' (from (this collection).module_utils import leaf)
    - name: exec module with full-module module_utils import using 'from' from this collection
      testns.testcoll.uses_leaf_mu_module_import_from:
      ignore_errors: true
      register: from_out

    - assert:
        that:
          - testmodule_out.source == 'user'
          - systestmodule_out.source == 'sys'
          - contentadjmodule_out.source == 'content_adj'
          - not maskedmodule_out.plugin_path
          - granular_out.mu_result == 'thingtocall in leaf'
          - granular_nested_out.mu_result == 'thingtocall in base called thingtocall in secondary'
          - flat_out.mu_result == 'thingtocall in leaf'
          - from_out is failed  # FIXME: switch back once this import is fixed --> from_out.mu_result == 'thingtocall in leaf'

    - name: exercise filters/tests/lookups
      assert:
        that:
          - "'data' | testns.testcoll.testfilter == 'data_from_userdir'"
          - "'from_user' is testns.testcoll.testtest"
          - lookup('testns.testcoll.mylookup') == 'lookup_from_user_dir'

# ensure that the synthetic ansible.builtin collection limits to builtin plugins, that ansible.legacy loads overrides
# from legacy plugin dirs, and that a same-named plugin loaded from a real collection is not masked by the others
- hosts: testhost
  tasks:
    - name: test unqualified ping from library dir
      ping:
      register: unqualified_ping_out

    - name: test legacy-qualified ping from library dir
      ansible.legacy.ping:
      register: legacy_ping_out

    - name: test builtin ping
      ansible.builtin.ping:
      register: builtin_ping_out

    - name: test collection-based ping
      testns.testcoll.ping:
      register: collection_ping_out

    - assert:
        that:
          - unqualified_ping_out.source == 'legacy_library_dir'
          - legacy_ping_out.source == 'legacy_library_dir'
          - builtin_ping_out.ping == 'pong'
          - collection_ping_out.source == 'user'

# verify the default value for the collections list is empty
- hosts: testhost
  tasks:
    - name: sample default collections value
      testns.testcoll.plugin_lookup:
      register: coll_default_out

    - assert:
        that:
          # in original release, collections defaults to empty, which is mostly equivalent to ansible.legacy
          - not coll_default_out.collection_list

# ensure that inheritance/masking works as expected, that the proper default values are injected when missing,
# and that the order is preserved if one of the magic values is explicitly specified
- name: verify collections keyword play/block/task inheritance and magic values
  hosts: testhost
  collections:
    - bogus.fromplay
  tasks:
    - name: sample play collections value
      testns.testcoll.plugin_lookup:
      register: coll_play_out

    - name: collections override block-level
      collections:
        - bogus.fromblock
      block:
        - name: sample block collections value
          testns.testcoll.plugin_lookup:
          register: coll_block_out

        - name: sample task collections value
          collections:
            - bogus.fromtask
          testns.testcoll.plugin_lookup:
          register: coll_task_out

        - name: sample task with explicit core
          collections:
            - ansible.builtin
            - bogus.fromtaskexplicitcore
          testns.testcoll.plugin_lookup:
          register: coll_task_core

        - name: sample task with explicit legacy
          collections:
            - ansible.legacy
            - bogus.fromtaskexplicitlegacy
          testns.testcoll.plugin_lookup:
          register: coll_task_legacy

    - assert:
        that:
          # ensure that parent value inheritance is masked properly by explicit setting
          - coll_play_out.collection_list == ['bogus.fromplay', 'ansible.legacy']
          - coll_block_out.collection_list == ['bogus.fromblock', 'ansible.legacy']
          - coll_task_out.collection_list == ['bogus.fromtask', 'ansible.legacy']
          - coll_task_core.collection_list == ['ansible.builtin', 'bogus.fromtaskexplicitcore']
          - coll_task_legacy.collection_list == ['ansible.legacy', 'bogus.fromtaskexplicitlegacy']

- name: verify unqualified plugin resolution behavior
  hosts: testhost
  collections:
    - testns.testcoll
    - testns.coll_in_sys
    - testns.contentadj
  tasks:
    # basic test of unqualified module lookup and that we got the right one (user-dir hosted; there's another copy of
    # this one in the same-named collection in sys dir that should be masked)
    - name: exec unqualified module in a user-dir testns collection
      testmodule:
      register: testmodule_out

    # use another collection to verify that we're looking in all collections listed on the play
    - name: exec unqualified module in a sys-dir testns collection
      systestmodule:
      register: systestmodule_out

    # ensure we're looking up actions properly
    - name: unqualified action test
      plugin_lookup:
      register: pluginlookup_out

    - assert:
        that:
          - testmodule_out.source == 'user'
          - systestmodule_out.source == 'sys'
          - pluginlookup_out.collection_list == ['testns.testcoll', 'testns.coll_in_sys', 'testns.contentadj', 'ansible.legacy']

    # FIXME: this won't work until collections list gets passed through task templar
    # - name: exercise unqualified filters/tests/lookups
    #   assert:
    #     that:
    #       - "'data' | testfilter == 'data_from_userdir'"
    #       - "'from_user' is testtest"
    #       - lookup('mylookup') == 'lookup_from_user_dir'

# test keyword-static execution of a FQ collection-backed role
- name: verify collection-backed role execution (keyword static)
  hosts: testhost
  collections:
    # set to ansible.builtin only to ensure that roles function properly without inheriting the play's collections config
    - ansible.builtin
  vars:
    test_role_input: keyword static
  roles:
    - role: testns.testcoll.testrole
  tasks:
    - name: ensure role executed
      assert:
        that:
          - test_role_output.msg == test_role_input

# test dynamic execution of a FQ collection-backed role
- name: verify collection-backed role execution (dynamic)
  hosts: testhost
  collections:
    # set to ansible.builtin only to ensure that roles function properly without inheriting the play's collections config
    - ansible.builtin
  vars:
    test_role_input: dynamic
  tasks:
    - include_role:
        name: testns.testcoll.testrole

    - name: ensure role executed
      assert:
        that:
          - test_role_output.msg == test_role_input

# test task-static execution of a FQ collection-backed role
- name: verify collection-backed role execution (task static)
  hosts: testhost
  collections:
    - ansible.builtin
  vars:
    test_role_input: task static
  tasks:
    - import_role:
        name: testns.testcoll.testrole

    - name: ensure role executed
      assert:
        that:
          - test_role_output.msg == test_role_input

# test a legacy playbook-adjacent role, ensure that play collections config is not inherited
- name: verify legacy playbook-adjacent role behavior
  hosts: testhost
  collections:
    - bogus.bogus
  vars:
    test_role_input: legacy playbook-adjacent
  roles:
    - testrole
    # FIXME: this should technically work to look up a playbook-adjacent role
    # - ansible.legacy.testrole
  tasks:
    - name: ensure role executed
      assert:
        that:
          - test_role_output.msg == test_role_input

- name: test a collection-hosted connection plugin against a host from a collection-hosted inventory plugin
  hosts: dynamic_host_a
  vars:
    ansible_connection: testns.testcoll.localconn
    ansible_localconn_connectionvar: from_play
  tasks:
    - raw: echo 'hello world'
      register: connection_out

    - assert:
        that:
          - connection_out.stdout == "localconn ran echo 'hello world'"
          # ensure that the connection var we overrode above made it into the running config
          - connection_out.stderr == "connectionvar is from_play"

- hosts: testhost
  tasks:
    - assert:
        that:
          - hostvars['dynamic_host_a'] is defined
          - hostvars['dynamic_host_a'].connection_out.stdout == "localconn ran echo 'hello world'"

View file

@ -0,0 +1,8 @@
# this test specifically avoids testhost because we need to know about the controller's Python
- hosts: localhost
  gather_facts: yes
  gather_subset: min
  tasks:
    - debug:
        msg: UNSUPPORTEDPYTHON {{ ansible_python_version }}
      when: ansible_python_version is version('2.7', '<')

View file

@ -0,0 +1,25 @@
- debug:
    msg: executing testrole from legacy playbook-adjacent roles dir

- name: exec a FQ module from a legacy role
  testns.testcoll.testmodule:
  register: coll_module_out

- name: exec a legacy playbook-adjacent module from a legacy role
  ping:
  register: ping_out

- name: sample collections list inside a legacy role (should be empty)
  testns.testcoll.plugin_lookup:
  register: plugin_lookup_out

- debug:
    msg: '{{ test_role_input | default("(undefined)") }}'
  register: test_role_output

- assert:
    that:
      - coll_module_out.source == 'user'
      # ensure we used the library/ ping override, not the builtin or one from another collection
      - ping_out.source == 'legacy_library_dir'
      - not plugin_lookup_out.collection_list

View file

@ -0,0 +1,19 @@
#!/usr/bin/env bash

set -eux

export ANSIBLE_COLLECTIONS_PATHS=$PWD/collection_root_user:$PWD/collection_root_sys
export ANSIBLE_GATHERING=explicit
export ANSIBLE_GATHER_SUBSET=minimal

# temporary hack to keep this test from running on Python 2.6 in CI
if ansible-playbook -i ../../inventory pythoncheck.yml | grep UNSUPPORTEDPYTHON; then
    echo skipping test for unsupported Python version...
    exit 0
fi

# test callback
ANSIBLE_CALLBACK_WHITELIST=testns.testcoll.usercallback ansible localhost -m ping | grep "usercallback says ok"

# run test playbook
ansible-playbook -i ../../inventory -i ./a.statichost.yml -v play.yml

View file

@ -40,7 +40,8 @@
      - win_ping_with_data_result.ping == '☠'

- name: test win_ping.ps1 with data as complex args
  win_ping.ps1:
  # win_ping.ps1:  # TODO: do we want to actually support this? no other tests that I can see...
  win_ping:
    data: bleep
  register: win_ping_ps1_result

View file

@ -329,6 +329,7 @@ class TestRole(unittest.TestCase):
        })

        mock_play = MagicMock()
        mock_play.collections = None
        mock_play.ROLE_CACHE = {}

        i = RoleInclude.load('foo_metadata', play=mock_play, loader=fake_loader)

View file

@ -43,6 +43,8 @@ class MixinForMocks(object):
        self.mock_tqm = MagicMock(name='MockTaskQueueManager')

        self.mock_play = MagicMock(name='MockPlay')
        self.mock_play._attributes = []
        self.mock_play.collections = None

        self.mock_iterator = MagicMock(name='MockIterator')
        self.mock_iterator._play = self.mock_play

View file

@ -121,7 +121,7 @@ class TestActionBase(unittest.TestCase):
        mock_connection = MagicMock()

        # create a mock shared loader object
        def mock_find_plugin(name, options):
        def mock_find_plugin(name, options, collection_list=None):
            if name == 'badmodule':
                return None
            elif '.ps1' in options: