default collection support (#61415)

* default collection support

* playbooks run from inside a registered collection will set that collection as the first item in the module/action search order (non-collection roles invoked from such playbooks inherit it as well)
* this allows easy migration of runme.sh-style playbook/role integration tests to collections without the playbooks or roles needing to know the name of their enclosing collection

* ignore bogus sanity error

* filed #61460

* fixed task unit test failure

* don't append an empty collections list to the ds

* ignore leftover local_action in mod_args ds action parsing

* fix async_extra_data test to not require ssh or a bogus locale

* disable default collection test under Windows

* ensure collection location FS code is always bytes

* add changelog
Matt Davis 2019-08-28 16:31:40 -07:00 committed by GitHub
parent caa5abdfc9
commit 7d1a981b61
22 changed files with 278 additions and 64 deletions


@@ -0,0 +1,3 @@
minor_changes:
- default collection - a playbook run inside a collection (eg, as part of a runme.sh integration test) will
first search the containing collection for unqualified module/action references (https://github.com/ansible/ansible/pull/61415)


@@ -16,7 +16,7 @@ from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.module_utils._text import to_bytes
from ansible.playbook.block import Block
from ansible.utils.display import Display
from ansible.utils.collection_loader import set_collection_playbook_paths
from ansible.utils.collection_loader import AnsibleCollectionLoader, get_collection_name_from_path, set_collection_playbook_paths
from ansible.plugins.loader import add_all_plugin_dirs
@@ -92,6 +92,12 @@ class PlaybookCLI(CLI):
set_collection_playbook_paths(b_playbook_dirs)
playbook_collection = get_collection_name_from_path(b_playbook_dirs[0])
if playbook_collection:
display.warning("running playbook inside collection {0}".format(playbook_collection))
AnsibleCollectionLoader().set_default_collection(playbook_collection)
# don't deal with privilege escalation or passwords when we don't need to
if not (context.CLIARGS['listhosts'] or context.CLIARGS['listtasks'] or
context.CLIARGS['listtags'] or context.CLIARGS['syntax']):
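
Not part of the diff: a condensed, standalone sketch of what the PlaybookCLI hunk above does. The playbook directory here is an example borrowed from the integration test added later in this commit, and the real CLI code also registers playbook-adjacent collection roots via set_collection_playbook_paths() before resolving.

from ansible.module_utils._text import to_bytes
from ansible.utils.collection_loader import AnsibleCollectionLoader, get_collection_name_from_path

# example playbook directory living inside a collection (assumed to sit under a configured collection root)
b_playbook_dir = to_bytes('collection_root_user/ansible_collections/testns/testcoll/playbooks')

playbook_collection = get_collection_name_from_path(b_playbook_dir)  # 'testns.testcoll' when that collection is loadable, else None
if playbook_collection:
    # from here on, unqualified module/action names are searched in testns.testcoll first
    AnsibleCollectionLoader().set_default_collection(playbook_collection)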


@@ -121,8 +121,8 @@ class ModuleArgsParser:
# store the valid Task/Handler attrs for quick access
self._task_attrs = set(Task._valid_attrs.keys())
self._task_attrs.update(set(Handler._valid_attrs.keys()))
# HACK: why is static not a FieldAttribute on task with a post-validate to bomb if not include/import?
self._task_attrs.add('static')
# HACK: why are these not FieldAttributes on task with a post-validate to check usage?
self._task_attrs.update(['local_action', 'static'])
self._task_attrs = frozenset(self._task_attrs)
def _split_module_string(self, module_string):
@@ -259,7 +259,7 @@ class ModuleArgsParser:
return (action, args)
def parse(self):
def parse(self, skip_action_validation=False):
'''
Given a task in one of the supported forms, parses and returns
returns the action, arguments, and delegate_to values for the
@@ -300,7 +300,7 @@
# walk the filtered input dictionary to see if we recognize a module name
for item, value in iteritems(non_task_ds):
if item in BUILTIN_TASKS or action_loader.has_plugin(item, collection_list=self._collection_list) or \
if item in BUILTIN_TASKS or skip_action_validation or action_loader.has_plugin(item, collection_list=self._collection_list) or \
module_loader.has_plugin(item, collection_list=self._collection_list):
# finding more than one module name is a problem
if action is not None:


@@ -6,21 +6,39 @@ __metaclass__ = type
from ansible.module_utils.six import string_types
from ansible.playbook.attribute import FieldAttribute
from ansible.utils.collection_loader import AnsibleCollectionLoader
def _ensure_default_collection(collection_list=None):
default_collection = AnsibleCollectionLoader().default_collection
if collection_list is None:
collection_list = []
if default_collection: # FIXME: exclude role tasks?
if isinstance(collection_list, string_types):
collection_list = [collection_list]
if default_collection not in collection_list:
collection_list.insert(0, default_collection)
# if there's something in the list, ensure that builtin or legacy is always there too
if collection_list and 'ansible.builtin' not in collection_list and 'ansible.legacy' not in collection_list:
collection_list.append('ansible.legacy')
return collection_list
class CollectionSearch:
# this needs to be populated before we can resolve tasks/roles/etc
_collections = FieldAttribute(isa='list', listof=string_types, priority=100)
_collections = FieldAttribute(isa='list', listof=string_types, priority=100, default=_ensure_default_collection)
def _load_collections(self, attr, ds):
if not ds:
# if empty/None, just return whatever was there; legacy behavior will do the right thing
return ds
# this will only be called if someone specified a value; call the shared value
_ensure_default_collection(collection_list=ds)
if not isinstance(ds, list):
ds = [ds]
if 'ansible.builtin' not in ds and 'ansible.legacy' not in ds:
ds.append('ansible.legacy')
if not ds: # don't return an empty collection list, just return None
return None
return ds
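
An illustration (not from the diff, collection names assumed) of what the _ensure_default_collection() helper above returns once a default collection is active: the default is prepended when missing, and 'ansible.legacy' is appended unless a builtin/legacy entry is already present. Because the helper is also wired in as the FieldAttribute default for 'collections', plays/blocks/tasks that specify nothing pick up the first result automatically.

from ansible.playbook.collectionsearch import _ensure_default_collection
from ansible.utils.collection_loader import AnsibleCollectionLoader

AnsibleCollectionLoader().set_default_collection('testns.testcoll')

_ensure_default_collection()                      # ['testns.testcoll', 'ansible.legacy']
_ensure_default_collection('other.coll')          # ['testns.testcoll', 'other.coll', 'ansible.legacy']
_ensure_default_collection(['ansible.builtin'])   # ['testns.testcoll', 'ansible.builtin']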


@@ -25,6 +25,7 @@ from ansible.errors import AnsibleParserError, AnsibleUndefinedVariable, Ansible
from ansible.module_utils._text import to_native
from ansible.module_utils.six import string_types
from ansible.parsing.mod_args import ModuleArgsParser
from ansible.utils.collection_loader import AnsibleCollectionLoader
from ansible.utils.display import Display
display = Display()
@@ -117,12 +118,9 @@ def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_h
)
task_list.append(t)
else:
collection_list = task_ds.get('collections')
if collection_list is None and block is not None and block.collections:
collection_list = block.collections
args_parser = ModuleArgsParser(task_ds, collection_list=collection_list)
args_parser = ModuleArgsParser(task_ds)
try:
(action, args, delegate_to) = args_parser.parse()
(action, args, delegate_to) = args_parser.parse(skip_action_validation=True)
except AnsibleParserError as e:
# if the raises exception was created with obj=ds args, then it includes the detail
# so we dont need to add it so we can just re raise.


@@ -32,6 +32,7 @@ from ansible.playbook.helpers import load_list_of_blocks
from ansible.playbook.role.metadata import RoleMetadata
from ansible.playbook.taggable import Taggable
from ansible.plugins.loader import add_all_plugin_dirs
from ansible.utils.collection_loader import AnsibleCollectionLoader
from ansible.utils.vars import combine_vars
@@ -224,20 +225,23 @@ class Role(Base, Conditional, Taggable, CollectionSearch):
# configure plugin/collection loading; either prepend the current role's collection or configure legacy plugin loading
# FIXME: need exception for explicit ansible.legacy?
if self._role_collection:
if self._role_collection: # this is a collection-hosted role
self.collections.insert(0, self._role_collection)
else:
else: # this is a legacy role, but set the default collection if there is one
default_collection = AnsibleCollectionLoader().default_collection
if default_collection:
self.collections.insert(0, default_collection)
# legacy role, ensure all plugin dirs under the role are added to plugin search path
add_all_plugin_dirs(self._role_path)
# collections can be specified in metadata for legacy or collection-hosted roles
if self._metadata.collections:
self.collections.extend(self._metadata.collections)
self.collections.extend((c for c in self._metadata.collections if c not in self.collections))
# if any collections were specified, ensure that core or legacy synthetic collections are always included
if self.collections:
# default append collection is core for collection-hosted roles, legacy for others
default_append_collection = 'ansible.builtin' if self.collections else 'ansible.legacy'
default_append_collection = 'ansible.builtin' if self._role_collection else 'ansible.legacy'
if 'ansible.builtin' not in self.collections and 'ansible.legacy' not in self.collections:
self.collections.append(default_append_collection)
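
The following is a self-contained restatement of the role search-list ordering produced by the hunk above; it is not Ansible code, and the role/collection names in the examples are made up. It is included only to make the resulting ordering explicit.

def _role_search_list(role_collection, default_collection, meta_collections, collections=None):
    collections = list(collections or [])
    if role_collection:
        # collection-hosted role: its own collection goes first
        collections.insert(0, role_collection)
    elif default_collection:
        # legacy role running while a default collection is active
        collections.insert(0, default_collection)
    # meta/main.yml collections are appended without introducing duplicates
    collections.extend(c for c in meta_collections if c not in collections)
    # close the list with builtin (collection-hosted) or legacy (everything else)
    if collections and 'ansible.builtin' not in collections and 'ansible.legacy' not in collections:
        collections.append('ansible.builtin' if role_collection else 'ansible.legacy')
    return collections

_role_search_list(None, 'testns.testcoll', ['other.coll'])   # ['testns.testcoll', 'other.coll', 'ansible.legacy']
_role_search_list('testns.testcoll', None, [])               # ['testns.testcoll', 'ansible.builtin']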


@@ -36,6 +36,7 @@ from ansible.playbook.conditional import Conditional
from ansible.playbook.loop_control import LoopControl
from ansible.playbook.role import Role
from ansible.playbook.taggable import Taggable
from ansible.utils.collection_loader import AnsibleCollectionLoader
from ansible.utils.display import Display
from ansible.utils.sentinel import Sentinel
@@ -177,10 +178,35 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
if isinstance(ds, AnsibleBaseYAMLObject):
new_ds.ansible_pos = ds.ansible_pos
# since this affects the task action parsing, we have to resolve in preprocess instead of in typical validator
default_collection = AnsibleCollectionLoader().default_collection
# use the parent value if our ds doesn't define it
collections_list = ds.get('collections', self.collections)
if collections_list is None:
collections_list = []
if isinstance(collections_list, string_types):
collections_list = [collections_list]
if default_collection and not self._role: # FIXME: and not a collections role
if collections_list:
if default_collection not in collections_list:
collections_list.insert(0, default_collection)
else:
collections_list = [default_collection]
if collections_list and 'ansible.builtin' not in collections_list and 'ansible.legacy' not in collections_list:
collections_list.append('ansible.legacy')
if collections_list:
ds['collections'] = collections_list
# use the args parsing class to determine the action, args,
# and the delegate_to value from the various possible forms
# supported as legacy
args_parser = ModuleArgsParser(task_ds=ds, collection_list=self.collections)
args_parser = ModuleArgsParser(task_ds=ds, collection_list=collections_list)
try:
(action, args, delegate_to) = args_parser.parse()
except AnsibleParserError as e:
@@ -264,6 +290,9 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
if self._parent:
self._parent.post_validate(templar)
if AnsibleCollectionLoader().default_collection:
pass
super(Task, self).post_validate(templar)
def _post_validate_loop(self, attr, value, templar):
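
Likewise not from the diff: a minimal restatement of the task-level preprocessing above, with invented inputs, highlighting that the default collection is only injected for tasks that are not attached to a role; roles prepend it themselves in role/__init__.py, as shown earlier.

def _task_collections(ds_collections, parent_collections, default_collection, in_role):
    # prefer the task's own 'collections' keyword, fall back to the parent's value
    collections = ds_collections if ds_collections is not None else parent_collections
    if isinstance(collections, str):
        collections = [collections]
    collections = list(collections or [])
    if default_collection and not in_role and default_collection not in collections:
        collections.insert(0, default_collection)
    if collections and 'ansible.builtin' not in collections and 'ansible.legacy' not in collections:
        collections.append('ansible.legacy')
    return collections or None  # an empty list is never written back to the ds

_task_collections(None, None, 'testns.testcoll', in_role=False)  # ['testns.testcoll', 'ansible.legacy']
_task_collections(None, None, 'testns.testcoll', in_role=True)   # None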


@@ -42,7 +42,7 @@ class TaskInclude(Task):
BASE = frozenset(('file', '_raw_params')) # directly assigned
OTHER_ARGS = frozenset(('apply',)) # assigned to matching property
VALID_ARGS = BASE.union(OTHER_ARGS) # all valid args
VALID_INCLUDE_KEYWORDS = frozenset(('action', 'args', 'debugger', 'ignore_errors', 'loop', 'loop_control',
VALID_INCLUDE_KEYWORDS = frozenset(('action', 'args', 'collections', 'debugger', 'ignore_errors', 'loop', 'loop_control',
'loop_with', 'name', 'no_log', 'register', 'run_once', 'tags', 'vars',
'when'))


@@ -169,20 +169,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
if module_path:
break
else: # This is a for-else: http://bit.ly/1ElPkyg
# Use Windows version of ping module to check module paths when
# using a connection that supports .ps1 suffixes. We check specifically
# for win_ping here, otherwise the code would look for ping.ps1
if '.ps1' in self._connection.module_implementation_preferences:
ping_module = 'win_ping'
else:
ping_module = 'ping'
module_path2 = self._shared_loader_obj.module_loader.find_plugin(ping_module, self._connection.module_implementation_preferences)
if module_path2 is not None:
raise AnsibleError("The module %s was not found in configured module paths" % (module_name))
else:
raise AnsibleError("The module %s was not found in configured module paths. "
"Additionally, core modules are missing. If this is a checkout, "
"run 'git pull --rebase' to correct this problem." % (module_name))
raise AnsibleError("The module %s was not found in configured module paths" % (module_name))
# insert shared code and arguments into the module
final_environment = dict()


@@ -48,6 +48,7 @@ class AnsibleCollectionLoader(with_metaclass(Singleton, object)):
self._n_configured_paths = [to_native(os.path.expanduser(p), errors='surrogate_or_strict') for p in self._n_configured_paths]
self._n_playbook_paths = []
self._default_collection = None
# pre-inject grafted package maps so we can force them to use the right loader instead of potentially delegating to a "normal" loader
for syn_pkg_def in (p for p in iteritems(_SYNTHETIC_PACKAGES) if p[1].get('graft')):
pkg_name = syn_pkg_def[0]
@@ -70,6 +71,14 @@ class AnsibleCollectionLoader(with_metaclass(Singleton, object)):
def n_collection_paths(self):
return self._n_playbook_paths + self._n_configured_paths
def get_collection_path(self, collection_name):
if not AnsibleCollectionRef.is_valid_collection_name(collection_name):
raise ValueError('{0} is not a valid collection name'.format(to_native(collection_name)))
m = import_module('ansible_collections.{0}'.format(collection_name))
return m.__file__
def set_playbook_paths(self, b_playbook_paths):
if isinstance(b_playbook_paths, string_types):
b_playbook_paths = [b_playbook_paths]
@@ -81,6 +90,15 @@ class AnsibleCollectionLoader(with_metaclass(Singleton, object)):
self._n_playbook_paths = [os.path.join(to_native(p), 'collections') for p in b_playbook_paths if not (p in added_paths or added_paths.add(p))]
# FIXME: only allow setting this once, or handle any necessary cache/package path invalidations internally?
# FIXME: is there a better place to store this?
# FIXME: only allow setting this once
def set_default_collection(self, collection_name):
self._default_collection = collection_name
@property
def default_collection(self):
return self._default_collection
def find_module(self, fullname, path=None):
# this loader is only concerned with items under the Ansible Collections namespace hierarchy, ignore others
if fullname.startswith('ansible_collections.') or fullname == 'ansible_collections':
@@ -432,14 +450,17 @@ def get_collection_role_path(role_name, collection_list=None):
# looks like a valid qualified collection ref; skip the collection_list
role = acr.resource
collection_list = [acr.collection]
subdirs = acr.subdirs
resource = acr.resource
elif not collection_list:
return None # not a FQ role and no collection search list spec'd, nothing to do
else:
role = role_name # treat as unqualified, loop through the collection search list to try and resolve
resource = role_name # treat as unqualified, loop through the collection search list to try and resolve
subdirs = ''
for collection_name in collection_list:
try:
acr = AnsibleCollectionRef(collection_name=collection_name, subdirs=acr.subdirs, resource=acr.resource, ref_type=acr.ref_type)
acr = AnsibleCollectionRef(collection_name=collection_name, subdirs=subdirs, resource=resource, ref_type='role')
# FIXME: error handling/logging; need to catch any import failures and move along
# FIXME: this line shouldn't be necessary, but py2 pkgutil.get_data is delegating back to built-in loader when it shouldn't
@@ -448,7 +469,7 @@ def get_collection_role_path(role_name, collection_list=None):
if pkg is not None:
# the package is now loaded, get the collection's package and ask where it lives
path = os.path.dirname(to_bytes(sys.modules[acr.n_python_package_name].__file__, errors='surrogate_or_strict'))
return role, to_text(path, errors='surrogate_or_strict'), collection_name
return resource, to_text(path, errors='surrogate_or_strict'), collection_name
except IOError:
continue
@@ -459,5 +480,50 @@
return None
_N_COLLECTION_PATH_RE = re.compile(r'/ansible_collections/([^/]+)/([^/]+)')
def get_collection_name_from_path(path):
"""
Return the containing collection name for a given path, or None if the path is not below a configured collection, or
the collection cannot be loaded (eg, the collection is masked by another of the same name higher in the configured
collection roots).
:param path: native-string path to evaluate for collection containment
:return: collection name or None
"""
n_collection_paths = [to_native(os.path.realpath(to_bytes(p))) for p in AnsibleCollectionLoader().n_collection_paths]
b_path = os.path.realpath(to_bytes(path))
n_path = to_native(b_path)
for coll_path in n_collection_paths:
common_prefix = to_native(os.path.commonprefix([b_path, to_bytes(coll_path)]))
if common_prefix == coll_path:
# strip off the common prefix (handle weird testing cases of nested collection roots, eg)
collection_remnant = n_path[len(coll_path):]
# commonprefix may include the trailing /, prepend to the remnant if necessary (eg trailing / on root)
if collection_remnant[0] != '/':
collection_remnant = '/' + collection_remnant
# the path lives under this collection root, see if it maps to a collection
found_collection = _N_COLLECTION_PATH_RE.search(collection_remnant)
if not found_collection:
continue
n_collection_name = '{0}.{1}'.format(*found_collection.groups())
loaded_collection_path = AnsibleCollectionLoader().get_collection_path(n_collection_name)
if not loaded_collection_path:
return None
# ensure we're using the canonical real path, with the bogus __synthetic__ stripped off
b_loaded_collection_path = os.path.dirname(os.path.realpath(to_bytes(loaded_collection_path)))
# if the collection path prefix matches the path prefix we were passed, it's the same collection that's loaded
if os.path.commonprefix([b_path, b_loaded_collection_path]) == b_loaded_collection_path:
return n_collection_name
return None # if not, it's a collection, but not the same collection the loader sees, so ignore it
def set_collection_playbook_paths(b_playbook_paths):
AnsibleCollectionLoader().set_playbook_paths(b_playbook_paths)
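
Hedged usage example for the new helpers above; the paths and collection name are invented. get_collection_name_from_path() maps a filesystem path back to the collection that contains it, returning None when the path sits outside every configured collection root or is shadowed by a same-named collection earlier in the search path; get_collection_path() is the loadability/shadowing probe it relies on.

from ansible.utils.collection_loader import AnsibleCollectionLoader, get_collection_name_from_path

get_collection_name_from_path('/home/me/collections/ansible_collections/testns/testcoll/playbooks/site.yml')
# -> 'testns.testcoll' (assuming that root is configured and this is the collection the loader actually sees)

get_collection_name_from_path('/tmp/not_in_a_collection')
# -> None (no configured collection root is a prefix of this path)

AnsibleCollectionLoader().get_collection_path('testns.testcoll')
# -> the loaded collection package's __file__, whose real parent directory is what the helper above compares against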


@@ -1,4 +1 @@
needs/ssh
shippable/posix/group3
skip/freebsd
skip/osx


@@ -1 +0,0 @@
localhost ansible_connection=ssh ansible_python_interpreter="{{ ansible_playbook_python }}"


@@ -0,0 +1,15 @@
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
def main():
print("junk_before_module_output")
print(json.dumps(dict(changed=False, source='user')))
print("junk_after_module_output")
if __name__ == '__main__':
main()


@@ -2,8 +2,6 @@
set -eux
# Verify that extra data before module JSON output during async call is ignored.
ANSIBLE_DEBUG=0 LC_ALL=bogus ansible-playbook test_async.yml -i inventory -v "$@"
# Verify that the warning exists by examining debug output.
ANSIBLE_DEBUG=1 LC_ALL=bogus ansible-playbook test_async.yml -i inventory -v "$@" \
| grep 'bash: warning: setlocale: LC_ALL: cannot change locale (bogus)' > /dev/null
# Verify that extra data before module JSON output during async call is ignored, and that the warning exists.
ANSIBLE_DEBUG=0 ansible-playbook -i ../../inventory test_async.yml -v "$@" \
| grep 'junk after the JSON data: junk_after_module_output'


@@ -1,9 +1,9 @@
- hosts: localhost
- hosts: testhost
gather_facts: false
tasks:
# make sure non-JSON data before module output is ignored
- name: async ping with invalid locale via ssh
ping:
- name: async ping wrapped in extra junk
junkping:
async: 10
poll: 1
register: result


@@ -0,0 +1,49 @@
# verify default collection action/module lookup works
# since we're running this playbook inside a collection, it will set that collection as the default search for all playbooks
# and non-collection roles to allow for easy migration of old integration tests to collections
- hosts: testhost
tasks:
- testmodule:
- hosts: testhost
vars:
test_role_input: task static default collection
tasks:
- import_role:
name: testrole # unqualified role lookup should work; inheriting from the containing collection
- assert:
that:
- test_role_output.msg == test_role_input
- vars:
test_role_input: task static legacy embedded default collection
block:
- import_role:
name: non_coll_role
- assert:
that:
- test_role_output.msg == test_role_input
- hosts: testhost
vars:
test_role_input: keyword static default collection
roles:
- testrole
tasks:
- debug: var=test_role_input
- debug: var=test_role_output
- assert:
that:
- test_role_output.msg == test_role_input
- hosts: testhost
vars:
test_role_input: task dynamic default collection
tasks:
- include_role:
name: testrole # unqualified role lookup should work; inheriting from the containing collection
- include_role:
name: non_coll_role
- assert:
that:
- testmodule_out_from_non_coll_role is success
- embedded_module_out_from_non_coll_role is success


@@ -0,0 +1,13 @@
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
def main():
print(json.dumps(dict(changed=False, source='collection_embedded_non_collection_role')))
if __name__ == '__main__':
main()


@@ -0,0 +1,29 @@
- testmodule:
register: testmodule_out_from_non_coll_role
- embedded_module:
register: embedded_module_out_from_non_coll_role
- name: check collections list from role meta
plugin_lookup:
register: pluginlookup_out
- debug: var=pluginlookup_out
- debug:
msg: '{{ test_role_input | default("(undefined)") }}'
register: test_role_output
- assert:
that:
- test_role_input is not defined or test_role_input == test_role_output.msg
- vars:
test_role_input: include another non-coll role
block:
- include_role:
name: non_coll_role_to_call
- assert:
that:
- test_role_output.msg == test_role_input


@@ -0,0 +1,7 @@
- debug:
msg: '{{ test_role_input | default("(undefined)") }}'
register: test_role_output
- assert:
that:
- test_role_input is not defined or test_role_input == test_role_output.msg


@@ -193,15 +193,6 @@
- testmodule_out.source == 'user'
- systestmodule_out.source == 'sys'
# FIXME: this won't work until collections list gets passed through task templar
# - name: exercise unqualified filters/tests/lookups
# assert:
# that:
# - "'data' | testfilter == 'data_from_userdir'"
# - "'from_user' is testtest"
# - lookup('mylookup') == 'lookup_from_user_dir'
# test keyword-static execution of a FQ collection-backed role with "tasks/main.yaml"
- name: verify collection-backed role execution (keyword static)
hosts: testhost


@@ -7,7 +7,8 @@ export ANSIBLE_GATHERING=explicit
export ANSIBLE_GATHER_SUBSET=minimal
export ANSIBLE_HOST_PATTERN_MISMATCH=error
# FIXME: just use INVENTORY_PATH as-is once ansible-test sets the right dir
# FUTURE: just use INVENTORY_PATH as-is once ansible-test sets the right dir
ipath=../../$(basename "${INVENTORY_PATH}")
export INVENTORY_PATH="$ipath"
@@ -26,6 +27,9 @@ if [[ ${INVENTORY_PATH} == *.winrm ]]; then
export TEST_PLAYBOOK=windows.yml
else
export TEST_PLAYBOOK=posix.yml
echo "testing default collection support"
ansible-playbook -i "${INVENTORY_PATH}" collection_root_user/ansible_collections/testns/testcoll/playbooks/default_collection_playbook.yml
fi
# run test playbook


@@ -5714,6 +5714,7 @@ lib/ansible/modules/windows/win_webpicmd.ps1 pslint:PSAvoidUsingInvokeExpression
lib/ansible/modules/windows/win_xml.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/parsing/vault/__init__.py pylint:blacklisted-name
lib/ansible/playbook/base.py pylint:blacklisted-name
lib/ansible/playbook/collectionsearch.py required-and-default-attributes # https://github.com/ansible/ansible/issues/61460
lib/ansible/playbook/helpers.py pylint:blacklisted-name
lib/ansible/playbook/role/__init__.py pylint:blacklisted-name
lib/ansible/plugins/action/aireos.py action-plugin-docs # base class for deprecated network platform modules using `connection: local`