run playbook from collections (#67435)
* fixes for collection playbooks
  - add FQCN invocation, also handles extensions
  - bring import_playbook into new normal
  - avoid adding collection playbooks to adjacency
  - deal with extensions or lack of 'em
  - added tests
  - fix bugs with multiple playbooks supplied
  - unicode/bytes/native fixes
  - fix ModuleNotFoundError for < py3.7
  - tests
parent 8f9cf456b0, commit e05c62547b
11 changed files with 173 additions and 60 deletions
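For orientation (editor's note, not part of the diff): the change lets ansible-playbook and import_playbook accept a fully qualified collection name such as testns.testcoll.default_collection_playbook, with or without a .yml/.yaml extension, and resolve it to a file under the collection's playbooks/ directory. A minimal standalone sketch of that mapping, assuming a hypothetical candidate_playbook_paths helper and an explicit collections root (neither is part of the commit):

import os

PB_EXTENSIONS = ('.yml', '.yaml')  # same constant the commit adds to the collection loader

def candidate_playbook_paths(fqcn, collections_root):
    # Hypothetical helper: map 'namespace.collection.playbook[.yml]' onto the file
    # paths that would be probed under <root>/ansible_collections/<ns>/<coll>/playbooks/
    parts = fqcn.split('.')
    if len(parts) < 3:
        raise ValueError('expected namespace.collection.playbook with optional extension')
    namespace, collection = parts[0], parts[1]
    resource = '.'.join(parts[2:])  # may or may not carry an extension
    base = os.path.join(collections_root, 'ansible_collections', namespace, collection, 'playbooks')
    if resource.endswith(PB_EXTENSIONS):
        return [os.path.join(base, resource)]
    # no extension supplied: try both supported extensions, as the new loader code does
    return [os.path.join(base, resource + ext) for ext in PB_EXTENSIONS]

print(candidate_playbook_paths('testns.testcoll.default_collection_playbook', '/tmp/collections'))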
@@ -15,10 +15,10 @@ from ansible.errors import AnsibleError
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.module_utils._text import to_bytes
from ansible.playbook.block import Block
from ansible.utils.display import Display
from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path
from ansible.plugins.loader import add_all_plugin_dirs
from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path, _get_collection_playbook_path
from ansible.utils.display import Display

display = Display()
@@ -77,27 +77,35 @@ class PlaybookCLI(CLI):
# initial error check, to make sure all specified playbooks are accessible
# before we start running anything through the playbook executor

# also prep plugin paths
b_playbook_dirs = []
for playbook in context.CLIARGS['args']:
if not os.path.exists(playbook):
raise AnsibleError("the playbook: %s could not be found" % playbook)
if not (os.path.isfile(playbook) or stat.S_ISFIFO(os.stat(playbook).st_mode)):
raise AnsibleError("the playbook: %s does not appear to be a file" % playbook)

b_playbook_dir = os.path.dirname(os.path.abspath(to_bytes(playbook, errors='surrogate_or_strict')))
# load plugins from all playbooks in case they add callbacks/inventory/etc
add_all_plugin_dirs(b_playbook_dir)
# resolve if it is collection playbook with FQCN notation, if not, leaves unchanged
resource = _get_collection_playbook_path(playbook)
if resource is not None:
playbook_collection = resource[2]
else:
# not an FQCN so must be a file
if not os.path.exists(playbook):
raise AnsibleError("the playbook: %s could not be found" % playbook)
if not (os.path.isfile(playbook) or stat.S_ISFIFO(os.stat(playbook).st_mode)):
raise AnsibleError("the playbook: %s does not appear to be a file" % playbook)

b_playbook_dirs.append(b_playbook_dir)
# check if playbook is from collection (path can be passed directly)
playbook_collection = _get_collection_name_from_path(playbook)

AnsibleCollectionConfig.playbook_paths = b_playbook_dirs
# don't add collection playbooks to adjacency search path
if not playbook_collection:
# setup dirs to enable loading plugins from all playbooks in case they add callbacks/inventory/etc
b_playbook_dir = os.path.dirname(os.path.abspath(to_bytes(playbook, errors='surrogate_or_strict')))
add_all_plugin_dirs(b_playbook_dir)
b_playbook_dirs.append(b_playbook_dir)

playbook_collection = _get_collection_name_from_path(b_playbook_dirs[0])

if playbook_collection:
display.warning("running playbook inside collection {0}".format(playbook_collection))
AnsibleCollectionConfig.default_collection = playbook_collection
if b_playbook_dirs:
# allow collections adjacent to these playbooks
# we use list copy to avoid opening up 'adjacency' in the previous loop
AnsibleCollectionConfig.playbook_paths = b_playbook_dirs

# don't deal with privilege escalation or passwords when we don't need to
if not (context.CLIARGS['listhosts'] or context.CLIARGS['listtasks'] or
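Editor's sketch of the control flow in the hunk above: an argument is first tried as an FQCN; otherwise it must be an existing file, which may still live inside an installed collection, and only playbooks that are not collection playbooks contribute their directory to the 'adjacent collections' search path. The helper callables below are placeholders for ansible's private _get_collection_playbook_path / _get_collection_name_from_path, so this is an illustration rather than the shipped code:

import os

def prep_playbooks(playbooks, resolve_fqcn, collection_for_path):
    # resolve_fqcn(name) -> (resource, path, collection) or None
    # collection_for_path(path) -> collection name or None
    adjacent_dirs = []
    for playbook in playbooks:
        resource = resolve_fqcn(playbook)
        if resource is not None:
            playbook_collection = resource[2]
        else:
            # not an FQCN, so it has to be a real file on disk
            if not os.path.exists(playbook):
                raise ValueError('the playbook: %s could not be found' % playbook)
            # a plain path can still point inside an installed collection
            playbook_collection = collection_for_path(playbook)
        if not playbook_collection:
            # only non-collection playbooks feed the adjacency search path
            adjacent_dirs.append(os.path.dirname(os.path.abspath(playbook)))
    return adjacent_dirs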
@@ -30,10 +30,13 @@ from ansible.plugins.loader import become_loader, connection_loader, shell_loade
from ansible.playbook import Playbook
from ansible.template import Templar
from ansible.utils.helpers import pct_to_int
from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path, _get_collection_playbook_path
from ansible.utils.path import makedirs_safe
from ansible.utils.ssh_functions import set_default_transport
from ansible.utils.display import Display

display = Display()
@@ -87,7 +90,24 @@ class PlaybookExecutor:
list(shell_loader.all(class_only=True))
list(become_loader.all(class_only=True))

for playbook_path in self._playbooks:
for playbook in self._playbooks:

# deal with FQCN
resource = _get_collection_playbook_path(playbook)
if resource is not None:
playbook_path = resource[1]
playbook_collection = resource[2]
else:
playbook_path = playbook
# not fqcn, but might still be collection playbook
playbook_collection = _get_collection_name_from_path(playbook)

if playbook_collection:
display.warning("running playbook inside collection {0}".format(playbook_collection))
AnsibleCollectionConfig.default_collection = playbook_collection
else:
AnsibleCollectionConfig.default_collection = None

pb = Playbook.load(playbook_path, variable_manager=self._variable_manager, loader=self._loader)
# FIXME: move out of inventory self._inventory.set_playbook_basedir(os.path.realpath(os.path.dirname(playbook_path)))
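Editor's note on the executor loop above: because several playbooks can be supplied in one run, the default collection is recomputed for every entry and explicitly cleared when a playbook is not a collection playbook, so it cannot leak from one playbook to the next. A compact sketch, again with placeholder callables rather than ansible's private helpers:

def iterate_playbooks(playbooks, resolve_fqcn, collection_for_path):
    # yields (path_to_load, default_collection_or_None) per supplied playbook
    for playbook in playbooks:
        resource = resolve_fqcn(playbook)
        if resource is not None:
            playbook_path, playbook_collection = resource[1], resource[2]
        else:
            playbook_path = playbook
            playbook_collection = collection_for_path(playbook)
        yield playbook_path, (playbook_collection or None)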
@@ -22,6 +22,7 @@ __metaclass__ = type
import os

from ansible.errors import AnsibleParserError, AnsibleAssertionError
from ansible.module_utils._text import to_bytes
from ansible.module_utils.six import iteritems, string_types
from ansible.parsing.splitter import split_args, parse_kv
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping

@@ -29,6 +30,8 @@ from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.conditional import Conditional
from ansible.playbook.taggable import Taggable
from ansible.utils.collection_loader import AnsibleCollectionConfig
from ansible.utils.collection_loader._collection_finder import _get_collection_name_from_path, _get_collection_playbook_path
from ansible.template import Templar
from ansible.utils.display import Display
@@ -69,10 +72,29 @@ class PlaybookInclude(Base, Conditional, Taggable):
pb = Playbook(loader=loader)

file_name = templar.template(new_obj.import_playbook)
if not os.path.isabs(file_name):
file_name = os.path.join(basedir, file_name)

pb._load_playbook_data(file_name=file_name, variable_manager=variable_manager, vars=self.vars.copy())
# check for FQCN
resource = _get_collection_playbook_path(file_name)
if resource is not None:
playbook = resource[1]
playbook_collection = resource[2]
else:
# not an FQCN, try as a path
playbook = file_name
if not os.path.isabs(playbook):
playbook = os.path.join(basedir, playbook)

# might still be a collection playbook
playbook_collection = _get_collection_name_from_path(playbook)

if playbook_collection:
# it is a collection playbook, setup default collections
AnsibleCollectionConfig.default_collection = playbook_collection
else:
# it is NOT a collection playbook, setup adjacent paths
AnsibleCollectionConfig.playbook_paths.append(os.path.dirname(os.path.abspath(to_bytes(playbook, errors='surrogate_or_strict'))))

pb._load_playbook_data(file_name=playbook, variable_manager=variable_manager, vars=self.vars.copy())

# finally, update each loaded playbook entry with any variables specified
# on the included playbook and/or any tags which may have been set

@@ -90,7 +112,7 @@ class PlaybookInclude(Base, Conditional, Taggable):
entry.vars = temp_vars
entry.tags = list(set(entry.tags).union(new_obj.tags))
if entry._included_path is None:
entry._included_path = os.path.dirname(file_name)
entry._included_path = os.path.dirname(playbook)

# Check to see if we need to forward the conditionals on to the included
# plays. If so, we can take a shortcut here and simply prepend them to
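Editor's sketch of the import_playbook handling above: the templated target is tried as an FQCN first; anything else is treated as a path relative to the including playbook's basedir, and a resolved collection playbook switches the default collection while a plain file only extends the adjacent playbook paths. Placeholder callables stand in for the private loaders:

import os

def resolve_import_target(target, basedir, resolve_fqcn, collection_for_path):
    # returns (path_to_load, collection_or_None)
    resource = resolve_fqcn(target)
    if resource is not None:
        return resource[1], resource[2]
    playbook = target if os.path.isabs(target) else os.path.join(basedir, target)
    return playbook, collection_for_path(playbook)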
@@ -38,12 +38,6 @@ except ImportError:
SpecifierSet = None
Version = None

try:
# use C version if possible for speedup
from yaml import CSafeLoader as SafeLoader
except ImportError:
from yaml import SafeLoader

try:
import importlib.util
imp = None

@@ -312,7 +306,7 @@ class PluginLoader:
parts = self.package.split('.')[1:]
for parent_mod in parts:
m = getattr(m, parent_mod)
self.package_path = os.path.dirname(m.__file__)
self.package_path = to_text(os.path.dirname(m.__file__), errors='surrogate_or_strict')
if subdirs:
return self._all_directories(self.package_path)
return [self.package_path]
@@ -331,12 +325,15 @@ class PluginLoader:
# look in any configured plugin paths, allow one level deep for subcategories
if self.config is not None:
for path in self.config:
path = os.path.realpath(os.path.expanduser(path))
path = os.path.abspath(os.path.expanduser(path))
if subdirs:
contents = glob.glob("%s/*" % path) + glob.glob("%s/*/*" % path)
for c in contents:
c = to_text(c, errors='surrogate_or_strict')
if os.path.isdir(c) and c not in ret:
ret.append(PluginPathContext(c, False))

path = to_text(path, errors='surrogate_or_strict')
if path not in ret:
ret.append(PluginPathContext(path, False))
@@ -657,16 +654,18 @@ class PluginLoader:
# we need to make sure we don't want to add additional directories
# (add_directory()) once we start using the iterator.
# We can use _get_paths_with_context() since add_directory() forces a cache refresh.
for path_context in (p for p in self._get_paths_with_context() if p.path not in self._searched_paths and os.path.isdir(p.path)):
path = path_context.path
for path_with_context in (p for p in self._get_paths_with_context() if p.path not in self._searched_paths and os.path.isdir(to_bytes(p.path))):
path = path_with_context.path
b_path = to_bytes(path)
display.debug('trying %s' % path)
plugin_load_context.load_attempts.append(path)
internal = path_with_context.internal
try:
full_paths = (os.path.join(path, f) for f in os.listdir(path))
full_paths = (os.path.join(b_path, f) for f in os.listdir(b_path))
except OSError as e:
display.warning("Error accessing plugin paths: %s" % to_text(e))

for full_path in (f for f in full_paths if os.path.isfile(f) and not f.endswith('__init__.py')):
for full_path in (to_native(f) for f in full_paths if os.path.isfile(f) and not f.endswith(b'__init__.py')):
full_name = os.path.basename(full_path)

# HACK: We have no way of executing python byte compiled files as ansible modules so specifically exclude them
@@ -674,15 +673,15 @@ class PluginLoader:
# For all other plugins we want .pyc and .pyo should be valid
if any(full_path.endswith(x) for x in C.MODULE_IGNORE_EXTS):
continue

splitname = os.path.splitext(full_name)
base_name = splitname[0]
internal = path_context.internal
try:
extension = splitname[1]
except IndexError:
extension = ''

# everything downstream expects unicode
full_path = to_text(full_path, errors='surrogate_or_strict')
# Module found, now enter it into the caches that match this file
if base_name not in self._plugin_path_cache['']:
self._plugin_path_cache[''][base_name] = PluginPathContext(full_path, internal)
@@ -895,7 +894,7 @@ class PluginLoader:
found_in_cache = True

for i in self._get_paths():
all_matches.extend(glob.glob(os.path.join(i, "*.py")))
all_matches.extend(glob.glob(to_native(os.path.join(i, "*.py"))))

loaded_modules = set()
for path in sorted(all_matches, key=os.path.basename):
@@ -67,8 +67,6 @@ class _AnsibleCollectionConfig(type):

@default_collection.setter
def default_collection(cls, value):
if cls._default_collection:
raise ValueError('default collection {0} has already been configured'.format(value))

cls._default_collection = value
@@ -40,6 +40,14 @@ except ImportError:
_meta_yml_to_dict = None


if not hasattr(__builtins__, 'ModuleNotFoundError'):
# this was introduced in Python 3.6
ModuleNotFoundError = ImportError


PB_EXTENSIONS = ('.yml', '.yaml')


class _AnsibleCollectionFinder:
def __init__(self, paths=None, scan_sys_paths=True):
# TODO: accept metadata loader override
@@ -663,7 +671,7 @@ class AnsibleCollectionRef:
VALID_REF_TYPES = frozenset(to_text(r) for r in ['action', 'become', 'cache', 'callback', 'cliconf', 'connection',
'doc_fragments', 'filter', 'httpapi', 'inventory', 'lookup',
'module_utils', 'modules', 'netconf', 'role', 'shell', 'strategy',
'terminal', 'test', 'vars'])
'terminal', 'test', 'vars', 'playbook'])

# FIXME: tighten this up to match Python identifier reqs, etc
VALID_COLLECTION_NAME_RE = re.compile(to_text(r'^(\w+)\.(\w+)$'))
@@ -708,6 +716,8 @@ class AnsibleCollectionRef:

if self.ref_type == u'role':
package_components.append(u'roles')
elif self.ref_type == u'playbook':
package_components.append(u'playbooks')
else:
# we assume it's a plugin
package_components += [u'plugins', self.ref_type]
@@ -716,8 +726,8 @@ class AnsibleCollectionRef:
package_components.append(self.subdirs)
fqcr_components.append(self.subdirs)

if self.ref_type == u'role':
# roles are their own resource
if self.ref_type in (u'role', u'playbook'):
# playbooks and roles are their own resource
package_components.append(self.resource)

fqcr_components.append(self.resource)
@@ -751,10 +761,17 @@ class AnsibleCollectionRef:

ref = to_text(ref, errors='strict')
ref_type = to_text(ref_type, errors='strict')
ext = ''

resource_splitname = ref.rsplit(u'.', 1)
package_remnant = resource_splitname[0]
resource = resource_splitname[1]
if ref_type == u'playbook' and ref.endswith(PB_EXTENSIONS):
resource_splitname = ref.rsplit(u'.', 2)
package_remnant = resource_splitname[0]
resource = resource_splitname[1]
ext = '.' + resource_splitname[2]
else:
resource_splitname = ref.rsplit(u'.', 1)
package_remnant = resource_splitname[0]
resource = resource_splitname[1]

# split the left two components of the collection package name off, anything remaining is plugin-type
# specific subdirs to be added back on below the plugin type
@@ -766,7 +783,7 @@ class AnsibleCollectionRef:

collection_name = u'.'.join(package_splitname[0:2])

return AnsibleCollectionRef(collection_name, subdirs, resource, ref_type)
return AnsibleCollectionRef(collection_name, subdirs, resource + ext, ref_type)

@staticmethod
def try_parse_fqcr(ref, ref_type):
@@ -829,23 +846,55 @@ class AnsibleCollectionRef:
return bool(re.match(AnsibleCollectionRef.VALID_COLLECTION_NAME_RE, collection_name))


def _get_collection_role_path(role_name, collection_list=None):
acr = AnsibleCollectionRef.try_parse_fqcr(role_name, 'role')
def _get_collection_playbook_path(playbook):

acr = AnsibleCollectionRef.try_parse_fqcr(playbook, u'playbook')
if acr:
try:
# get_collection_path
pkg = import_module(acr.n_python_collection_package_name)
except (IOError, ModuleNotFoundError) as e:
# leaving e as debug target, even though not used in normal code
pkg = None

if pkg:
cpath = os.path.join(sys.modules[acr.n_python_collection_package_name].__file__.replace('__synthetic__', 'playbooks'))
path = os.path.join(cpath, to_native(acr.resource))
if os.path.exists(to_bytes(path)):
return acr.resource, path, acr.collection
elif not acr.resource.endswith(PB_EXTENSIONS):
for ext in PB_EXTENSIONS:
path = os.path.join(cpath, to_native(acr.resource + ext))
if os.path.exists(to_bytes(path)):
return acr.resource, path, acr.collection
return None


def _get_collection_role_path(role_name, collection_list=None):
return _get_collection_resource_path(role_name, u'role', collection_list)


def _get_collection_resource_path(name, ref_type, collection_list=None):

if ref_type == u'playbook':
# they are handled a bit diff due to 'extension variance' and no collection_list
return _get_collection_playbook_path(name)

acr = AnsibleCollectionRef.try_parse_fqcr(name, ref_type)
if acr:
# looks like a valid qualified collection ref; skip the collection_list
collection_list = [acr.collection]
subdirs = acr.subdirs
resource = acr.resource
elif not collection_list:
return None  # not a FQ role and no collection search list spec'd, nothing to do
return None  # not a FQ and no collection search list spec'd, nothing to do
else:
resource = role_name  # treat as unqualified, loop through the collection search list to try and resolve
resource = name  # treat as unqualified, loop through the collection search list to try and resolve
subdirs = ''

for collection_name in collection_list:
try:
acr = AnsibleCollectionRef(collection_name=collection_name, subdirs=subdirs, resource=resource, ref_type='role')
acr = AnsibleCollectionRef(collection_name=collection_name, subdirs=subdirs, resource=resource, ref_type=ref_type)
# FIXME: error handling/logging; need to catch any import failures and move along
pkg = import_module(acr.n_python_package_name)
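Editor's usage sketch for the new helper above: _get_collection_playbook_path is a private loader function, so this is illustrative only; per the code it returns None for input that is not a resolvable FQCN, or a (resource, path, collection) triple when the playbook exists inside an installed collection:

# illustrative only: private helper shown in the diff above, not a public API
from ansible.utils.collection_loader._collection_finder import _get_collection_playbook_path

result = _get_collection_playbook_path('testns.testcoll.default_collection_playbook')
if result is not None:
    resource, path, collection = result
    print('playbook %s resolved to %s in collection %s' % (resource, path, collection))
else:
    print('not an installed collection playbook; treat the argument as a file path')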
@@ -854,7 +903,7 @@ def _get_collection_role_path(role_name, collection_list=None):
path = os.path.dirname(to_bytes(sys.modules[acr.n_python_package_name].__file__, errors='surrogate_or_strict'))
return resource, to_text(path, errors='surrogate_or_strict'), collection_name

except IOError:
except (IOError, ModuleNotFoundError) as e:
continue
except Exception as ex:
# FIXME: pick out typical import errors first, then error logging
@@ -872,8 +921,8 @@ def _get_collection_name_from_path(path):
:return: collection name or None
"""

# FIXME: mess with realpath canonicalization or not?
path = to_native(path)
# ensure we compare full paths since pkg path will be abspath
path = to_native(os.path.abspath(to_bytes(path)))

path_parts = path.split('/')
if path_parts.count('ansible_collections') != 1:
@@ -5,7 +5,7 @@ __metaclass__ = type

DOCUMENTATION = """
module: fakemodule
short_desciptoin: fake module
short_desciption: fake module
description:
- this is a fake module
options:
@@ -11,5 +11,4 @@ OPTIONS (= is mandatory):

AUTHOR: me

SHORT_DESCIPTOIN: fake module

SHORT_DESCIPTION: fake module
@@ -0,0 +1,2 @@
- import_playbook: testns.testcoll.default_collection_playbook.yml
- import_playbook: testns.testcoll.default_collection_playbook
@@ -6,6 +6,7 @@ export ANSIBLE_COLLECTIONS_PATH=$PWD/collection_root_user:$PWD/collection_root_s
export ANSIBLE_GATHERING=explicit
export ANSIBLE_GATHER_SUBSET=minimal
export ANSIBLE_HOST_PATTERN_MISMATCH=error
export ANSIBLE_COLLECTIONS_ON_ANSIBLE_VERSION_MISMATCH=0

# FUTURE: just use INVENTORY_PATH as-is once ansible-test sets the right dir
ipath=../../$(basename "${INVENTORY_PATH:-../../inventory}")
@@ -75,6 +76,23 @@ echo "--- validating inventory"
# test collection inventories
ansible-playbook inventory_test.yml -i a.statichost.yml -i redirected.statichost.yml "$@"

if [[ ${INVENTORY_PATH} != *.winrm ]]; then
# base invocation tests
ansible-playbook -i "${INVENTORY_PATH}" -v invocation_tests.yml "$@"

# run playbook from collection, test default again, but with FQCN
ansible-playbook -i "${INVENTORY_PATH}" testns.testcoll.default_collection_playbook.yml "$@"

# run playbook from collection, test default again, but with FQCN and no extension
ansible-playbook -i "${INVENTORY_PATH}" testns.testcoll.default_collection_playbook "$@"

# run playbook that imports from collection
ansible-playbook -i "${INVENTORY_PATH}" import_collection_pb.yml "$@"
fi

# test collection inventories
ansible-playbook inventory_test.yml -i a.statichost.yml -i redirected.statichost.yml "$@"

# test adjacent with --playbook-dir
export ANSIBLE_COLLECTIONS_PATH=''
ANSIBLE_INVENTORY_ANY_UNPARSED_IS_FAILED=1 ansible-inventory --list --export --playbook-dir=. -v "$@"
@@ -527,8 +527,6 @@ def test_default_collection_config():
assert AnsibleCollectionConfig.default_collection is None
AnsibleCollectionConfig.default_collection = 'foo.bar'
assert AnsibleCollectionConfig.default_collection == 'foo.bar'
with pytest.raises(ValueError):
AnsibleCollectionConfig.default_collection = 'bar.baz'


def test_default_collection_detection():