Replace the inhouse collection dependency resolver with resolvelib
PR #72591 This change: * Adds an artifacts manager that abstracts away extracting the metadata from artifacts, downloading and caching them in a temporary location. * Adds `resolvelib` to direct ansible-core dependencies[0]. * Implements a `resolvelib`-based dependency resolver for `collection` subcommands that replaces the legacy in-house code. This is a dependency resolution library that pip 20.3+ uses by default. It's now integrated for use for the collection dependency resolution in ansible-galaxy CLI. * Refactors of the `ansible-galaxy collection` CLI. In particular, it: - reimplements most of the `download`, `install`, `list` and `verify` subcommands from scratch; - reuses helper bits previously moved out into external modules; - replaces the old in-house resolver with a more clear implementation based on the resolvelib library[0][1][2]. * Adds a multi Galaxy API proxy layer that abstracts accessing the version and dependencies via API or local artifacts manager. * Makes `GalaxyAPI` instances sortable. * Adds string representation methods to `GalaxyAPI`. * Adds dev representation to `GalaxyAPI`. * Removes unnecessary integration and unit tests. * Aligns the tests with the new expectations. * Adds more tests, integration ones in particular. [0]: https://pypi.org/p/resolvelib [1]: https://github.com/sarugaku/resolvelib [2]: https://pradyunsg.me/blog/2020/03/27/pip-resolver-testing Co-Authored-By: Jordan Borean <jborean93@gmail.com> Co-Authored-By: Matt Clay <matt@mystile.com> Co-Authored-By: Sam Doran <sdoran@redhat.com> Co-Authored-By: Sloane Hertel <shertel@redhat.com> Co-Authored-By: Sviatoslav Sydorenko <webknjaz@redhat.com> Signed-Off-By: Sviatoslav Sydorenko <webknjaz@redhat.com>
This commit is contained in:
parent
82b74f7fd7
commit
595413d113
35 changed files with 3255 additions and 1915 deletions
|
@ -0,0 +1,27 @@
|
|||
---
|
||||
breaking_changes:
|
||||
- >-
|
||||
Replaced the in-tree dependency resolver with an external implementation
|
||||
that pip >= 20.3 uses now by default — ``resolvelib``.
|
||||
(https://github.com/ansible/ansible/issues/71784)
|
||||
- >-
|
||||
Made SCM collections be reinstalled regardless of ``--force`` being
|
||||
present.
|
||||
major_changes:
|
||||
- |
|
||||
Declared ``resolvelib >= 0.5.3, < 0.6.0`` a direct dependency of
|
||||
ansible-core. Refs:
|
||||
- https://github.com/sarugaku/resolvelib
|
||||
- https://pypi.org/p/resolvelib
|
||||
- https://pradyunsg.me/blog/2020/03/27/pip-resolver-testing
|
||||
- >-
|
||||
It became possible to install Ansible Collections from local folders and
|
||||
namespaces folder similar to SCM structure with multiple collections.
|
||||
minor_changes:
|
||||
- >-
|
||||
Refactored ``ansible-galaxy collection [download|install|list|verify]``
|
||||
CLI subcommands with the public interface kept intact.
|
||||
- >-
|
||||
The new dependency resolver prefers ``MANIFEST.json`` over ``galaxy.yml``
|
||||
if it exists in the target directory.
|
||||
...
|
|
@ -1,5 +1,5 @@
|
|||
# Copyright: (c) 2013, James Cammarata <jcammarata@ansible.com>
|
||||
# Copyright: (c) 2018, Ansible Project
|
||||
# Copyright: (c) 2018-2021, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
|
@ -24,7 +24,6 @@ from ansible.galaxy import Galaxy, get_collections_galaxy_meta_info
|
|||
from ansible.galaxy.api import GalaxyAPI
|
||||
from ansible.galaxy.collection import (
|
||||
build_collection,
|
||||
CollectionRequirement,
|
||||
download_collections,
|
||||
find_existing_collections,
|
||||
install_collections,
|
||||
|
@ -33,6 +32,10 @@ from ansible.galaxy.collection import (
|
|||
validate_collection_path,
|
||||
verify_collections
|
||||
)
|
||||
from ansible.galaxy.collection.concrete_artifact_manager import (
|
||||
ConcreteArtifactsManager,
|
||||
)
|
||||
from ansible.galaxy.dependency_resolution.dataclasses import Requirement
|
||||
|
||||
from ansible.galaxy.role import GalaxyRole
|
||||
from ansible.galaxy.token import BasicAuthToken, GalaxyToken, KeycloakToken, NoTokenSentinel
|
||||
|
@ -52,6 +55,26 @@ display = Display()
|
|||
urlparse = six.moves.urllib.parse.urlparse
|
||||
|
||||
|
||||
def with_collection_artifacts_manager(wrapped_method):
    """Inject an artifacts manager if not passed explicitly.

    This decorator constructs a ConcreteArtifactsManager and maintains
    the related temporary directory auto-cleanup around the target
    method invocation.

    :param wrapped_method: CLI handler accepting an ``artifacts_manager``
        keyword argument.
    :return: The wrapped handler.
    """
    from functools import wraps

    @wraps(wrapped_method)  # preserve the wrapped handler's name/docstring
    def method_wrapper(*args, **kwargs):
        # An explicitly supplied manager wins; skip the tmpdir setup.
        if 'artifacts_manager' in kwargs:
            return wrapped_method(*args, **kwargs)

        # The context manager guarantees the temporary download/cache
        # directory is removed once the wrapped method returns.
        with ConcreteArtifactsManager.under_tmpdir(
                C.DEFAULT_LOCAL_TMP,
                validate_certs=not context.CLIARGS['ignore_certs'],
        ) as concrete_artifact_cm:
            kwargs['artifacts_manager'] = concrete_artifact_cm
            return wrapped_method(*args, **kwargs)
    return method_wrapper
|
||||
|
||||
|
||||
def _display_header(path, h1, h2, w1=10, w2=7):
|
||||
display.display('\n# {0}\n{1:{cwidth}} {2:{vwidth}}\n{3} {4}\n'.format(
|
||||
path,
|
||||
|
@ -76,20 +99,19 @@ def _display_role(gr):
|
|||
|
||||
def _display_collection(collection, cwidth=10, vwidth=7, min_cwidth=10, min_vwidth=7):
    """Print one collection row (FQCN and version) in fixed-width columns.

    :param collection: Object exposing ``fqcn`` and ``ver`` attributes.
    :param cwidth: Requested FQCN column width.
    :param vwidth: Requested version column width.
    :param min_cwidth: Lower bound for the FQCN column width.
    :param min_vwidth: Lower bound for the version column width.
    """
    # NOTE(review): the diff rendering interleaved removed and added kwargs
    # here (duplicate fqcn/version keywords); this is the post-change form.
    display.display('{fqcn:{cwidth}} {version:{vwidth}}'.format(
        fqcn=to_text(collection.fqcn),
        version=collection.ver,
        cwidth=max(cwidth, min_cwidth),  # Make sure the width isn't smaller than the header
        vwidth=max(vwidth, min_vwidth)
    ))
|
||||
|
||||
|
||||
def _get_collection_widths(collections):
|
||||
if is_iterable(collections):
|
||||
fqcn_set = set(to_text(c) for c in collections)
|
||||
version_set = set(to_text(c.latest_version) for c in collections)
|
||||
else:
|
||||
fqcn_set = set([to_text(collections)])
|
||||
version_set = set([collections.latest_version])
|
||||
if not is_iterable(collections):
|
||||
collections = (collections, )
|
||||
|
||||
fqcn_set = {to_text(c.fqcn) for c in collections}
|
||||
version_set = {to_text(c.ver) for c in collections}
|
||||
|
||||
fqcn_length = len(max(fqcn_set, key=len))
|
||||
version_length = len(max(version_set, key=len))
|
||||
|
@ -447,7 +469,7 @@ class GalaxyCLI(CLI):
|
|||
|
||||
# Need to filter out empty strings or non truthy values as an empty server list env var is equal to [''].
|
||||
server_list = [s for s in C.GALAXY_SERVER_LIST or [] if s]
|
||||
for server_key in server_list:
|
||||
for server_priority, server_key in enumerate(server_list, start=1):
|
||||
# Config definitions are looked up dynamically based on the C.GALAXY_SERVER_LIST entry. We look up the
|
||||
# section [galaxy_server.<server>] for the values url, username, password, and token.
|
||||
config_dict = dict((k, server_config_def(server_key, k, req)) for k, req in server_def)
|
||||
|
@ -486,7 +508,11 @@ class GalaxyCLI(CLI):
|
|||
server_options['token'] = GalaxyToken(token=token_val)
|
||||
|
||||
server_options.update(galaxy_options)
|
||||
config_servers.append(GalaxyAPI(self.galaxy, server_key, **server_options))
|
||||
config_servers.append(GalaxyAPI(
|
||||
self.galaxy, server_key,
|
||||
priority=server_priority,
|
||||
**server_options
|
||||
))
|
||||
|
||||
cmd_server = context.CLIARGS['api_server']
|
||||
cmd_token = GalaxyToken(token=context.CLIARGS['api_key'])
|
||||
|
@ -497,15 +523,21 @@ class GalaxyCLI(CLI):
|
|||
if config_server:
|
||||
self.api_servers.append(config_server)
|
||||
else:
|
||||
self.api_servers.append(GalaxyAPI(self.galaxy, 'cmd_arg', cmd_server, token=cmd_token,
|
||||
**galaxy_options))
|
||||
self.api_servers.append(GalaxyAPI(
|
||||
self.galaxy, 'cmd_arg', cmd_server, token=cmd_token,
|
||||
priority=len(config_servers) + 1,
|
||||
**galaxy_options
|
||||
))
|
||||
else:
|
||||
self.api_servers = config_servers
|
||||
|
||||
# Default to C.GALAXY_SERVER if no servers were defined
|
||||
if len(self.api_servers) == 0:
|
||||
self.api_servers.append(GalaxyAPI(self.galaxy, 'default', C.GALAXY_SERVER, token=cmd_token,
|
||||
**galaxy_options))
|
||||
self.api_servers.append(GalaxyAPI(
|
||||
self.galaxy, 'default', C.GALAXY_SERVER, token=cmd_token,
|
||||
priority=0,
|
||||
**galaxy_options
|
||||
))
|
||||
|
||||
context.CLIARGS['func']()
|
||||
|
||||
|
@ -530,7 +562,7 @@ class GalaxyCLI(CLI):
|
|||
def _get_default_collection_path(self):
|
||||
return C.COLLECTIONS_PATHS[0]
|
||||
|
||||
def _parse_requirements_file(self, requirements_file, allow_old_format=True):
|
||||
def _parse_requirements_file(self, requirements_file, allow_old_format=True, artifacts_manager=None):
|
||||
"""
|
||||
Parses an Ansible requirement.yml file and returns all the roles and/or collections defined in it. There are 2
|
||||
requirements file format:
|
||||
|
@ -556,6 +588,7 @@ class GalaxyCLI(CLI):
|
|||
|
||||
:param requirements_file: The path to the requirements file.
|
||||
:param allow_old_format: Will fail if a v1 requirements file is found and this is set to False.
|
||||
:param artifacts_manager: Artifacts manager.
|
||||
:return: a dict containing roles and collections to found in the requirements file.
|
||||
"""
|
||||
requirements = {
|
||||
|
@ -619,33 +652,48 @@ class GalaxyCLI(CLI):
|
|||
for role_req in file_requirements.get('roles') or []:
|
||||
requirements['roles'] += parse_role_req(role_req)
|
||||
|
||||
for collection_req in file_requirements.get('collections') or []:
|
||||
if isinstance(collection_req, dict):
|
||||
req_name = collection_req.get('name', None)
|
||||
if req_name is None:
|
||||
raise AnsibleError("Collections requirement entry should contain the key name.")
|
||||
|
||||
req_type = collection_req.get('type')
|
||||
if req_type not in ('file', 'galaxy', 'git', 'url', None):
|
||||
raise AnsibleError("The collection requirement entry key 'type' must be one of file, galaxy, git, or url.")
|
||||
|
||||
req_version = collection_req.get('version', '*')
|
||||
req_source = collection_req.get('source', None)
|
||||
if req_source:
|
||||
# Try and match up the requirement source with our list of Galaxy API servers defined in the
|
||||
# config, otherwise create a server with that URL without any auth.
|
||||
req_source = next(iter([a for a in self.api_servers if req_source in [a.name, a.api_server]]),
|
||||
GalaxyAPI(self.galaxy,
|
||||
"explicit_requirement_%s" % req_name,
|
||||
req_source,
|
||||
validate_certs=not context.CLIARGS['ignore_certs']))
|
||||
|
||||
requirements['collections'].append((req_name, req_version, req_source, req_type))
|
||||
else:
|
||||
requirements['collections'].append((collection_req, '*', None, None))
|
||||
requirements['collections'] = [
|
||||
Requirement.from_requirement_dict(
|
||||
self._init_coll_req_dict(collection_req),
|
||||
artifacts_manager,
|
||||
)
|
||||
for collection_req in file_requirements.get('collections') or []
|
||||
]
|
||||
|
||||
return requirements
|
||||
|
||||
def _init_coll_req_dict(self, coll_req):
    """Normalize a raw requirement entry into a requirement dict.

    Plain strings are wrapped into ``{'name': ...}``. For Galaxy-type
    entries that carry a ``source``, the source string is resolved to a
    configured ``GalaxyAPI`` server, or a new anonymous one is created.
    The input dict is updated in place and returned.
    """
    if not isinstance(coll_req, dict):
        # Assume it's a string:
        return {'name': coll_req}

    has_name = 'name' in coll_req
    has_source = bool(coll_req.get('source'))
    is_galaxy_type = coll_req.get('type', 'galaxy') == 'galaxy'
    if not (has_name and has_source and is_galaxy_type):
        return coll_req

    # Try and match up the requirement source with our list of Galaxy API
    # servers defined in the config, otherwise create a server with that
    # URL without any auth.
    requested_source = coll_req['source']
    matching_servers = (
        api for api in self.api_servers
        if requested_source in {api.name, api.api_server}
    )
    fallback_server = GalaxyAPI(
        self.galaxy,
        'explicit_requirement_{name!s}'.format(
            name=coll_req['name'],
        ),
        requested_source,
        validate_certs=not context.CLIARGS['ignore_certs'],
    )
    coll_req['source'] = next(matching_servers, fallback_server)

    return coll_req
|
||||
|
||||
@staticmethod
|
||||
def exit_without_ignore(rc=1):
|
||||
"""
|
||||
|
@ -733,26 +781,29 @@ class GalaxyCLI(CLI):
|
|||
|
||||
return meta_value
|
||||
|
||||
def _require_one_of_collections_requirements(
        self, collections, requirements_file,
        artifacts_manager=None,
):
    """Build the requirements dict from CLI args or a requirements file.

    Exactly one of ``collections`` (positional specs) or
    ``requirements_file`` must be supplied.

    :param collections: Collection specs given on the command line.
    :param requirements_file: Path to a requirements YAML file.
    :param artifacts_manager: Artifacts manager.
    :raises AnsibleError: If both or neither input sources are supplied.
    :return: Dict with ``collections`` and ``roles`` requirement lists.
    """
    if collections and requirements_file:
        raise AnsibleError("The positional collection_name arg and --requirements-file are mutually exclusive.")
    elif not collections and not requirements_file:
        raise AnsibleError("You must specify a collection name or a requirements file.")
    elif requirements_file:
        requirements_file = GalaxyCLI._resolve_path(requirements_file)
        requirements = self._parse_requirements_file(
            requirements_file,
            allow_old_format=False,
            artifacts_manager=artifacts_manager,
        )
    else:
        requirements = {
            'collections': [
                # Requirement.from_string handles names, local paths,
                # URLs and SCM pointers alike.
                Requirement.from_string(coll_input, artifacts_manager)
                for coll_input in collections
            ],
            'roles': [],
        }
    return requirements
|
||||
|
||||
############################
|
||||
|
@ -792,27 +843,37 @@ class GalaxyCLI(CLI):
|
|||
|
||||
for collection_path in context.CLIARGS['args']:
|
||||
collection_path = GalaxyCLI._resolve_path(collection_path)
|
||||
build_collection(collection_path, output_path, force)
|
||||
build_collection(
|
||||
to_text(collection_path, errors='surrogate_or_strict'),
|
||||
to_text(output_path, errors='surrogate_or_strict'),
|
||||
force,
|
||||
)
|
||||
|
||||
def execute_download(self):
|
||||
@with_collection_artifacts_manager
|
||||
def execute_download(self, artifacts_manager=None):
|
||||
collections = context.CLIARGS['args']
|
||||
no_deps = context.CLIARGS['no_deps']
|
||||
download_path = context.CLIARGS['download_path']
|
||||
ignore_certs = context.CLIARGS['ignore_certs']
|
||||
|
||||
requirements_file = context.CLIARGS['requirements']
|
||||
if requirements_file:
|
||||
requirements_file = GalaxyCLI._resolve_path(requirements_file)
|
||||
|
||||
requirements = self._require_one_of_collections_requirements(collections, requirements_file)['collections']
|
||||
requirements = self._require_one_of_collections_requirements(
|
||||
collections, requirements_file,
|
||||
artifacts_manager=artifacts_manager,
|
||||
)['collections']
|
||||
|
||||
download_path = GalaxyCLI._resolve_path(download_path)
|
||||
b_download_path = to_bytes(download_path, errors='surrogate_or_strict')
|
||||
if not os.path.exists(b_download_path):
|
||||
os.makedirs(b_download_path)
|
||||
|
||||
download_collections(requirements, download_path, self.api_servers, (not ignore_certs), no_deps,
|
||||
context.CLIARGS['allow_pre_release'])
|
||||
download_collections(
|
||||
requirements, download_path, self.api_servers, no_deps,
|
||||
context.CLIARGS['allow_pre_release'],
|
||||
artifacts_manager=artifacts_manager,
|
||||
)
|
||||
|
||||
return 0
|
||||
|
||||
|
@ -1002,29 +1063,38 @@ class GalaxyCLI(CLI):
|
|||
|
||||
self.pager(data)
|
||||
|
||||
def execute_verify(self):
|
||||
@with_collection_artifacts_manager
|
||||
def execute_verify(self, artifacts_manager=None):
|
||||
|
||||
collections = context.CLIARGS['args']
|
||||
search_paths = context.CLIARGS['collections_path']
|
||||
ignore_certs = context.CLIARGS['ignore_certs']
|
||||
ignore_errors = context.CLIARGS['ignore_errors']
|
||||
requirements_file = context.CLIARGS['requirements']
|
||||
|
||||
requirements = self._require_one_of_collections_requirements(collections, requirements_file)['collections']
|
||||
requirements = self._require_one_of_collections_requirements(
|
||||
collections, requirements_file,
|
||||
artifacts_manager=artifacts_manager,
|
||||
)['collections']
|
||||
|
||||
resolved_paths = [validate_collection_path(GalaxyCLI._resolve_path(path)) for path in search_paths]
|
||||
|
||||
verify_collections(requirements, resolved_paths, self.api_servers, (not ignore_certs), ignore_errors,
|
||||
allow_pre_release=True)
|
||||
verify_collections(
|
||||
requirements, resolved_paths,
|
||||
self.api_servers, ignore_errors,
|
||||
artifacts_manager=artifacts_manager,
|
||||
)
|
||||
|
||||
return 0
|
||||
|
||||
def execute_install(self):
|
||||
@with_collection_artifacts_manager
|
||||
def execute_install(self, artifacts_manager=None):
|
||||
"""
|
||||
Install one or more roles(``ansible-galaxy role install``), or one or more collections(``ansible-galaxy collection install``).
|
||||
You can pass in a list (roles or collections) or use the file
|
||||
option listed below (these are mutually exclusive). If you pass in a list, it
|
||||
can be a name (which will be downloaded via the galaxy API and github), or it can be a local tar archive file.
|
||||
|
||||
:param artifacts_manager: Artifacts manager.
|
||||
"""
|
||||
install_items = context.CLIARGS['args']
|
||||
requirements_file = context.CLIARGS['requirements']
|
||||
|
@ -1042,7 +1112,10 @@ class GalaxyCLI(CLI):
|
|||
role_requirements = []
|
||||
if context.CLIARGS['type'] == 'collection':
|
||||
collection_path = GalaxyCLI._resolve_path(context.CLIARGS['collections_path'])
|
||||
requirements = self._require_one_of_collections_requirements(install_items, requirements_file)
|
||||
requirements = self._require_one_of_collections_requirements(
|
||||
install_items, requirements_file,
|
||||
artifacts_manager=artifacts_manager,
|
||||
)
|
||||
|
||||
collection_requirements = requirements['collections']
|
||||
if requirements['roles']:
|
||||
|
@ -1055,7 +1128,10 @@ class GalaxyCLI(CLI):
|
|||
if not (requirements_file.endswith('.yaml') or requirements_file.endswith('.yml')):
|
||||
raise AnsibleError("Invalid role requirements file, it must end with a .yml or .yaml extension")
|
||||
|
||||
requirements = self._parse_requirements_file(requirements_file)
|
||||
requirements = self._parse_requirements_file(
|
||||
requirements_file,
|
||||
artifacts_manager=artifacts_manager,
|
||||
)
|
||||
role_requirements = requirements['roles']
|
||||
|
||||
# We can only install collections and roles at the same time if the type wasn't specified and the -p
|
||||
|
@ -1090,11 +1166,15 @@ class GalaxyCLI(CLI):
|
|||
display.display("Starting galaxy collection install process")
|
||||
# Collections can technically be installed even when ansible-galaxy is in role mode so we need to pass in
|
||||
# the install path as context.CLIARGS['collections_path'] won't be set (default is calculated above).
|
||||
self._execute_install_collection(collection_requirements, collection_path)
|
||||
self._execute_install_collection(
|
||||
collection_requirements, collection_path,
|
||||
artifacts_manager=artifacts_manager,
|
||||
)
|
||||
|
||||
def _execute_install_collection(self, requirements, path):
|
||||
def _execute_install_collection(
|
||||
self, requirements, path, artifacts_manager,
|
||||
):
|
||||
force = context.CLIARGS['force']
|
||||
ignore_certs = context.CLIARGS['ignore_certs']
|
||||
ignore_errors = context.CLIARGS['ignore_errors']
|
||||
no_deps = context.CLIARGS['no_deps']
|
||||
force_with_deps = context.CLIARGS['force_with_deps']
|
||||
|
@ -1111,8 +1191,12 @@ class GalaxyCLI(CLI):
|
|||
if not os.path.exists(b_output_path):
|
||||
os.makedirs(b_output_path)
|
||||
|
||||
install_collections(requirements, output_path, self.api_servers, (not ignore_certs), ignore_errors,
|
||||
no_deps, force, force_with_deps, allow_pre_release=allow_pre_release)
|
||||
install_collections(
|
||||
requirements, output_path, self.api_servers, ignore_errors,
|
||||
no_deps, force, force_with_deps,
|
||||
allow_pre_release=allow_pre_release,
|
||||
artifacts_manager=artifacts_manager,
|
||||
)
|
||||
|
||||
return 0
|
||||
|
||||
|
@ -1283,9 +1367,12 @@ class GalaxyCLI(CLI):
|
|||
|
||||
return 0
|
||||
|
||||
def execute_list_collection(self):
|
||||
@with_collection_artifacts_manager
|
||||
def execute_list_collection(self, artifacts_manager=None):
|
||||
"""
|
||||
List all collections installed on the local system
|
||||
|
||||
:param artifacts_manager: Artifacts manager.
|
||||
"""
|
||||
|
||||
collections_search_paths = set(context.CLIARGS['collections_path'])
|
||||
|
@ -1328,8 +1415,16 @@ class GalaxyCLI(CLI):
|
|||
continue
|
||||
|
||||
collection_found = True
|
||||
collection = CollectionRequirement.from_path(b_collection_path, False, fallback_metadata=True)
|
||||
fqcn_width, version_width = _get_collection_widths(collection)
|
||||
|
||||
try:
|
||||
collection = Requirement.from_dir_path_as_unknown(
|
||||
b_collection_path,
|
||||
artifacts_manager,
|
||||
)
|
||||
except ValueError as val_err:
|
||||
six.raise_from(AnsibleError(val_err), val_err)
|
||||
|
||||
fqcn_width, version_width = _get_collection_widths([collection])
|
||||
|
||||
_display_header(collection_path, 'Collection', 'Version', fqcn_width, version_width)
|
||||
_display_collection(collection, fqcn_width, version_width)
|
||||
|
@ -1339,7 +1434,9 @@ class GalaxyCLI(CLI):
|
|||
collection_path = validate_collection_path(path)
|
||||
if os.path.isdir(collection_path):
|
||||
display.vvv("Searching {0} for collections".format(collection_path))
|
||||
collections = find_existing_collections(collection_path, fallback_metadata=True)
|
||||
collections = list(find_existing_collections(
|
||||
collection_path, artifacts_manager,
|
||||
))
|
||||
else:
|
||||
# There was no 'ansible_collections/' directory in the path, so there
|
||||
# are no collections here.
|
||||
|
@ -1355,8 +1452,7 @@ class GalaxyCLI(CLI):
|
|||
_display_header(collection_path, 'Collection', 'Version', fqcn_width, version_width)
|
||||
|
||||
# Sort collections by the namespace and name
|
||||
collections.sort(key=to_text)
|
||||
for collection in collections:
|
||||
for collection in sorted(collections, key=to_text):
|
||||
_display_collection(collection, fqcn_width, version_width)
|
||||
|
||||
# Do not warn if the specific collection was found in any of the search paths
|
||||
|
|
|
@ -7,6 +7,7 @@ __metaclass__ = type
|
|||
|
||||
import collections
|
||||
import datetime
|
||||
import functools
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
|
@ -233,11 +234,17 @@ class CollectionVersionMetadata:
|
|||
self.dependencies = dependencies
|
||||
|
||||
|
||||
@functools.total_ordering
|
||||
class GalaxyAPI:
|
||||
""" This class is meant to be used as a API client for an Ansible Galaxy server """
|
||||
|
||||
def __init__(self, galaxy, name, url, username=None, password=None, token=None, validate_certs=True,
|
||||
available_api_versions=None, clear_response_cache=False, no_cache=True):
|
||||
def __init__(
|
||||
self, galaxy, name, url,
|
||||
username=None, password=None, token=None, validate_certs=True,
|
||||
available_api_versions=None,
|
||||
clear_response_cache=False, no_cache=True,
|
||||
priority=float('inf'),
|
||||
):
|
||||
self.galaxy = galaxy
|
||||
self.name = name
|
||||
self.username = username
|
||||
|
@ -246,6 +253,7 @@ class GalaxyAPI:
|
|||
self.api_server = url
|
||||
self.validate_certs = validate_certs
|
||||
self._available_api_versions = available_api_versions or {}
|
||||
self._priority = priority
|
||||
|
||||
b_cache_dir = to_bytes(C.config.get_config_value('GALAXY_CACHE_DIR'), errors='surrogate_or_strict')
|
||||
makedirs_safe(b_cache_dir, mode=0o700)
|
||||
|
@ -263,6 +271,38 @@ class GalaxyAPI:
|
|||
|
||||
display.debug('Validate TLS certificates for %s: %s' % (self.api_server, self.validate_certs))
|
||||
|
||||
def __str__(self):
    # type: (GalaxyAPI) -> str
    """Render GalaxyAPI as a native string representation."""
    # The configured server name doubles as the display identity.
    server_name = self.name
    return to_native(server_name)
|
||||
|
||||
def __unicode__(self):
    # type: (GalaxyAPI) -> unicode
    """Render GalaxyAPI as a unicode/text string representation."""
    # NOTE(review): presumably only consulted under Python 2 — confirm.
    server_name = self.name
    return to_text(server_name)
|
||||
|
||||
def __repr__(self):
    # type: (GalaxyAPI) -> str
    """Render GalaxyAPI as an inspectable string representation."""
    template = '<{instance!s} "{name!s}" @ {url!s} with priority {priority!s}>'
    return template.format(
        instance=self,
        name=self.name,
        url=self.api_server,
        priority=self._priority,
    )
|
||||
|
||||
def __lt__(self, other_galaxy_api):
    # type: (GalaxyAPI, GalaxyAPI) -> Union[bool, 'NotImplemented']
    """Return whether the instance priority is higher than other.

    :param other_galaxy_api: Another GalaxyAPI instance to compare with.
    :return: ``NotImplemented`` when compared against a foreign type.
    """
    if not isinstance(other_galaxy_api, self.__class__):
        return NotImplemented

    return (
        self._priority > other_galaxy_api._priority or
        # Tie-break on the server name. Fixed: the original compared
        # ``self.name < self.name``, which is always False, so the
        # tie-breaker never had any effect.
        self.name < other_galaxy_api.name
    )
|
||||
|
||||
@property
|
||||
@g_connect(['v1', 'v2', 'v3'])
|
||||
def available_api_versions(self):
|
||||
|
|
File diff suppressed because it is too large
Load diff
646
lib/ansible/galaxy/collection/concrete_artifact_manager.py
Normal file
646
lib/ansible/galaxy/collection/concrete_artifact_manager.py
Normal file
|
@ -0,0 +1,646 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright: (c) 2020-2021, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
"""Concrete collection candidate management helper module."""
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import json
|
||||
import os
|
||||
import tarfile
|
||||
import subprocess
|
||||
from contextlib import contextmanager
|
||||
from hashlib import sha256
|
||||
from shutil import rmtree
|
||||
from tempfile import mkdtemp
|
||||
|
||||
try:
|
||||
from typing import TYPE_CHECKING
|
||||
except ImportError:
|
||||
TYPE_CHECKING = False
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import (
|
||||
Any, # FIXME: !!!111
|
||||
BinaryIO, Dict, IO,
|
||||
Iterator, List, Optional,
|
||||
Set, Tuple, Type, Union,
|
||||
)
|
||||
|
||||
from ansible.galaxy.dependency_resolution.dataclasses import (
|
||||
Candidate, Requirement,
|
||||
)
|
||||
from ansible.galaxy.token import GalaxyToken
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.galaxy import get_collections_galaxy_meta_info
|
||||
from ansible.galaxy.dependency_resolution.dataclasses import _GALAXY_YAML
|
||||
from ansible.galaxy.user_agent import user_agent
|
||||
from ansible.module_utils._text import to_bytes, to_native, to_text
|
||||
from ansible.module_utils.six.moves.urllib.error import URLError
|
||||
from ansible.module_utils.six.moves.urllib.parse import urldefrag
|
||||
from ansible.module_utils.six import raise_from
|
||||
from ansible.module_utils.urls import open_url
|
||||
from ansible.utils.display import Display
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
display = Display()
|
||||
|
||||
|
||||
class ConcreteArtifactsManager:
|
||||
"""Manager for on-disk collection artifacts.
|
||||
|
||||
It is responsible for:
|
||||
* downloading remote collections from Galaxy-compatible servers and
|
||||
direct links to tarballs or SCM repositories
|
||||
* keeping track of local ones
|
||||
* keeping track of Galaxy API tokens for downloads from Galaxy'ish
|
||||
as well as the artifact hashes
|
||||
* caching all of above
|
||||
* retrieving the metadata out of the downloaded artifacts
|
||||
"""
|
||||
|
||||
def __init__(self, b_working_directory, validate_certs=True):
    # type: (bytes, bool) -> None
    """Initialize ConcreteArtifactsManager caches and constraints.

    :param b_working_directory: Bytes path of the temporary directory
        used for downloads and checkouts.
    :param validate_certs: Whether TLS certificates are verified when
        downloading artifacts.
    """
    self._validate_certs = validate_certs  # type: bool
    # Concrete source pointer -> on-disk artifact path.
    self._artifact_cache = {}  # type: Dict[bytes, bytes]
    # Galaxy-hosted candidate/requirement -> downloaded tarball path.
    self._galaxy_artifact_cache = {}  # type: Dict[Union[Candidate, Requirement], bytes]
    # Source pointer -> parsed artifact metadata.
    self._artifact_meta_cache = {}  # type: Dict[bytes, Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]]
    # Collection -> (download URL, SHA256 hash, Galaxy token) triple.
    self._galaxy_collection_cache = {}  # type: Dict[Union[Candidate, Requirement], Tuple[str, str, GalaxyToken]]
    self._b_working_directory = b_working_directory  # type: bytes
|
||||
|
||||
def get_galaxy_artifact_path(self, collection):
    # type: (Union[Candidate, Requirement]) -> bytes
    """Given a Galaxy-stored collection, return a cached path.

    If it's not yet on disk, this method downloads the artifact first.

    :param collection: A Galaxy-hosted candidate or requirement.
    :raises RuntimeError: If no download source was previously recorded
        for the collection.
    :raises AnsibleError: If downloading the tarball fails.
    :return: Bytes path of the artifact on disk.
    """
    try:
        return self._galaxy_artifact_cache[collection]
    except KeyError:
        pass

    try:
        url, sha256_hash, token = self._galaxy_collection_cache[collection]
    except KeyError as key_err:
        raise_from(
            RuntimeError(
                # Fixed grammar: the message used to read
                # "The is no known source".
                'There is no known source for {coll!s}'.
                format(coll=collection),
            ),
            key_err,
        )

    display.vvvv(
        "Fetching a collection tarball for '{collection!s}' from "
        'Ansible Galaxy'.format(collection=collection),
    )

    try:
        b_artifact_path = _download_file(
            url,
            self._b_working_directory,
            expected_hash=sha256_hash,
            validate_certs=self._validate_certs,
            token=token,
        )  # type: bytes
    except URLError as err:
        raise_from(
            AnsibleError(
                'Failed to download collection tar '
                "from '{coll_src!s}': {download_err!s}".
                format(
                    coll_src=to_native(collection.src),
                    download_err=to_native(err),
                ),
            ),
            err,
        )
    else:
        display.vvv(
            "Collection '{coll!s}' obtained from "
            'server {server!s} {url!s}'.format(
                coll=collection, server=collection.src or 'Galaxy',
                url=collection.src.api_server if collection.src is not None
                else '',
            )
        )

    self._galaxy_artifact_cache[collection] = b_artifact_path
    return b_artifact_path
|
||||
|
||||
def get_artifact_path(self, collection):
    # type: (Union[Candidate, Requirement]) -> bytes
    """Given a concrete collection pointer, return a cached path.

    If it's not yet on disk, this method downloads the artifact first.

    :param collection: A concrete (URL/SCM/file/dir) candidate or
        requirement.
    :raises AnsibleError: If downloading a URL-sourced tarball fails.
    :raises RuntimeError: If the collection type is not a concrete one.
    :return: Bytes path of the artifact on disk.
    """
    # Fast path: the artifact was already fetched for this source.
    try:
        return self._artifact_cache[collection.src]
    except KeyError:
        pass

    # NOTE: SCM needs to be special-cased as it may contain either
    # NOTE: one collection in its root, or a number of top-level
    # NOTE: collection directories instead.
    # NOTE: The idea is to store the SCM collection as unpacked
    # NOTE: directory structure under the temporary location and use
    # NOTE: a "virtual" collection that has pinned requirements on
    # NOTE: the directories under that SCM checkout that correspond
    # NOTE: to collections.
    # NOTE: This brings us to the idea that we need two separate
    # NOTE: virtual Requirement/Candidate types --
    # NOTE: (single) dir + (multidir) subdirs
    if collection.is_url:
        display.vvvv(
            "Collection requirement '{collection!s}' is a URL "
            'to a tar artifact'.format(collection=collection.fqcn),
        )
        try:
            b_artifact_path = _download_file(
                collection.src,
                self._b_working_directory,
                expected_hash=None,  # NOTE: URLs don't support checksums
                validate_certs=self._validate_certs,
            )
        except URLError as err:
            # Chain the original URLError so the root cause is preserved.
            raise_from(
                AnsibleError(
                    'Failed to download collection tar '
                    "from '{coll_src!s}': {download_err!s}".
                    format(
                        coll_src=to_native(collection.src),
                        download_err=to_native(err),
                    ),
                ),
                err,
            )
    elif collection.is_scm:
        # SCM sources are checked out into the working directory.
        b_artifact_path = _extract_collection_from_git(
            collection.src,
            collection.ver,
            self._b_working_directory,
        )
    elif collection.is_file or collection.is_dir or collection.is_subdirs:
        # Local artifacts are used in place; only the path is recorded.
        b_artifact_path = to_bytes(collection.src)
    else:
        # NOTE: This may happen `if collection.is_online_index_pointer`
        raise RuntimeError(
            'The artifact is of an unexpected type {art_type!s}'.
            format(art_type=collection.type)
        )

    self._artifact_cache[collection.src] = b_artifact_path
    return b_artifact_path
|
||||
|
||||
    def _get_direct_collection_namespace(self, collection):
        # type: (Candidate) -> Optional[str]
        """Return the namespace recorded in the artifact's own metadata."""
        return self.get_direct_collection_meta(collection)['namespace']  # type: ignore[return-value]
|
||||
|
||||
    def _get_direct_collection_name(self, collection):
        # type: (Candidate) -> Optional[str]
        """Return the collection name recorded in the artifact's own metadata."""
        return self.get_direct_collection_meta(collection)['name']  # type: ignore[return-value]
|
||||
|
||||
    def get_direct_collection_fqcn(self, collection):
        # type: (Candidate) -> Optional[str]
        """Extract FQCN from the given on-disk collection artifact.

        If the collection is virtual, ``None`` is returned instead
        of a string.
        """
        if collection.is_virtual:
            # NOTE: should it be something like "<virtual>"?
            return None

        # Join "<namespace>.<name>" out of the artifact's own metadata.
        return '.'.join((  # type: ignore[type-var]
            self._get_direct_collection_namespace(collection),  # type: ignore[arg-type]
            self._get_direct_collection_name(collection),
        ))
|
||||
|
||||
    def get_direct_collection_version(self, collection):
        # type: (Union[Candidate, Requirement]) -> str
        """Extract version from the given on-disk collection artifact."""
        return self.get_direct_collection_meta(collection)['version']  # type: ignore[return-value]
|
||||
|
||||
    def get_direct_collection_dependencies(self, collection):
        # type: (Union[Candidate, Requirement]) -> Dict[str, str]
        """Extract deps from the given on-disk collection artifact."""
        return self.get_direct_collection_meta(collection)['dependencies']  # type: ignore[return-value]
|
||||
|
||||
    def get_direct_collection_meta(self, collection):
        # type: (Union[Candidate, Requirement]) -> Dict[str, Optional[Union[str, Dict[str, str], List[str]]]]
        """Extract meta from the given on-disk collection artifact.

        Results are memoized in ``self._artifact_meta_cache`` keyed on
        ``collection.src``. SCM and namespace-subdir collections get a
        synthesized "virtual" metadata dict whose dependencies pin the
        concrete on-disk paths instead of real FQCNs.
        """
        try:  # FIXME: use unique collection identifier as a cache key?
            return self._artifact_meta_cache[collection.src]
        except KeyError:
            # Cache miss -- make sure the artifact is present on disk first.
            b_artifact_path = self.get_artifact_path(collection)

            if collection.is_url or collection.is_file:
                # Tarball artifacts carry MANIFEST.json inside the archive.
                collection_meta = _get_meta_from_tar(b_artifact_path)
            elif collection.is_dir:  # should we just build a coll instead?
                # FIXME: what if there's subdirs?
                try:
                    collection_meta = _get_meta_from_dir(b_artifact_path)
                except LookupError as lookup_err:
                    raise_from(
                        AnsibleError(
                            'Failed to find the collection dir deps: {err!s}'.
                            format(err=to_native(lookup_err)),
                        ),
                        lookup_err,
                    )
            elif collection.is_scm:
                # Virtual meta: a single dependency pointing at the checkout.
                collection_meta = {
                    'name': None,
                    'namespace': None,
                    'dependencies': {to_native(b_artifact_path): '*'},
                    'version': '*',
                }
            elif collection.is_subdirs:
                # Virtual meta: one dependency per collection dir found
                # under the namespace directory.
                collection_meta = {
                    'name': None,
                    'namespace': None,
                    # NOTE: Dropping b_artifact_path since it's based on src anyway
                    'dependencies': dict.fromkeys(
                        map(to_native, collection.namespace_collection_paths),
                        '*',
                    ),
                    'version': '*',
                }
            else:
                # Unreachable for the known collection types.
                raise RuntimeError

            self._artifact_meta_cache[collection.src] = collection_meta
            return collection_meta
|
||||
|
||||
    def save_collection_source(self, collection, url, sha256_hash, token):
        # type: (Candidate, str, str, GalaxyToken) -> None
        """Store collection URL, SHA256 hash and Galaxy API token.

        This is a hook that is supposed to be called before attempting to
        download Galaxy-based collections with ``get_galaxy_artifact_path()``.
        """
        # Keyed on the candidate itself; the tuple is consumed later by the
        # Galaxy download path.
        self._galaxy_collection_cache[collection] = url, sha256_hash, token
|
||||
|
||||
    @classmethod
    @contextmanager
    def under_tmpdir(
            cls,  # type: Type[ConcreteArtifactsManager]
            temp_dir_base,  # type: str
            validate_certs=True,  # type: bool
    ):  # type: (...) -> Iterator[ConcreteArtifactsManager]
        """Custom ConcreteArtifactsManager constructor with temp dir.

        This method returns a context manager that allocates and cleans
        up a temporary directory for caching the collection artifacts
        during the dependency resolution process.

        :param temp_dir_base: Directory under which the temp dir is made.
        :param validate_certs: Whether TLS certs are verified on downloads.
        """
        # NOTE: Can't use `with tempfile.TemporaryDirectory:`
        # NOTE: because it's not in Python 2 stdlib.
        temp_path = mkdtemp(
            dir=to_bytes(temp_dir_base, errors='surrogate_or_strict'),
        )
        b_temp_path = to_bytes(temp_path, errors='surrogate_or_strict')
        try:
            yield cls(b_temp_path, validate_certs)
        finally:
            # Always remove the scratch dir, even if the caller raised.
            rmtree(b_temp_path)
|
||||
|
||||
|
||||
def parse_scm(collection, version):
    """Split an SCM pointer into (name, version, clone URL, subdir).

    An inline ``,<version>`` suffix on the pointer wins over the
    *version* argument; an absent/wildcard version falls back to HEAD.
    The URL fragment (after ``#``) names a subdirectory inside the repo.
    """
    if ',' in collection:
        collection, version = collection.split(',', 1)
    elif version == '*' or not version:
        version = 'HEAD'

    # Drop the "git+" transport prefix if present.
    path = collection[len('git+'):] if collection.startswith('git+') else collection

    path, fragment = urldefrag(path)
    fragment = fragment.strip(os.path.sep)

    if path.endswith(os.path.sep + '.git'):
        # Bare-repo style path: the repo name is the second-to-last segment.
        name = path.split(os.path.sep)[-2]
    elif '://' not in path and '@' not in path:
        # Plain local path -- use it verbatim as the name.
        name = path
    else:
        # Remote URL -- last path segment, minus any ".git" suffix.
        name = path.split('/')[-1]
        if name.endswith('.git'):
            name = name[:-len('.git')]

    return name, version, path, fragment
|
||||
|
||||
|
||||
def _extract_collection_from_git(repo_url, coll_ver, b_path):
    """Clone the Git repo behind *repo_url* and check out *coll_ver*.

    The checkout lands in a fresh temp dir under *b_path*. Returns the
    checkout root, or the subdirectory named by the URL fragment if one
    was given.

    :raises AnsibleError: If `git clone` or `git checkout` fails.
    """
    name, version, git_url, fragment = parse_scm(repo_url, coll_ver)
    b_checkout_path = mkdtemp(
        dir=b_path,
        prefix=to_bytes(name, errors='surrogate_or_strict'),
    )  # type: bytes
    git_clone_cmd = 'git', 'clone', git_url, to_text(b_checkout_path)
    # FIXME: '--depth', '1', '--branch', version
    try:
        subprocess.check_call(git_clone_cmd)
    except subprocess.CalledProcessError as proc_err:
        raise_from(
            AnsibleError(  # should probably be LookupError
                'Failed to clone a Git repository from `{repo_url!s}`.'.
                format(repo_url=to_native(git_url)),
            ),
            proc_err,
        )

    # Switch the fresh clone to the requested commit-ish.
    git_switch_cmd = 'git', 'checkout', to_text(version)
    try:
        subprocess.check_call(git_switch_cmd, cwd=b_checkout_path)
    except subprocess.CalledProcessError as proc_err:
        raise_from(
            AnsibleError(  # should probably be LookupError
                'Failed to switch a cloned Git repo `{repo_url!s}` '
                'to the requested revision `{commitish!s}`.'.
                format(
                    commitish=to_native(version),
                    repo_url=to_native(git_url),
                ),
            ),
            proc_err,
        )

    return (
        os.path.join(b_checkout_path, to_bytes(fragment))
        if fragment else b_checkout_path
    )
|
||||
|
||||
|
||||
# FIXME: use random subdirs while preserving the file names
def _download_file(url, b_path, expected_hash, validate_certs, token=None):
    # type: (str, bytes, Optional[str], bool, GalaxyToken) -> bytes
    # ^ NOTE: used in download and verify_collections ^
    """Download *url* into a temp dir under *b_path*, verifying its hash.

    The tarball keeps its original file name inside a freshly created
    directory. When *expected_hash* is given, the downloaded content's
    SHA256 must match it.

    :returns: Path of the downloaded file (bytes).
    :raises AnsibleError: On a SHA256 mismatch.
    """
    b_tarball_name = to_bytes(
        url.rsplit('/', 1)[1], errors='surrogate_or_strict',
    )
    b_file_name = b_tarball_name[:-len('.tar.gz')]

    b_tarball_dir = mkdtemp(
        dir=b_path,
        prefix=b'-'.join((b_file_name, b'')),
    )  # type: bytes

    b_file_path = os.path.join(b_tarball_dir, b_tarball_name)

    display.display("Downloading %s to %s" % (url, to_text(b_tarball_dir)))
    # NOTE: Galaxy redirects downloads to S3 which rejects the request
    # NOTE: if an Authorization header is attached so don't redirect it
    resp = open_url(
        to_native(url, errors='surrogate_or_strict'),
        validate_certs=validate_certs,
        headers=None if token is None else token.headers(),
        unredirected_headers=['Authorization'], http_agent=user_agent(),
    )

    with open(b_file_path, 'wb') as download_file:  # type: BinaryIO
        # Stream to disk and compute the SHA256 digest in one pass.
        actual_hash = _consume_file(resp, write_to=download_file)

    if expected_hash:
        display.vvvv(
            'Validating downloaded file hash {actual_hash!s} with '
            'expected hash {expected_hash!s}'.
            format(actual_hash=actual_hash, expected_hash=expected_hash)
        )
        if expected_hash != actual_hash:
            raise AnsibleError('Mismatch artifact hash with downloaded file')

    return b_file_path
|
||||
|
||||
|
||||
def _consume_file(read_from, write_to=None):
|
||||
# type: (BinaryIO, BinaryIO) -> str
|
||||
bufsize = 65536
|
||||
sha256_digest = sha256()
|
||||
data = read_from.read(bufsize)
|
||||
while data:
|
||||
if write_to is not None:
|
||||
write_to.write(data)
|
||||
write_to.flush()
|
||||
sha256_digest.update(data)
|
||||
data = read_from.read(bufsize)
|
||||
|
||||
return sha256_digest.hexdigest()
|
||||
|
||||
|
||||
def _normalize_galaxy_yml_manifest(
        galaxy_yml,  # type: Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
        b_galaxy_yml_path,  # type: bytes
):
    # type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
    """Validate a parsed galaxy.yml dict against the schema and fill defaults.

    Mutates *galaxy_yml* in place (and also returns it): missing optional
    keys get type-appropriate defaults, scalar values for list keys get
    wrapped in a list, and a falsy version becomes the ``*`` wildcard.

    :raises AnsibleError: If a mandatory schema key is absent.
    """
    galaxy_yml_schema = (
        get_collections_galaxy_meta_info()
    )  # type: List[Dict[str, Any]]  # FIXME: <--
    # FIXME: 👆maybe precise type: List[Dict[str, Union[bool, str, List[str]]]]

    mandatory_keys = set()
    string_keys = set()  # type: Set[str]
    list_keys = set()  # type: Set[str]
    dict_keys = set()  # type: Set[str]

    # Bucket every schema key by its declared value type (default: str).
    for info in galaxy_yml_schema:
        if info.get('required', False):
            mandatory_keys.add(info['key'])

        key_list_type = {
            'str': string_keys,
            'list': list_keys,
            'dict': dict_keys,
        }[info.get('type', 'str')]
        key_list_type.add(info['key'])

    all_keys = frozenset(list(mandatory_keys) + list(string_keys) + list(list_keys) + list(dict_keys))

    set_keys = set(galaxy_yml.keys())
    missing_keys = mandatory_keys.difference(set_keys)
    if missing_keys:
        raise AnsibleError("The collection galaxy.yml at '%s' is missing the following mandatory keys: %s"
                           % (to_native(b_galaxy_yml_path), ", ".join(sorted(missing_keys))))

    # Unknown keys only warn -- they're ignored rather than fatal.
    extra_keys = set_keys.difference(all_keys)
    if len(extra_keys) > 0:
        display.warning("Found unknown keys in collection galaxy.yml at '%s': %s"
                        % (to_text(b_galaxy_yml_path), ", ".join(extra_keys)))

    # Add the defaults if they have not been set
    for optional_string in string_keys:
        if optional_string not in galaxy_yml:
            galaxy_yml[optional_string] = None

    for optional_list in list_keys:
        list_val = galaxy_yml.get(optional_list, None)

        if list_val is None:
            galaxy_yml[optional_list] = []
        elif not isinstance(list_val, list):
            # A bare scalar is accepted and promoted to a one-item list.
            galaxy_yml[optional_list] = [list_val]  # type: ignore[list-item]

    for optional_dict in dict_keys:
        if optional_dict not in galaxy_yml:
            galaxy_yml[optional_dict] = {}

    # NOTE: `version: null` is only allowed for `galaxy.yml`
    # NOTE: and not `MANIFEST.json`. The use-case for it is collections
    # NOTE: that generate the version from Git before building a
    # NOTE: distributable tarball artifact.
    if not galaxy_yml.get('version'):
        galaxy_yml['version'] = '*'

    return galaxy_yml
|
||||
|
||||
|
||||
def _get_meta_from_dir(
        b_path,  # type: bytes
):  # type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
    """Load collection metadata from a directory.

    Prefers the installed-collection layout (MANIFEST.json); falls back
    to the source layout (galaxy.yml) when the former isn't present.
    """
    try:
        return _get_meta_from_installed_dir(b_path)
    except LookupError:
        return _get_meta_from_src_dir(b_path)
|
||||
|
||||
|
||||
def _get_meta_from_src_dir(
        b_path,  # type: bytes
):  # type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
    """Read and normalize galaxy.yml from a source collection directory.

    :raises LookupError: If galaxy.yml does not exist under *b_path*.
    :raises AnsibleError: If galaxy.yml is not valid YAML.
    """
    galaxy_yml = os.path.join(b_path, _GALAXY_YAML)
    if not os.path.isfile(galaxy_yml):
        raise LookupError(
            "The collection galaxy.yml path '{path!s}' does not exist.".
            format(path=to_native(galaxy_yml))
        )

    with open(galaxy_yml, 'rb') as manifest_file_obj:
        try:
            manifest = yaml.safe_load(manifest_file_obj)
        except yaml.error.YAMLError as yaml_err:
            raise_from(
                AnsibleError(
                    "Failed to parse the galaxy.yml at '{path!s}' with "
                    'the following error:\n{err_txt!s}'.
                    format(
                        path=to_native(galaxy_yml),
                        err_txt=to_native(yaml_err),
                    ),
                ),
                yaml_err,
            )

    # Schema-validate and apply defaults before handing the dict back.
    return _normalize_galaxy_yml_manifest(manifest, galaxy_yml)
|
||||
|
||||
|
||||
def _get_meta_from_installed_dir(
        b_path,  # type: bytes
):  # type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
    """Read collection metadata from an installed collection's MANIFEST.json.

    :returns: The ``collection_info`` mapping from the manifest.
    :raises LookupError: If MANIFEST.json is absent (signals the caller
        to try the source-dir layout instead).
    :raises AnsibleError: If the manifest is invalid JSON or lacks a
        usable version value.
    """
    n_manifest_json = 'MANIFEST.json'
    b_manifest_json = to_bytes(n_manifest_json)
    b_manifest_json_path = os.path.join(b_path, b_manifest_json)

    try:
        with open(b_manifest_json_path, 'rb') as manifest_fd:
            b_manifest_txt = manifest_fd.read()
    except (IOError, OSError):
        # Missing manifest is expected for source dirs -- LookupError lets
        # _get_meta_from_dir() fall back to galaxy.yml.
        raise LookupError(
            "The collection {manifest!s} path '{path!s}' does not exist.".
            format(
                manifest=n_manifest_json,
                path=to_native(b_manifest_json_path),
            )
        )

    manifest_txt = to_text(b_manifest_txt, errors='surrogate_or_strict')

    try:
        manifest = json.loads(manifest_txt)
    except ValueError:
        raise AnsibleError(
            'Collection tar file member {member!s} does not '
            'contain a valid json string.'.
            format(member=n_manifest_json),
        )
    else:
        collection_info = manifest['collection_info']

    version = collection_info.get('version')
    if not version:
        # Unlike galaxy.yml, an installed manifest must carry a real version.
        raise AnsibleError(
            u'Collection metadata file at `{meta_file!s}` is expected '
            u'to have a valid SemVer version value but got {version!s}'.
            format(
                meta_file=to_text(b_manifest_json_path),
                version=to_text(repr(version)),
            ),
        )

    return collection_info
|
||||
|
||||
|
||||
def _get_meta_from_tar(
        b_path,  # type: bytes
):  # type: (...) -> Dict[str, Optional[Union[str, List[str], Dict[str, str]]]]
    """Read collection metadata from a collection tarball artifact.

    :returns: The ``collection_info`` mapping from the tarball's
        MANIFEST.json member.
    :raises AnsibleError: If the path is not a tar file, the manifest
        member is missing/unreadable, or its content is invalid JSON.
    """
    if not tarfile.is_tarfile(b_path):
        raise AnsibleError(
            "Collection artifact at '{path!s}' is not a valid tar file.".
            format(path=to_native(b_path)),
        )

    n_manifest_json = 'MANIFEST.json'

    with tarfile.open(b_path, mode='r') as collection_tar:  # type: tarfile.TarFile
        try:
            member = collection_tar.getmember(n_manifest_json)
        except KeyError:
            raise AnsibleError(
                "Collection at '{path!s}' does not contain the "
                'required file {manifest_file!s}.'.
                format(
                    path=to_native(b_path),
                    manifest_file=n_manifest_json,
                ),
            )

        with _tarfile_extract(collection_tar, member) as (_member, member_obj):
            if member_obj is None:
                # extractfile() returns None for non-regular-file members.
                raise AnsibleError(
                    'Collection tar file does not contain '
                    'member {member!s}'.format(member=n_manifest_json),
                )

            text_content = to_text(
                member_obj.read(),
                errors='surrogate_or_strict',
            )

            try:
                manifest = json.loads(text_content)
            except ValueError:
                raise AnsibleError(
                    'Collection tar file member {member!s} does not '
                    'contain a valid json string.'.
                    format(member=n_manifest_json),
                )
            return manifest['collection_info']
|
||||
|
||||
|
||||
@contextmanager
|
||||
def _tarfile_extract(
|
||||
tar, # type: tarfile.TarFile
|
||||
member, # type: tarfile.TarInfo
|
||||
):
|
||||
# type: (...) -> Iterator[Tuple[tarfile.TarInfo, Optional[IO[bytes]]]]
|
||||
tar_obj = tar.extractfile(member)
|
||||
try:
|
||||
yield member, tar_obj
|
||||
finally:
|
||||
if tar_obj is not None:
|
||||
tar_obj.close()
|
107
lib/ansible/galaxy/collection/galaxy_api_proxy.py
Normal file
107
lib/ansible/galaxy/collection/galaxy_api_proxy.py
Normal file
|
@ -0,0 +1,107 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright: (c) 2020-2021, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
"""A facade for interfacing with multiple Galaxy instances."""
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
|
||||
try:
|
||||
from typing import TYPE_CHECKING
|
||||
except ImportError:
|
||||
TYPE_CHECKING = False
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Dict, Iterable, Tuple
|
||||
from ansible.galaxy.api import CollectionVersionMetadata
|
||||
from ansible.galaxy.collection.concrete_artifact_manager import (
|
||||
ConcreteArtifactsManager,
|
||||
)
|
||||
from ansible.galaxy.dependency_resolution.dataclasses import (
|
||||
Candidate, Requirement,
|
||||
)
|
||||
|
||||
from ansible.galaxy.api import GalaxyAPI, GalaxyError
|
||||
|
||||
|
||||
class MultiGalaxyAPIProxy:
    """A proxy that abstracts talking to multiple Galaxy instances."""

    def __init__(self, apis, concrete_artifacts_manager):
        # type: (Iterable[GalaxyAPI], ConcreteArtifactsManager) -> None
        """Initialize the target APIs list."""
        self._apis = apis
        self._concrete_art_mgr = concrete_artifacts_manager

    def get_collection_versions(self, requirement):
        # type: (Requirement) -> Iterable[Tuple[str, GalaxyAPI]]
        """Get a set of unique versions for FQCN on Galaxy servers."""
        if requirement.is_concrete_artifact:
            # A concrete artifact has exactly one version -- the one
            # recorded in its own on-disk metadata.
            return {
                (
                    self._concrete_art_mgr.
                    get_direct_collection_version(requirement),
                    requirement.src,
                ),
            }

        # Pin the lookup to the requirement's own API if it carries one;
        # otherwise consult every configured server.
        api_lookup_order = (
            (requirement.src, )
            if isinstance(requirement.src, GalaxyAPI)
            else self._apis
        )
        return set(
            (version, api)
            for api in api_lookup_order
            for version in api.get_collection_versions(
                requirement.namespace, requirement.name,
            )
        )

    def get_collection_version_metadata(self, collection_candidate):
        # type: (Candidate) -> CollectionVersionMetadata
        """Retrieve collection metadata of a given candidate.

        Tries the candidate's own API first (if it has one), otherwise
        every configured server in order; re-raises the last GalaxyError
        when all of them fail.
        """

        api_lookup_order = (
            (collection_candidate.src, )
            if isinstance(collection_candidate.src, GalaxyAPI)
            else self._apis
        )
        for api in api_lookup_order:
            try:
                version_metadata = api.get_collection_version_metadata(
                    collection_candidate.namespace,
                    collection_candidate.name,
                    collection_candidate.ver,
                )
            except GalaxyError as api_err:
                # Remember the failure and fall through to the next server.
                last_err = api_err
            else:
                # Stash the download URL/hash/token so the artifacts
                # manager can fetch the tarball later without another
                # metadata round-trip.
                self._concrete_art_mgr.save_collection_source(
                    collection_candidate,
                    version_metadata.download_url,
                    version_metadata.artifact_sha256,
                    api.token,
                )
                return version_metadata

        # NOTE(review): if `api_lookup_order` is empty, `last_err` is
        # NOTE(review): unbound here and this raises NameError instead of
        # NOTE(review): a GalaxyError -- confirm callers always supply at
        # NOTE(review): least one API.
        raise last_err

    def get_collection_dependencies(self, collection_candidate):
        # type: (Candidate) -> Dict[str, str]
        # FIXME: return Requirement instances instead?
        """Retrieve collection dependencies of a given candidate."""
        if collection_candidate.is_concrete_artifact:
            # On-disk artifacts carry their deps in their own metadata.
            return (
                self.
                _concrete_art_mgr.
                get_direct_collection_dependencies
            )(collection_candidate)

        return (
            self.
            get_collection_version_metadata(collection_candidate).
            dependencies
        )
|
|
@ -1,7 +1,49 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright: (c) 2020, Ansible Project
|
||||
# Copyright: (c) 2020-2021, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
"""Dependency resolution machinery."""
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
try:
|
||||
from typing import TYPE_CHECKING
|
||||
except ImportError:
|
||||
TYPE_CHECKING = False
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Iterable
|
||||
from ansible.galaxy.api import GalaxyAPI
|
||||
from ansible.galaxy.collection.concrete_artifact_manager import (
|
||||
ConcreteArtifactsManager,
|
||||
)
|
||||
from ansible.galaxy.dependency_resolution.dataclasses import Candidate
|
||||
|
||||
from ansible.galaxy.collection.galaxy_api_proxy import MultiGalaxyAPIProxy
|
||||
from ansible.galaxy.dependency_resolution.providers import CollectionDependencyProvider
|
||||
from ansible.galaxy.dependency_resolution.reporters import CollectionDependencyReporter
|
||||
from ansible.galaxy.dependency_resolution.resolvers import CollectionDependencyResolver
|
||||
|
||||
|
||||
def build_collection_dependency_resolver(
        galaxy_apis,  # type: Iterable[GalaxyAPI]
        concrete_artifacts_manager,  # type: ConcreteArtifactsManager
        preferred_candidates=None,  # type: Iterable[Candidate]
        with_deps=True,  # type: bool
        with_pre_releases=False,  # type: bool
):  # type: (...) -> CollectionDependencyResolver
    """Return a collection dependency resolver.

    The returned instance will have a ``resolve()`` method for
    further consumption.

    :param galaxy_apis: Galaxy servers consulted for versions/metadata.
    :param concrete_artifacts_manager: On-disk artifact metadata source.
    :param preferred_candidates: Already-installed candidates to prefer.
    :param with_deps: Whether transitive dependencies are followed.
    :param with_pre_releases: Whether pre-release versions are eligible.
    """
    # Wire the resolvelib provider/reporter pair into a resolver; the
    # proxy hides which Galaxy server (or local artifact) answers a query.
    return CollectionDependencyResolver(
        CollectionDependencyProvider(
            apis=MultiGalaxyAPIProxy(galaxy_apis, concrete_artifacts_manager),
            concrete_artifacts_manager=concrete_artifacts_manager,
            preferred_candidates=preferred_candidates,
            with_deps=with_deps,
            with_pre_releases=with_pre_releases,
        ),
        CollectionDependencyReporter(),
    )
|
||||
|
|
435
lib/ansible/galaxy/dependency_resolution/dataclasses.py
Normal file
435
lib/ansible/galaxy/dependency_resolution/dataclasses.py
Normal file
|
@ -0,0 +1,435 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright: (c) 2020-2021, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
"""Dependency structs."""
|
||||
# FIXME: add caching all over the place
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import json
|
||||
import os
|
||||
from collections import namedtuple
|
||||
from glob import iglob
|
||||
from keyword import iskeyword # used in _is_fqcn
|
||||
|
||||
try:
|
||||
from typing import TYPE_CHECKING
|
||||
except ImportError:
|
||||
TYPE_CHECKING = False
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Tuple, Type, TypeVar
|
||||
from ansible.galaxy.collection.concrete_artifact_manager import (
|
||||
ConcreteArtifactsManager,
|
||||
)
|
||||
Collection = TypeVar(
|
||||
'Collection',
|
||||
'Candidate', 'Requirement',
|
||||
'_ComputedReqKindsMixin',
|
||||
)
|
||||
|
||||
import yaml
|
||||
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.galaxy.api import GalaxyAPI
|
||||
from ansible.module_utils._text import to_bytes, to_native, to_text
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlparse
|
||||
from ansible.module_utils.six import raise_from
|
||||
from ansible.utils.display import Display
|
||||
|
||||
|
||||
try:  # NOTE: py3/py2 compat
    # FIXME: put somewhere into compat
    # py2 mypy can't deal with try/excepts
    # On Python 3 the builtin str method is the identifier check.
    _is_py_id = str.isidentifier  # type: ignore[attr-defined]
except AttributeError:  # Python 2
    # FIXME: port this to AnsibleCollectionRef.is_valid_collection_name
    from re import match as _match_pattern
    from tokenize import Name as _VALID_IDENTIFIER_REGEX
    _valid_identifier_string_regex = ''.join((_VALID_IDENTIFIER_REGEX, r'\Z'))

    def _is_py_id(tested_str):
        """Return True if *tested_str* is a valid Python identifier (py2 fallback)."""
        # Ref: https://stackoverflow.com/a/55802320/595220
        return bool(_match_pattern(_valid_identifier_string_regex, tested_str))


_ALLOW_CONCRETE_POINTER_IN_SOURCE = False  # NOTE: This is a feature flag
# File names marking a source vs. installed collection directory:
_GALAXY_YAML = b'galaxy.yml'
_MANIFEST_JSON = b'MANIFEST.json'


display = Display()
|
||||
|
||||
|
||||
def _is_collection_src_dir(dir_path):
    """Check whether *dir_path* holds a source collection (has galaxy.yml)."""
    b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict')
    return os.path.isfile(os.path.join(b_dir_path, _GALAXY_YAML))
|
||||
|
||||
|
||||
def _is_installed_collection_dir(dir_path):
    """Check whether *dir_path* holds an installed collection (has MANIFEST.json)."""
    b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict')
    return os.path.isfile(os.path.join(b_dir_path, _MANIFEST_JSON))
|
||||
|
||||
|
||||
def _is_collection_dir(dir_path):
    """Check whether *dir_path* holds an installed or a source collection."""
    # Installed collections ship MANIFEST.json; source ones ship galaxy.yml.
    layout_checks = (_is_installed_collection_dir, _is_collection_src_dir)
    return any(check(dir_path) for check in layout_checks)
|
||||
|
||||
|
||||
def _find_collections_in_subdirs(dir_path):
    """Yield paths of source collections directly under *dir_path*.

    Looks one level deep for directories containing a galaxy.yml.
    """
    b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict')
    galaxy_yml_glob_pattern = os.path.join(
        b_dir_path,
        # b'*',  # namespace is supposed to be top-level per spec
        b'*',  # collection name
        _GALAXY_YAML,
    )
    return (
        os.path.dirname(galaxy_yml)
        for galaxy_yml in iglob(galaxy_yml_glob_pattern)
    )
|
||||
|
||||
|
||||
def _is_collection_namespace_dir(tested_str):
    """Check whether *tested_str* is a namespace dir with collection subdirs."""
    # any() over the generator of (truthy, non-empty) dir paths: True as
    # soon as at least one collection subdir is found.
    return any(_find_collections_in_subdirs(tested_str))
|
||||
|
||||
|
||||
def _is_file_path(tested_str):
    """Check whether *tested_str* is a path to an existing regular file."""
    return os.path.isfile(to_bytes(tested_str, errors='surrogate_or_strict'))
|
||||
|
||||
|
||||
def _is_http_url(tested_str):
|
||||
return urlparse(tested_str).scheme.lower() in {'http', 'https'}
|
||||
|
||||
|
||||
def _is_git_url(tested_str):
|
||||
return tested_str.startswith(('git+', 'git@'))
|
||||
|
||||
|
||||
def _is_concrete_artifact_pointer(tested_str):
    """Check whether *tested_str* points to a resolvable on-disk/remote artifact.

    True for Git pointers, http(s) URLs, file paths, collection dirs
    and namespace dirs with collection subdirs.
    """
    return any(
        predicate(tested_str)
        for predicate in (
            # NOTE: Maintain the checks to be sorted from light to heavy:
            _is_git_url,
            _is_http_url,
            _is_file_path,
            _is_collection_dir,
            _is_collection_namespace_dir,
        )
    )
|
||||
|
||||
|
||||
def _is_fqcn(tested_str):
    # FIXME: port this to AnsibleCollectionRef.is_valid_collection_name
    """Check that *tested_str* looks like a `<namespace>.<name>` FQCN."""
    parts = tested_str.split('.')
    if len(parts) != 2:
        return False

    return all(
        # FIXME: keywords and identifiers are different in differnt Pythons
        _is_py_id(part) and not iskeyword(part)
        for part in parts
    )
|
||||
|
||||
|
||||
class _ComputedReqKindsMixin:
|
||||
|
||||
    @classmethod
    def from_dir_path_as_unknown(  # type: ignore[misc]
            cls,  # type: Type[Collection]
            dir_path,  # type: bytes
            art_mgr,  # type: ConcreteArtifactsManager
    ):  # type: (...) -> Collection
        """Make collection from an unspecified dir type.

        This alternative constructor attempts to grab metadata from the
        given path if it's a directory. If there's no metadata, it
        falls back to guessing the FQCN based on the directory path and
        sets the version to "*".

        It raises a ValueError immediatelly if the input is not an
        existing directory path.
        """
        if not os.path.isdir(dir_path):
            raise ValueError(
                "The collection directory '{path!s}' doesn't exist".
                format(path=to_native(dir_path)),
            )

        try:
            # Metadata-driven construction (MANIFEST.json / galaxy.yml).
            return cls.from_dir_path(dir_path, art_mgr)
        except ValueError:
            # No metadata -- infer the FQCN from the path instead.
            return cls.from_dir_path_implicit(dir_path)
|
||||
|
||||
    @classmethod
    def from_dir_path(cls, dir_path, art_mgr):
        """Make collection from an directory with metadata.

        :raises ValueError: If the dir has neither MANIFEST.json nor
            galaxy.yml (after emitting a warning).
        """
        b_dir_path = to_bytes(dir_path, errors='surrogate_or_strict')
        if not _is_collection_dir(b_dir_path):
            display.warning(
                u"Collection at '{path!s}' does not have a {manifest_json!s} "
                u'file, nor has it {galaxy_yml!s}: cannot detect version.'.
                format(
                    galaxy_yml=to_text(_GALAXY_YAML),
                    manifest_json=to_text(_MANIFEST_JSON),
                    path=to_text(dir_path, errors='surrogate_or_strict'),
                ),
            )
            raise ValueError(
                '`dir_path` argument must be an installed or a source'
                ' collection directory.',
            )

        # Build a throwaway instance first so the artifacts manager can
        # read the on-disk metadata, then construct the real one with the
        # discovered FQCN and version.
        tmp_inst_req = cls(None, None, dir_path, 'dir')
        req_name = art_mgr.get_direct_collection_fqcn(tmp_inst_req)
        req_version = art_mgr.get_direct_collection_version(tmp_inst_req)

        return cls(req_name, req_version, dir_path, 'dir')
|
||||
|
||||
@classmethod
|
||||
def from_dir_path_implicit( # type: ignore[misc]
|
||||
cls, # type: Type[Collection]
|
||||
dir_path, # type: bytes
|
||||
): # type: (...) -> Collection
|
||||
"""Construct a collection instance based on an arbitrary dir.
|
||||
|
||||
This alternative constructor infers the FQCN based on the parent
|
||||
and current directory names. It also sets the version to "*"
|
||||
regardless of whether any of known metadata files are present.
|
||||
"""
|
||||
# There is no metadata, but it isn't required for a functional collection. Determine the namespace.name from the path.
|
||||
u_dir_path = to_text(dir_path, errors='surrogate_or_strict')
|
||||
path_list = u_dir_path.split(os.path.sep)
|
||||
req_name = '.'.join(path_list[-2:])
|
||||
return cls(req_name, '*', dir_path, 'dir') # type: ignore[call-arg]
|
||||
|
||||
@classmethod
|
||||
def from_string(cls, collection_input, artifacts_manager):
|
||||
req = {}
|
||||
if _is_concrete_artifact_pointer(collection_input):
|
||||
# Arg is a file path or URL to a collection
|
||||
req['name'] = collection_input
|
||||
else:
|
||||
req['name'], _sep, req['version'] = collection_input.partition(':')
|
||||
if not req['version']:
|
||||
del req['version']
|
||||
|
||||
return cls.from_requirement_dict(req, artifacts_manager)
|
||||
|
||||
@classmethod
def from_requirement_dict(cls, collection_req, art_mgr):
    """Construct a requirement out of a ``requirements.yml``-style dict.

    Infers the requirement ``type`` when it is not given explicitly,
    validates the combination of the ``name``/``version``/``type``/
    ``source`` keys and, for concrete (non-Galaxy) artifacts, resolves
    the missing name/version via the artifacts manager.

    :param collection_req: A dict with optional keys ``name``, \
                           ``version``, ``type`` and ``source``.
    :param art_mgr: A concrete artifacts manager instance used to \
                    extract metadata out of concrete artifacts.
    :raises AnsibleError: If the entry cannot be mapped to a \
                          resolvable collection requirement.
    """
    req_name = collection_req.get('name', None)
    req_version = collection_req.get('version', '*')
    req_type = collection_req.get('type')
    # TODO: decide how to deprecate the old src API behavior
    req_source = collection_req.get('source', None)

    # First pass: figure out whether the entry is a Galaxy-served FQCN
    # or a concrete artifact pointer (path/URL/SCM).
    if req_type is None:
        if (  # FIXME: decide on the future behavior:
                _ALLOW_CONCRETE_POINTER_IN_SOURCE
                and req_source is not None
                and _is_concrete_artifact_pointer(req_source)
        ):
            src_path = req_source
        elif req_name is not None and _is_fqcn(req_name):
            req_type = 'galaxy'
        elif (
                req_name is not None
                and _is_concrete_artifact_pointer(req_name)
        ):
            src_path, req_name = req_name, None
        else:
            # Nothing resolvable: build the most helpful error we can.
            dir_tip_tmpl = (  # NOTE: leading LFs are for concat
                '\n\nTip: Make sure you are pointing to the right '
                'subdirectory — `{src!s}` looks like a directory '
                'but it is neither a collection, nor a namespace '
                'dir.'
            )

            if req_source is not None and os.path.isdir(req_source):
                tip = dir_tip_tmpl.format(src=req_source)
            elif req_name is not None and os.path.isdir(req_name):
                tip = dir_tip_tmpl.format(src=req_name)
            elif req_name:
                tip = '\n\nCould not find {0}.'.format(req_name)
            else:
                tip = ''

            raise AnsibleError(  # NOTE: I'd prefer a ValueError instead
                'Neither the collection requirement entry key '
                "'name', nor 'source' point to a concrete "
                "resolvable collection artifact. Also 'name' is "
                'not an FQCN. A valid collection name must be in '
                'the format <namespace>.<collection>. Please make '
                'sure that the namespace and the collection name '
                ' contain characters from [a-zA-Z0-9_] only.'
                '{extra_tip!s}'.format(extra_tip=tip),
            )

    # Second pass: classify the concrete pointer found above.
    # `src_path` is guaranteed to be bound here, because every branch
    # above either set it, set `req_type`, or raised.
    if req_type is None:
        if _is_git_url(src_path):
            req_type = 'git'
            req_source = src_path
        elif _is_http_url(src_path):
            req_type = 'url'
            req_source = src_path
        elif _is_file_path(src_path):
            req_type = 'file'
            req_source = src_path
        elif _is_collection_dir(src_path):
            req_type = 'dir'
            req_source = src_path
        elif _is_collection_namespace_dir(src_path):
            req_name = None  # No name for a virtual req or "namespace."?
            req_type = 'subdirs'
            req_source = src_path
        else:
            raise AnsibleError(  # NOTE: this is never supposed to be hit
                'Failed to automatically detect the collection '
                'requirement type.',
            )

    if req_type not in {'file', 'galaxy', 'git', 'url', 'dir', 'subdirs'}:
        raise AnsibleError(
            "The collection requirement entry key 'type' must be "
            'one of file, galaxy, git, dir, subdirs, or url.'
        )

    if req_name is None and req_type == 'galaxy':
        raise AnsibleError(
            'Collections requirement entry should contain '
            "the key 'name' if it's requested from a Galaxy-like "
            'index server.',
        )

    # For concrete artifacts the pointer may have arrived via 'name';
    # normalize it into 'source' so 'name' only ever holds an FQCN.
    if req_type != 'galaxy' and req_source is None:
        req_source, req_name = req_name, None

    if (
            req_type == 'galaxy' and
            isinstance(req_source, GalaxyAPI) and
            not _is_http_url(req_source.api_server)
    ):
        raise AnsibleError(
            "Collections requirement 'source' entry should contain "
            'a valid Galaxy API URL but it does not: {not_url!s} '
            'is not an HTTP URL.'.
            format(not_url=req_source.api_server),
        )

    # Build a provisional instance so the artifacts manager can probe
    # the artifact for the metadata that is still unknown.
    tmp_inst_req = cls(req_name, req_version, req_source, req_type)

    if req_type not in {'galaxy', 'subdirs'} and req_name is None:
        req_name = art_mgr.get_direct_collection_fqcn(tmp_inst_req)  # TODO: fix the cache key in artifacts manager?

    if req_type not in {'galaxy', 'subdirs'} and req_version == '*':
        req_version = art_mgr.get_direct_collection_version(tmp_inst_req)

    return cls(
        req_name, req_version,
        req_source, req_type,
    )
|
||||
|
||||
def __repr__(self):
    """Return a dev-oriented representation of this requirement."""
    origin = self.src or 'Galaxy'
    return '<{self!s} of type {coll_type!r} from {src!s}>'.format(
        self=self, coll_type=self.type, src=origin,
    )
|
||||
|
||||
def __str__(self):
    # Delegate to the text representation, downcast to a native str.
    return to_native(self.__unicode__())
|
||||
|
||||
def __unicode__(self):
    """Render a human-readable identifier for this requirement."""
    if self.fqcn is None:
        # Virtual (SCM/namespace) requirements have no FQCN to show.
        if self.is_scm:
            return u'"virtual collection Git repo"'
        return u'"virtual collection namespace"'

    return u'{fqcn!s}:{ver!s}'.format(
        fqcn=to_text(self.fqcn),
        ver=to_text(self.ver),
    )
|
||||
|
||||
def _get_separate_ns_n_name(self):  # FIXME: use LRU cache
    # Split the FQCN on dots; callers read the first element as the
    # namespace and the last one as the collection name.
    return self.fqcn.split('.')
|
||||
|
||||
@property
def namespace(self):
    """The namespace part of the FQCN.

    :raises TypeError: For virtual collections, which have no FQCN.
    """
    if self.is_virtual:
        raise TypeError('Virtual collections do not have a namespace')
    ns_and_name = self._get_separate_ns_n_name()
    return ns_and_name[0]
|
||||
|
||||
@property
def name(self):
    """The collection-name part of the FQCN.

    :raises TypeError: For virtual collections, which have no FQCN.
    """
    if self.is_virtual:
        raise TypeError('Virtual collections do not have a name')
    ns_and_name = self._get_separate_ns_n_name()
    return ns_and_name[-1]
|
||||
|
||||
@property
def canonical_package_id(self):
    """The identity the resolver uses to group requirements/candidates."""
    if self.is_virtual:
        # Virtual (namespace/SCM) entries are identified by their origin.
        return (
            '<virtual namespace from {src!s} of type {src_type!s}>'.
            format(src=to_native(self.src), src_type=to_native(self.type))
        )
    return to_native(self.fqcn)
|
||||
|
||||
@property
def is_virtual(self):
    """Whether this entry is ephemeral (an SCM repo or a namespace dir)."""
    return any((self.is_scm, self.is_subdirs))
|
||||
|
||||
@property
def is_file(self):
    """Whether the requirement points at a local artifact archive."""
    return 'file' == self.type
|
||||
|
||||
@property
def is_dir(self):
    """Whether the requirement points at a collection source directory."""
    return 'dir' == self.type
|
||||
|
||||
@property
def namespace_collection_paths(self):
    """Native-string paths of collections found under a namespace dir."""
    return [
        to_native(collection_path)
        for collection_path in _find_collections_in_subdirs(self.src)
    ]
|
||||
|
||||
@property
def is_subdirs(self):
    """Whether the requirement points at a namespace of collections."""
    return 'subdirs' == self.type
|
||||
|
||||
@property
def is_url(self):
    """Whether the requirement points at a downloadable URL."""
    return 'url' == self.type
|
||||
|
||||
@property
def is_scm(self):
    """Whether the requirement points at a Git repository."""
    return 'git' == self.type
|
||||
|
||||
@property
def is_concrete_artifact(self):
    """Whether this entry is backed by something other than a Galaxy API."""
    return self.type in ('git', 'url', 'file', 'dir', 'subdirs')
|
||||
|
||||
@property
def is_online_index_pointer(self):
    # Anything that is not a concrete artifact must be looked up via
    # a Galaxy-compatible API.
    return not self.is_concrete_artifact
|
||||
|
||||
|
||||
class Requirement(
        _ComputedReqKindsMixin,
        namedtuple('Requirement', ('fqcn', 'ver', 'src', 'type')),
):
    """An abstract requirement request.

    Immutable tuple of the requested FQCN (or ``None`` for virtual
    entries), a version specifier string, the source (URL, path or
    Galaxy API instance) and the requirement type.
    """
|
||||
|
||||
|
||||
class Candidate(
        _ComputedReqKindsMixin,
        namedtuple('Candidate', ('fqcn', 'ver', 'src', 'type'))
):
    """A concrete collection candidate with its version resolved.

    Shares the field layout with :class:`Requirement` but ``ver`` is a
    pinned version rather than a specifier.
    """
|
11
lib/ansible/galaxy/dependency_resolution/errors.py
Normal file
11
lib/ansible/galaxy/dependency_resolution/errors.py
Normal file
|
@ -0,0 +1,11 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright: (c) 2020-2021, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
"""Dependency resolution exceptions."""
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from resolvelib.resolvers import (
|
||||
ResolutionImpossible as CollectionDependencyResolutionImpossible,
|
||||
)
|
273
lib/ansible/galaxy/dependency_resolution/providers.py
Normal file
273
lib/ansible/galaxy/dependency_resolution/providers.py
Normal file
|
@ -0,0 +1,273 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright: (c) 2020-2021, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
"""Requirement provider interfaces."""
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import functools
|
||||
|
||||
try:
|
||||
from typing import TYPE_CHECKING
|
||||
except ImportError:
|
||||
TYPE_CHECKING = False
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from typing import Iterable, List, NamedTuple, Optional, Union
|
||||
from ansible.galaxy.collection.concrete_artifact_manager import (
|
||||
ConcreteArtifactsManager,
|
||||
)
|
||||
from ansible.galaxy.collection.galaxy_api_proxy import MultiGalaxyAPIProxy
|
||||
|
||||
from ansible.galaxy.dependency_resolution.dataclasses import (
|
||||
Candidate,
|
||||
Requirement,
|
||||
)
|
||||
from ansible.galaxy.dependency_resolution.versioning import (
|
||||
is_pre_release,
|
||||
meets_requirements,
|
||||
)
|
||||
from ansible.utils.version import SemanticVersion
|
||||
|
||||
from resolvelib import AbstractProvider
|
||||
|
||||
|
||||
class CollectionDependencyProvider(AbstractProvider):
    """Delegate providing a requirement interface for the resolver."""

    def __init__(
            self,  # type: CollectionDependencyProvider
            apis,  # type: MultiGalaxyAPIProxy
            concrete_artifacts_manager=None,  # type: ConcreteArtifactsManager
            preferred_candidates=None,  # type: Iterable[Candidate]
            with_deps=True,  # type: bool
            with_pre_releases=False,  # type: bool
    ):  # type: (...) -> None
        r"""Initialize helper attributes.

        :param api: An instance of the multiple Galaxy APIs wrapper.

        :param concrete_artifacts_manager: An instance of the caching \
                                           concrete artifacts manager.

        :param preferred_candidates: Already-installed candidates that \
                                     should win over fresh downloads.

        :param with_deps: A flag specifying whether the resolver \
                          should attempt to pull-in the deps of the \
                          requested requirements. On by default.

        :param with_pre_releases: A flag specifying whether the \
                                  resolver should skip pre-releases. \
                                  Off by default.
        """
        self._api_proxy = apis
        # Pre-bind the artifacts manager so that dependency dicts can
        # later be turned into Requirement instances with one call.
        self._make_req_from_dict = functools.partial(
            Requirement.from_requirement_dict,
            art_mgr=concrete_artifacts_manager,
        )
        self._preferred_candidates = set(preferred_candidates or ())
        self._with_deps = with_deps
        self._with_pre_releases = with_pre_releases

    def identify(self, requirement_or_candidate):
        # type: (Union[Candidate, Requirement]) -> str
        """Given requirement or candidate, return an identifier for it.

        This is used to identify a requirement or candidate, e.g.
        whether two requirements should have their specifier parts
        (version ranges or pins) merged, whether two candidates would
        conflict with each other (because they have same name but
        different versions).
        """
        return requirement_or_candidate.canonical_package_id

    def get_preference(
            self,  # type: CollectionDependencyProvider
            resolution,  # type: Optional[Candidate]
            candidates,  # type: List[Candidate]
            information,  # type: List[NamedTuple]
    ):  # type: (...) -> Union[float, int]
        """Return sort key function return value for given requirement.

        This result should be based on preference that is defined as
        "I think this requirement should be resolved first".
        The lower the return value is, the more preferred this
        group of arguments is.

        :param resolution: Currently pinned candidate, or ``None``.

        :param candidates: A list of possible candidates.

        :param information: A list of requirement information.

        Each ``information`` instance is a named tuple with two entries:

          * ``requirement`` specifies a requirement contributing to
            the current candidate list

          * ``parent`` specifies the candidate that provides
            (depended on) the requirement, or `None`
            to indicate a root requirement.

        The preference could depend on various issues, including
        (not necessarily in this order):

          * Is this package pinned in the current resolution result?

          * How relaxed is the requirement? Stricter ones should
            probably be worked on first? (I don't know, actually.)

          * How many possibilities are there to satisfy this
            requirement? Those with few left should likely be worked on
            first, I guess?

          * Are there any known conflicts for this requirement?
            We should probably work on those with the most
            known conflicts.

        A sortable value should be returned (this will be used as the
        `key` parameter of the built-in sorting function). The smaller
        the value is, the more preferred this requirement is (i.e. the
        sorting function is called with ``reverse=False``).
        """
        if any(
                candidate in self._preferred_candidates
                for candidate in candidates
        ):
            # NOTE: Prefer pre-installed candidates over newer versions
            # NOTE: available from Galaxy or other sources.
            return float('-inf')
        # Fewer remaining possibilities == work on this one earlier.
        return len(candidates)

    def find_matches(self, requirements):
        # type: (List[Requirement]) -> List[Candidate]
        r"""Find all possible candidates satisfying given requirements.

        This tries to get candidates based on the requirements' types.

        For concrete requirements (SCM, dir, namespace dir, local or
        remote archives), the one-and-only match is returned

        For a "named" requirement, Galaxy-compatible APIs are consulted
        to find concrete candidates for this requirement. If there's a
        pre-installed candidate, it's prepended in front of others.

        :param requirements: A collection of requirements which all of \
                             the returned candidates must match. \
                             All requirements are guaranteed to have \
                             the same identifier. \
                             The collection is never empty.

        :returns: An iterable that orders candidates by preference, \
                  e.g. the most preferred candidate comes first.
        """
        # FIXME: The first requirement may be a Git repo followed by
        # FIXME: its cloned tmp dir. Using only the first one creates
        # FIXME: loops that prevent any further dependency exploration.
        # FIXME: We need to figure out how to prevent this.
        first_req = requirements[0]
        fqcn = first_req.fqcn
        # The fqcn is guaranteed to be the same
        coll_versions = self._api_proxy.get_collection_versions(first_req)
        if first_req.is_concrete_artifact:
            # FIXME: do we assume that all the following artifacts are also concrete?
            # FIXME: does using fqcn==None cause us problems here?

            return [
                Candidate(fqcn, version, _none_src_server, first_req.type)
                for version, _none_src_server in coll_versions
            ]

        preinstalled_candidates = {
            candidate for candidate in self._preferred_candidates
            if candidate.fqcn == fqcn
        }

        return list(preinstalled_candidates) + sorted(
            {
                candidate for candidate in (
                    Candidate(fqcn, version, src_server, 'galaxy')
                    for version, src_server in coll_versions
                )
                if all(self.is_satisfied_by(requirement, candidate) for requirement in requirements)
                # FIXME
                # if all(self.is_satisfied_by(requirement, candidate) and (
                #     requirement.src is None or  # if this is true for some candidates but not all it will break key param - Nonetype can't be compared to str
                #     requirement.src == candidate.src
                # ))
            },
            key=lambda candidate: (
                SemanticVersion(candidate.ver), candidate.src,
            ),
            reverse=True,  # prefer newer versions over older ones
        )

    def is_satisfied_by(self, requirement, candidate):
        # type: (Requirement, Candidate) -> bool
        r"""Whether the given requirement is satisfiable by a candidate.

        :param requirement: A requirement that produced the `candidate`.

        :param candidate: A pinned candidate supposedly matching the \
                          `requirement` specifier. It is guaranteed to \
                          have been generated from the `requirement`.

        :returns: Indication whether the `candidate` is a viable \
                  solution to the `requirement`.
        """
        # NOTE: Only allow pre-release candidates if we want pre-releases or
        # the req ver was an exact match with the pre-release version.
        allow_pre_release = self._with_pre_releases or not (
            requirement.ver == '*' or
            requirement.ver.startswith('<') or
            requirement.ver.startswith('>') or
            requirement.ver.startswith('!=')
        )
        if is_pre_release(candidate.ver) and not allow_pre_release:
            return False

        # NOTE: This is a set of Pipenv-inspired optimizations. Ref:
        # https://github.com/sarugaku/passa/blob/2ac00f1/src/passa/models/providers.py#L58-L74
        if (
                requirement.is_virtual or
                candidate.is_virtual or
                requirement.ver == '*'
        ):
            return True

        return meets_requirements(
            version=candidate.ver,
            requirements=requirement.ver,
        )

    def get_dependencies(self, candidate):
        # type: (Candidate) -> List[Candidate]
        r"""Get direct dependencies of a candidate.

        :returns: A collection of requirements that `candidate` \
                  specifies as its dependencies.
        """
        # FIXME: If there's several galaxy servers set, there may be a
        # FIXME: situation when the metadata of the same collection
        # FIXME: differs. So how do we resolve this case? Priority?
        # FIXME: Taking into account a pinned hash? Exploding on
        # FIXME: any differences?
        # NOTE: The underlying implementation currently uses first found
        req_map = self._api_proxy.get_collection_dependencies(candidate)

        # NOTE: This guard expression MUST perform an early exit only
        # NOTE: after the `get_collection_dependencies()` call because
        # NOTE: internally it populates the artifact URL of the candidate,
        # NOTE: its SHA hash and the Galaxy API token. These are still
        # NOTE: necessary with `--no-deps` because even with the disabled
        # NOTE: dependency resolution the outer layer will still need to
        # NOTE: know how to download and validate the artifact.
        #
        # NOTE: Virtual candidates should always return dependencies
        # NOTE: because they are ephemeral and non-installable.
        if not self._with_deps and not candidate.is_virtual:
            return []

        return [
            self._make_req_from_dict({'name': dep_name, 'version': dep_req})
            for dep_name, dep_req in req_map.items()
        ]
|
17
lib/ansible/galaxy/dependency_resolution/reporters.py
Normal file
17
lib/ansible/galaxy/dependency_resolution/reporters.py
Normal file
|
@ -0,0 +1,17 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright: (c) 2020-2021, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
"""Requiement reporter implementations."""
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from resolvelib import BaseReporter
|
||||
|
||||
|
||||
class CollectionDependencyReporter(BaseReporter):
    """A dependency reporter for Ansible Collections.

    This is a proxy class allowing us to abstract away importing resolvelib
    outside of the `ansible.galaxy.dependency_resolution` Python package.
    """
|
17
lib/ansible/galaxy/dependency_resolution/resolvers.py
Normal file
17
lib/ansible/galaxy/dependency_resolution/resolvers.py
Normal file
|
@ -0,0 +1,17 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright: (c) 2020-2021, Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
"""Requirement resolver implementations."""
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from resolvelib import Resolver
|
||||
|
||||
|
||||
class CollectionDependencyResolver(Resolver):
    """A dependency resolver for Ansible Collections.

    This is a proxy class allowing us to abstract away importing resolvelib
    outside of the `ansible.galaxy.dependency_resolution` Python package.
    """
|
|
@ -15,7 +15,10 @@ from ansible.utils.version import SemanticVersion
|
|||
def is_pre_release(version):
    # type: (str) -> bool
    """Figure out if a given version is a pre-release."""
    try:
        return SemanticVersion(version).is_prerelease
    except ValueError:
        # Not a valid semantic version string: treat it as a regular
        # (non-pre-release) version rather than blowing up.
        return False
|
||||
|
||||
|
||||
def meets_requirements(version, requirements):
|
||||
|
|
|
@ -7,3 +7,7 @@ jinja2
|
|||
PyYAML
|
||||
cryptography
|
||||
packaging
|
||||
# NOTE: resolvelib 0.x version bumps should be considered major/breaking
|
||||
# NOTE: and we should update the upper cap with care, at least until 1.0
|
||||
# NOTE: Ref: https://github.com/sarugaku/resolvelib/issues/69
|
||||
resolvelib >= 0.5.3, < 0.6.0 # dependency resolver used by ansible-galaxy
|
||||
|
|
|
@ -24,8 +24,8 @@
|
|||
|
||||
- assert:
|
||||
that:
|
||||
- '"Downloading collection ''amazon.aws'' to" in download_collection.stdout'
|
||||
- '"Downloading collection ''awx.awx'' to" in download_collection.stdout'
|
||||
- '"Downloading collection ''amazon.aws:1.0.0'' to" in download_collection.stdout'
|
||||
- '"Downloading collection ''awx.awx:0.0.1-devel'' to" in download_collection.stdout'
|
||||
- download_collection_amazon_actual.stat.exists
|
||||
- download_collection_awx_actual.stat.exists
|
||||
|
||||
|
|
|
@ -2,22 +2,23 @@
|
|||
command: 'ansible-galaxy collection install git+file://{{ galaxy_dir }}/development/ansible_test/.git#/collection_1/'
|
||||
register: installed
|
||||
|
||||
- assert:
|
||||
- name: SCM collections don't have a concrete artifact version so the collection should always be reinstalled
|
||||
assert:
|
||||
that:
|
||||
- "'Skipping' in installed.stdout"
|
||||
- "'Created' not in installed.stdout"
|
||||
- "'Created collection for ansible_test.collection_1' in installed.stdout"
|
||||
- "'Created collection for ansible_test.collection_2' in installed.stdout"
|
||||
|
||||
- name: Only reinstall the collection
|
||||
- name: The collection should also be reinstalled when --force flag is used
|
||||
command: 'ansible-galaxy collection install git+file://{{ galaxy_dir }}/development/ansible_test/.git#/collection_1/ --force'
|
||||
register: installed
|
||||
|
||||
- assert:
|
||||
that:
|
||||
- "'Created collection for ansible_test.collection_1' in installed.stdout"
|
||||
- "'Created collection for ansible_test.collection_2' not in installed.stdout"
|
||||
- "'Skipping' in installed.stdout"
|
||||
# The dependency is also an SCM collection, so it should also be reinstalled
|
||||
- "'Created collection for ansible_test.collection_2' in installed.stdout"
|
||||
|
||||
- name: Reinstall the collection and dependency
|
||||
- name: The collection should also be reinstalled when --force-with-deps is used
|
||||
command: 'ansible-galaxy collection install git+file://{{ galaxy_dir }}/development/ansible_test/.git#/collection_1/ --force-with-deps'
|
||||
register: installed
|
||||
|
||||
|
@ -25,7 +26,6 @@
|
|||
that:
|
||||
- "'Created collection for ansible_test.collection_1' in installed.stdout"
|
||||
- "'Created collection for ansible_test.collection_2' in installed.stdout"
|
||||
- "'Skipping' not in installed.stdout"
|
||||
|
||||
- include_tasks: ./empty_installed_collections.yml
|
||||
when: cleanup
|
||||
|
|
|
@ -25,7 +25,13 @@
|
|||
- assert:
|
||||
that:
|
||||
- result.failed
|
||||
- '"ERROR! Collections requirement entry should contain the key name." in result.stderr'
|
||||
- >-
|
||||
"ERROR! Neither the collection requirement entry key 'name',
|
||||
nor 'source' point to a concrete resolvable collection artifact.
|
||||
Also 'name' is not an FQCN. A valid collection name must be in
|
||||
the format <namespace>.<collection>. Please make sure that the
|
||||
namespace and the collection name contain characters from
|
||||
[a-zA-Z0-9_] only." in result.stderr
|
||||
|
||||
- name: test source is not a git repo even if name is provided
|
||||
command: 'ansible-galaxy collection install -r source_and_name.yml'
|
||||
|
@ -37,7 +43,10 @@
|
|||
- assert:
|
||||
that:
|
||||
- result.failed
|
||||
- '"ERROR! Unknown error when attempting to call Galaxy" in result.stderr'
|
||||
- >-
|
||||
result.stderr is search("ERROR! Collections requirement 'source'
|
||||
entry should contain a valid Galaxy API URL but it does not:
|
||||
git\+file:///.*/amazon.aws/.git is not an HTTP URL.")
|
||||
|
||||
- name: test source is not a git repo even if name and type is provided
|
||||
command: 'ansible-galaxy collection install -r source_and_name_and_type.yml'
|
||||
|
@ -49,7 +58,12 @@
|
|||
- assert:
|
||||
that:
|
||||
- result.failed
|
||||
- 'result.stderr is search("ERROR! - command /.*/git clone ansible.nope ansible.nope failed")'
|
||||
- >-
|
||||
result.stderr is search("ERROR! Failed to clone a Git repository
|
||||
from `file:///.*/.git`.")
|
||||
- >-
|
||||
result.stderr is search("fatal: '/.*/amazon.aws/.git' does not
|
||||
appear to be a git repository")
|
||||
|
||||
- name: test using name as a git repo without git+ prefix
|
||||
command: 'ansible-galaxy collection install -r name_without_type.yml'
|
||||
|
|
|
@ -4,16 +4,35 @@
|
|||
|
||||
- assert:
|
||||
that:
|
||||
- command.stdout_lines | length == 9
|
||||
- command.stdout_lines[0] == "Starting galaxy collection install process"
|
||||
- command.stdout_lines[1] == "Process install dependency map"
|
||||
- command.stdout_lines[2] == "Starting collection install process"
|
||||
- "'namespace_1.collection_1' in command.stdout_lines[3]"
|
||||
- "'namespace_1.collection_1' in command.stdout_lines[4]"
|
||||
- "'namespace_1.collection_1' in command.stdout_lines[5]"
|
||||
- "'namespace_2.collection_2' in command.stdout_lines[6]"
|
||||
- "'namespace_2.collection_2' in command.stdout_lines[7]"
|
||||
- "'namespace_2.collection_2' in command.stdout_lines[8]"
|
||||
- command.stdout_lines | length == 12
|
||||
- >-
|
||||
'Starting galaxy collection install process'
|
||||
in command.stdout_lines
|
||||
- >-
|
||||
'Starting collection install process'
|
||||
in command.stdout_lines
|
||||
- >-
|
||||
"Installing 'namespace_1.collection_1:1.0.0' to
|
||||
'{{ galaxy_dir }}/ansible_collections/namespace_1/collection_1'"
|
||||
in command.stdout_lines
|
||||
- >-
|
||||
'Created collection for namespace_1.collection_1:1.0.0 at
|
||||
{{ galaxy_dir }}/ansible_collections/namespace_1/collection_1'
|
||||
in command.stdout_lines
|
||||
- >-
|
||||
'namespace_1.collection_1:1.0.0 was installed successfully'
|
||||
in command.stdout_lines
|
||||
- >-
|
||||
"Installing 'namespace_2.collection_2:1.0.0' to
|
||||
'{{ galaxy_dir }}/ansible_collections/namespace_2/collection_2'"
|
||||
in command.stdout_lines
|
||||
- >-
|
||||
'Created collection for namespace_2.collection_2:1.0.0 at
|
||||
{{ galaxy_dir }}/ansible_collections/namespace_2/collection_2'
|
||||
in command.stdout_lines
|
||||
- >-
|
||||
'namespace_2.collection_2:1.0.0 was installed successfully'
|
||||
in command.stdout_lines
|
||||
|
||||
- name: list installed collections
|
||||
command: 'ansible-galaxy collection list'
|
||||
|
@ -30,16 +49,35 @@
|
|||
|
||||
- assert:
|
||||
that:
|
||||
- command.stdout_lines | length == 9
|
||||
- command.stdout_lines[0] == "Starting galaxy collection install process"
|
||||
- command.stdout_lines[1] == "Process install dependency map"
|
||||
- command.stdout_lines[2] == "Starting collection install process"
|
||||
- "'namespace_1.collection_1' in command.stdout_lines[3]"
|
||||
- "'namespace_1.collection_1' in command.stdout_lines[4]"
|
||||
- "'namespace_1.collection_1' in command.stdout_lines[5]"
|
||||
- "'namespace_2.collection_2' in command.stdout_lines[6]"
|
||||
- "'namespace_2.collection_2' in command.stdout_lines[7]"
|
||||
- "'namespace_2.collection_2' in command.stdout_lines[8]"
|
||||
- command.stdout_lines | length == 12
|
||||
- >-
|
||||
'Starting galaxy collection install process'
|
||||
in command.stdout_lines
|
||||
- >-
|
||||
'Starting collection install process'
|
||||
in command.stdout_lines
|
||||
- >-
|
||||
"Installing 'namespace_1.collection_1:1.0.0' to
|
||||
'{{ galaxy_dir }}/ansible_collections/namespace_1/collection_1'"
|
||||
in command.stdout_lines
|
||||
- >-
|
||||
'Created collection for namespace_1.collection_1:1.0.0 at
|
||||
{{ galaxy_dir }}/ansible_collections/namespace_1/collection_1'
|
||||
in command.stdout_lines
|
||||
- >-
|
||||
'namespace_1.collection_1:1.0.0 was installed successfully'
|
||||
in command.stdout_lines
|
||||
- >-
|
||||
"Installing 'namespace_2.collection_2:1.0.0' to
|
||||
'{{ galaxy_dir }}/ansible_collections/namespace_2/collection_2'"
|
||||
in command.stdout_lines
|
||||
- >-
|
||||
'Created collection for namespace_2.collection_2:1.0.0 at
|
||||
{{ galaxy_dir }}/ansible_collections/namespace_2/collection_2'
|
||||
in command.stdout_lines
|
||||
- >-
|
||||
'namespace_2.collection_2:1.0.0 was installed successfully'
|
||||
in command.stdout_lines
|
||||
|
||||
- name: list installed collections
|
||||
command: 'ansible-galaxy collection list'
|
||||
|
|
|
@ -16,12 +16,18 @@
|
|||
file_type: file
|
||||
register: download_collection_actual
|
||||
|
||||
- name: assert download collection with multiple dependencies
|
||||
- name: assert download collection with multiple dependencies --no-deps
|
||||
assert:
|
||||
that:
|
||||
- '"Downloading collection ''parent_dep.parent_collection'' to" in download_collection.stdout'
|
||||
- 'not "Downloading collection ''child_dep.child_collection'' to" in download_collection.stdout'
|
||||
- 'not "Downloading collection ''child_dep.child_dep2'' to" in download_collection.stdout'
|
||||
- >-
|
||||
"Downloading collection 'parent_dep.parent_collection:1.0.0' to '/tmp/"
|
||||
in download_collection.stdout
|
||||
- >-
|
||||
"Downloading collection 'child_dep.child_collection"
|
||||
not in download_collection.stdout
|
||||
- >-
|
||||
"Downloading collection 'child_dep.child_dep2"
|
||||
not in download_collection.stdout
|
||||
- download_collection_actual.examined == 2
|
||||
- download_collection_actual.matched == 2
|
||||
- (download_collection_actual.files[0].path | basename) in ['requirements.yml', 'parent_dep-parent_collection-1.0.0.tar.gz']
|
||||
|
@ -42,9 +48,9 @@
|
|||
- name: assert download collection with multiple dependencies
|
||||
assert:
|
||||
that:
|
||||
- '"Downloading collection ''parent_dep.parent_collection'' to" in download_collection.stdout'
|
||||
- '"Downloading collection ''child_dep.child_collection'' to" in download_collection.stdout'
|
||||
- '"Downloading collection ''child_dep.child_dep2'' to" in download_collection.stdout'
|
||||
- '"Downloading collection ''parent_dep.parent_collection:1.0.0'' to" in download_collection.stdout'
|
||||
- '"Downloading collection ''child_dep.child_collection:0.9.9'' to" in download_collection.stdout'
|
||||
- '"Downloading collection ''child_dep.child_dep2:1.2.2'' to" in download_collection.stdout'
|
||||
- download_collection_actual.examined == 4
|
||||
- download_collection_actual.matched == 4
|
||||
- (download_collection_actual.files[0].path | basename) in ['requirements.yml', 'child_dep-child_dep2-1.2.2.tar.gz', 'child_dep-child_collection-0.9.9.tar.gz', 'parent_dep-parent_collection-1.0.0.tar.gz']
|
||||
|
@ -104,7 +110,7 @@
|
|||
- name: assert download collection with multiple dependencies
|
||||
assert:
|
||||
that:
|
||||
- '"Downloading collection ''namespace1.name1'' to" in download_req_custom_path.stdout'
|
||||
- '"Downloading collection ''namespace1.name1:1.1.0-beta.1'' to" in download_req_custom_path.stdout'
|
||||
- download_req_custom_path_actual.examined == 2
|
||||
- download_req_custom_path_actual.matched == 2
|
||||
- (download_req_custom_path_actual.files[0].path | basename) in ['requirements.yml', 'namespace1-name1-1.1.0-beta.1.tar.gz']
|
||||
|
@ -161,5 +167,5 @@
|
|||
|
||||
- assert:
|
||||
that:
|
||||
- '"Downloading collection ''ansible_test.my_collection'' to" in download_collection.stdout'
|
||||
- '"Downloading collection ''ansible_test.my_collection:1.0.0'' to" in download_collection.stdout'
|
||||
- download_collection_actual.stat.exists
|
||||
|
|
|
@ -40,7 +40,7 @@
|
|||
- name: assert install existing without --force - {{ test_name }}
|
||||
assert:
|
||||
that:
|
||||
- '"Skipping ''namespace1.name1'' as it is already installed" in install_existing_no_force.stdout'
|
||||
- '"Nothing to do. All requested collections are already installed" in install_existing_no_force.stdout'
|
||||
|
||||
- name: install existing with --force - {{ test_name }}
|
||||
command: ansible-galaxy collection install namespace1.name1 -s '{{ test_name }}' --force {{ galaxy_verbosity }}
|
||||
|
@ -129,7 +129,9 @@
|
|||
- name: expect failure with dep resolution failure
|
||||
command: ansible-galaxy collection install fail_namespace.fail_collection -s {{ test_name }} {{ galaxy_verbosity }}
|
||||
register: fail_dep_mismatch
|
||||
failed_when: '"Cannot meet dependency requirement ''fail_dep2.name:<0.0.5'' for collection fail_namespace.fail_collection" not in fail_dep_mismatch.stderr'
|
||||
failed_when:
|
||||
- '"Could not satisfy the following requirements" not in fail_dep_mismatch.stderr'
|
||||
- '" fail_dep2.name:<0.0.5 (dependency of fail_namespace.fail_collection:2.1.2)" not in fail_dep_mismatch.stderr'
|
||||
|
||||
- name: Find artifact url for namespace3.name
|
||||
uri:
|
||||
|
|
|
@ -0,0 +1,55 @@
|
|||
- name: initialize collection structure
|
||||
command: ansible-galaxy collection init {{ item }} --init-path "{{ galaxy_dir }}/dev/ansible_collections" {{ galaxy_verbosity }}
|
||||
loop:
|
||||
- 'dev.collection1'
|
||||
- 'dev.collection2'
|
||||
- 'dev.collection3'
|
||||
|
||||
- name: replace the default version of the collections
|
||||
lineinfile:
|
||||
path: "{{ galaxy_dir }}/dev/ansible_collections/dev/{{ item.name }}/galaxy.yml"
|
||||
line: "{{ item.version }}"
|
||||
regexp: "version: .*"
|
||||
loop:
|
||||
- name: "collection1"
|
||||
version: "version: null"
|
||||
- name: "collection2"
|
||||
version: "version: placeholder"
|
||||
- name: "collection3"
|
||||
version: "version: ''"
|
||||
|
||||
- name: list collections in development without semver versions
|
||||
command: ansible-galaxy collection list {{ galaxy_verbosity }}
|
||||
register: list_result
|
||||
environment:
|
||||
ANSIBLE_COLLECTIONS_PATH: "{{ galaxy_dir }}/dev:{{ galaxy_dir }}/prod"
|
||||
|
||||
- assert:
|
||||
that:
|
||||
- "'dev.collection1 *' in list_result.stdout"
|
||||
# Note the version displayed is the 'placeholder' string rather than "*" since it is not falsey
|
||||
- "'dev.collection2 placeholder' in list_result.stdout"
|
||||
- "'dev.collection3 *' in list_result.stdout"
|
||||
|
||||
- name: install an artifact to the second collections path
|
||||
command: ansible-galaxy collection install namespace1.name1 -s galaxy_ng {{ galaxy_verbosity }} -p "{{ galaxy_dir }}/prod"
|
||||
environment:
|
||||
ANSIBLE_CONFIG: '{{ galaxy_dir }}/ansible.cfg'
|
||||
|
||||
- name: replace the artifact version
|
||||
lineinfile:
|
||||
path: "{{ galaxy_dir }}/prod/ansible_collections/namespace1/name1/MANIFEST.json"
|
||||
line: ' "version": null,'
|
||||
regexp: ' "version": .*'
|
||||
|
||||
- name: test listing collections in all paths
|
||||
command: ansible-galaxy collection list {{ galaxy_verbosity }}
|
||||
register: list_result
|
||||
ignore_errors: True
|
||||
environment:
|
||||
ANSIBLE_COLLECTIONS_PATH: "{{ galaxy_dir }}/dev:{{ galaxy_dir }}/prod"
|
||||
|
||||
- assert:
|
||||
that:
|
||||
- list_result is failed
|
||||
- "'is expected to have a valid SemVer version value but got None' in list_result.stderr"
|
|
@ -144,34 +144,24 @@
|
|||
- name: assert result of install collection with dep on another server
|
||||
assert:
|
||||
that:
|
||||
- '"''secondary.name'' obtained from server secondary" in install_cross_dep.stdout'
|
||||
- >-
|
||||
"'secondary.name:1.0.0' obtained from server secondary"
|
||||
in install_cross_dep.stdout
|
||||
# pulp_v2 is highest in the list so it will find it there first
|
||||
- '"''parent_dep.parent_collection'' obtained from server pulp_v2" in install_cross_dep.stdout'
|
||||
- '"''child_dep.child_collection'' obtained from server pulp_v2" in install_cross_dep.stdout'
|
||||
- '"''child_dep.child_dep2'' obtained from server pulp_v2" in install_cross_dep.stdout'
|
||||
- >-
|
||||
"'parent_dep.parent_collection:1.0.0' obtained from server pulp_v2"
|
||||
in install_cross_dep.stdout
|
||||
- >-
|
||||
"'child_dep.child_collection:0.9.9' obtained from server pulp_v2"
|
||||
in install_cross_dep.stdout
|
||||
- >-
|
||||
"'child_dep.child_dep2:1.2.2' obtained from server pulp_v2"
|
||||
in install_cross_dep.stdout
|
||||
- (install_cross_dep_actual.results[0].content | b64decode | from_json).collection_info.version == '1.0.0'
|
||||
- (install_cross_dep_actual.results[1].content | b64decode | from_json).collection_info.version == '1.0.0'
|
||||
- (install_cross_dep_actual.results[2].content | b64decode | from_json).collection_info.version == '0.9.9'
|
||||
- (install_cross_dep_actual.results[3].content | b64decode | from_json).collection_info.version == '1.2.2'
|
||||
|
||||
# fake.fake does not exist but we check the output to ensure it checked all 3
|
||||
# servers defined in the config. We hardcode to -vvv as that's what level the
|
||||
# message is shown
|
||||
- name: test install fallback on server list
|
||||
command: ansible-galaxy collection install fake.fake -vvv
|
||||
ignore_errors: yes
|
||||
environment:
|
||||
ANSIBLE_CONFIG: '{{ galaxy_dir }}/ansible.cfg'
|
||||
register: missing_fallback
|
||||
|
||||
- name: assert test install fallback on server list
|
||||
assert:
|
||||
that:
|
||||
- missing_fallback.rc == 1
|
||||
- '"Collection ''fake.fake'' is not available from server pulp_v2" in missing_fallback.stdout'
|
||||
- '"Collection ''fake.fake'' is not available from server pulp_v3" in missing_fallback.stdout'
|
||||
- '"Collection ''fake.fake'' is not available from server galaxy_ng" in missing_fallback.stdout'
|
||||
|
||||
- name: run ansible-galaxy collection download tests
|
||||
include_tasks: download.yml
|
||||
args:
|
||||
|
@ -189,3 +179,6 @@
|
|||
test_name: 'galaxy_ng'
|
||||
test_server: '{{ galaxy_ng_server }}'
|
||||
vX: "v3/"
|
||||
|
||||
- name: run ansible-galaxy collection list tests
|
||||
include_tasks: list.yml
|
||||
|
|
|
@ -21,7 +21,8 @@
|
|||
- assert:
|
||||
that:
|
||||
- verify.failed
|
||||
- "'The format namespace.name is expected' in verify.stderr"
|
||||
- >-
|
||||
"ERROR! 'file' type is not supported. The format namespace.name is expected." in verify.stderr
|
||||
|
||||
- name: install the collection from the server
|
||||
command: ansible-galaxy collection install ansible_test.verify:1.0.0
|
||||
|
@ -39,6 +40,11 @@
|
|||
- verify is success
|
||||
- "'Collection ansible_test.verify contains modified content' not in verify.stdout"
|
||||
|
||||
- name: verify the installed collection against the server, with unspecified version in CLI
|
||||
command: ansible-galaxy collection verify ansible_test.verify
|
||||
environment:
|
||||
ANSIBLE_COLLECTIONS_PATH: '{{ galaxy_dir }}'
|
||||
|
||||
- name: verify a collection that doesn't appear to be installed
|
||||
command: ansible-galaxy collection verify ansible_test.verify:1.0.0
|
||||
register: verify
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
resolvelib >= 0.5.3, < 0.6.0 # keep in sync with `requirements.txt`
|
||||
coverage >= 4.5.1, < 5.0.0 ; python_version < '3.7' # coverage 4.4 required for "disable_warnings" support but 4.5.1 needed for bug fixes, coverage 5.0+ incompatible
|
||||
coverage >= 4.5.2, < 5.0.0 ; python_version == '3.7' # coverage 4.5.2 fixes bugs in support for python 3.7, coverage 5.0+ incompatible
|
||||
coverage >= 4.5.4, < 5.0.0 ; python_version > '3.7' # coverage had a bug in < 4.5.4 that would cause unit tests to hang in Python 3.8, coverage 5.0+ incompatible
|
||||
|
|
|
@ -4,3 +4,4 @@ junit-xml
|
|||
ordereddict ; python_version < '2.7'
|
||||
packaging
|
||||
pyyaml
|
||||
resolvelib
|
||||
|
|
|
@ -5,3 +5,4 @@ pytest
|
|||
pytest-mock
|
||||
pytest-xdist
|
||||
pyyaml
|
||||
resolvelib
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
jinja2
|
||||
pyyaml
|
||||
resolvelib
|
||||
sphinx
|
||||
sphinx-notfound-page
|
||||
straight.plugin
|
||||
|
|
|
@ -2,6 +2,7 @@ docutils
|
|||
jinja2
|
||||
packaging
|
||||
pyyaml # ansible-core requirement
|
||||
resolvelib # ansible-core requirement
|
||||
rstcheck
|
||||
setuptools > 39.2
|
||||
straight.plugin
|
||||
|
|
|
@ -41,7 +41,11 @@ lib/ansible/executor/powershell/async_watchdog.ps1 pslint:PSCustomUseLiteralPath
|
|||
lib/ansible/executor/powershell/async_wrapper.ps1 pslint:PSCustomUseLiteralPath
|
||||
lib/ansible/executor/powershell/exec_wrapper.ps1 pslint:PSCustomUseLiteralPath
|
||||
lib/ansible/executor/task_queue_manager.py pylint:blacklisted-name
|
||||
lib/ansible/cli/galaxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
|
||||
lib/ansible/galaxy/collection/__init__.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
|
||||
lib/ansible/galaxy/collection/galaxy_api_proxy.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
|
||||
lib/ansible/galaxy/dependency_resolution/dataclasses.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
|
||||
lib/ansible/galaxy/dependency_resolution/providers.py compile-2.6!skip # 'ansible-galaxy collection' requires 2.7+
|
||||
lib/ansible/module_utils/compat/_selectors2.py future-import-boilerplate # ignore bundled
|
||||
lib/ansible/module_utils/compat/_selectors2.py metaclass-boilerplate # ignore bundled
|
||||
lib/ansible/module_utils/compat/_selectors2.py pylint:blacklisted-name
|
||||
|
|
|
@ -8,14 +8,13 @@ __metaclass__ = type
|
|||
import pytest
|
||||
|
||||
from ansible.cli.galaxy import _display_collection
|
||||
from ansible.galaxy.dependency_resolution.dataclasses import Requirement
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def collection_object(mocker):
|
||||
def collection_object():
|
||||
def _cobj(fqcn='sandwiches.ham'):
|
||||
cobj = mocker.MagicMock(latest_version='1.5.0')
|
||||
cobj.__str__.return_value = fqcn
|
||||
return cobj
|
||||
return Requirement(fqcn, '1.5.0', None, 'galaxy')
|
||||
return _cobj
|
||||
|
||||
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2020 Ansible Project
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
|
@ -9,7 +10,8 @@ import pytest
|
|||
from ansible import context
|
||||
from ansible.cli.galaxy import GalaxyCLI
|
||||
from ansible.errors import AnsibleError, AnsibleOptionsError
|
||||
from ansible.galaxy.collection import CollectionRequirement
|
||||
from ansible.galaxy import collection
|
||||
from ansible.galaxy.dependency_resolution.dataclasses import Requirement
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
|
||||
|
@ -48,47 +50,39 @@ def mock_collection_objects(mocker):
|
|||
mocker.patch('ansible.cli.galaxy.validate_collection_path',
|
||||
side_effect=['/root/.ansible/collections/ansible_collections', '/usr/share/ansible/collections/ansible_collections'])
|
||||
|
||||
collection_args = (
|
||||
collection_args_1 = (
|
||||
(
|
||||
'sandwiches',
|
||||
'pbj',
|
||||
b'/usr/share/ansible/collections/ansible_collections/sandwiches/pbj',
|
||||
mocker.Mock(),
|
||||
['1.0.0', '1.5.0'],
|
||||
'1.0.0',
|
||||
False,
|
||||
),
|
||||
(
|
||||
'sandwiches',
|
||||
'pbj',
|
||||
b'/root/.ansible/collections/ansible_collections/sandwiches/pbj',
|
||||
mocker.Mock(),
|
||||
['1.0.0', '1.5.0'],
|
||||
'sandwiches.pbj',
|
||||
'1.5.0',
|
||||
False,
|
||||
None,
|
||||
'dir',
|
||||
),
|
||||
(
|
||||
'sandwiches',
|
||||
'ham',
|
||||
b'/usr/share/ansible/collections/ansible_collections/sandwiches/ham',
|
||||
mocker.Mock(),
|
||||
['1.0.0'],
|
||||
'1.0.0',
|
||||
False,
|
||||
),
|
||||
(
|
||||
'sandwiches',
|
||||
'reuben',
|
||||
b'/root/.ansible/collections/ansible_collections/sandwiches/reuben',
|
||||
mocker.Mock(),
|
||||
['1.0.0', '2.5.0'],
|
||||
'sandwiches.reuben',
|
||||
'2.5.0',
|
||||
False,
|
||||
None,
|
||||
'dir',
|
||||
),
|
||||
)
|
||||
|
||||
collections_path_1 = [CollectionRequirement(*cargs) for cargs in collection_args if to_native(cargs[2]).startswith('/root')]
|
||||
collections_path_2 = [CollectionRequirement(*cargs) for cargs in collection_args if to_native(cargs[2]).startswith('/usr/share')]
|
||||
collection_args_2 = (
|
||||
(
|
||||
'sandwiches.pbj',
|
||||
'1.0.0',
|
||||
None,
|
||||
'dir',
|
||||
),
|
||||
(
|
||||
'sandwiches.ham',
|
||||
'1.0.0',
|
||||
None,
|
||||
'dir',
|
||||
),
|
||||
)
|
||||
|
||||
collections_path_1 = [Requirement(*cargs) for cargs in collection_args_1]
|
||||
collections_path_2 = [Requirement(*cargs) for cargs in collection_args_2]
|
||||
|
||||
mocker.patch('ansible.cli.galaxy.find_existing_collections', side_effect=[collections_path_1, collections_path_2])
|
||||
|
||||
|
||||
|
@ -98,44 +92,35 @@ def mock_from_path(mocker):
|
|||
collection_args = {
|
||||
'sandwiches.pbj': (
|
||||
(
|
||||
'sandwiches',
|
||||
'pbj',
|
||||
b'/root/.ansible/collections/ansible_collections/sandwiches/pbj',
|
||||
mocker.Mock(),
|
||||
['1.0.0', '1.5.0'],
|
||||
'sandwiches.pbj',
|
||||
'1.5.0',
|
||||
False,
|
||||
None,
|
||||
'dir',
|
||||
),
|
||||
(
|
||||
'sandwiches',
|
||||
'pbj',
|
||||
b'/usr/share/ansible/collections/ansible_collections/sandwiches/pbj',
|
||||
mocker.Mock(),
|
||||
['1.0.0', '1.5.0'],
|
||||
'sandwiches.pbj',
|
||||
'1.0.0',
|
||||
False,
|
||||
None,
|
||||
'dir',
|
||||
),
|
||||
),
|
||||
'sandwiches.ham': (
|
||||
(
|
||||
'sandwiches',
|
||||
'ham',
|
||||
b'/usr/share/ansible/collections/ansible_collections/sandwiches/ham',
|
||||
mocker.Mock(),
|
||||
['1.0.0'],
|
||||
'sandwiches.ham',
|
||||
'1.0.0',
|
||||
False,
|
||||
None,
|
||||
'dir',
|
||||
),
|
||||
),
|
||||
}
|
||||
|
||||
from_path_objects = [CollectionRequirement(*args) for args in collection_args[collection_name]]
|
||||
mocker.patch('ansible.galaxy.collection.CollectionRequirement.from_path', side_effect=from_path_objects)
|
||||
from_path_objects = [Requirement(*args) for args in collection_args[collection_name]]
|
||||
mocker.patch('ansible.cli.galaxy.Requirement.from_dir_path_as_unknown', side_effect=from_path_objects)
|
||||
|
||||
return _from_path
|
||||
|
||||
|
||||
def test_execute_list_collection_all(mocker, capsys, mock_collection_objects):
|
||||
def test_execute_list_collection_all(mocker, capsys, mock_collection_objects, tmp_path_factory):
|
||||
"""Test listing all collections from multiple paths"""
|
||||
|
||||
cliargs()
|
||||
|
@ -143,7 +128,9 @@ def test_execute_list_collection_all(mocker, capsys, mock_collection_objects):
|
|||
mocker.patch('os.path.exists', return_value=True)
|
||||
mocker.patch('os.path.isdir', return_value=True)
|
||||
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list'])
|
||||
gc.execute_list_collection()
|
||||
tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
|
||||
gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)
|
||||
|
||||
out, err = capsys.readouterr()
|
||||
out_lines = out.splitlines()
|
||||
|
@ -163,7 +150,7 @@ def test_execute_list_collection_all(mocker, capsys, mock_collection_objects):
|
|||
assert out_lines[11] == 'sandwiches.pbj 1.0.0 '
|
||||
|
||||
|
||||
def test_execute_list_collection_specific(mocker, capsys, mock_collection_objects, mock_from_path):
|
||||
def test_execute_list_collection_specific(mocker, capsys, mock_collection_objects, mock_from_path, tmp_path_factory):
|
||||
"""Test listing a specific collection"""
|
||||
|
||||
collection_name = 'sandwiches.ham'
|
||||
|
@ -176,7 +163,9 @@ def test_execute_list_collection_specific(mocker, capsys, mock_collection_object
|
|||
mocker.patch('ansible.cli.galaxy._get_collection_widths', return_value=(14, 5))
|
||||
|
||||
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name])
|
||||
gc.execute_list_collection()
|
||||
tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
|
||||
gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)
|
||||
|
||||
out, err = capsys.readouterr()
|
||||
out_lines = out.splitlines()
|
||||
|
@ -189,7 +178,7 @@ def test_execute_list_collection_specific(mocker, capsys, mock_collection_object
|
|||
assert out_lines[4] == 'sandwiches.ham 1.0.0 '
|
||||
|
||||
|
||||
def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collection_objects, mock_from_path):
|
||||
def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collection_objects, mock_from_path, tmp_path_factory):
|
||||
"""Test listing a specific collection that exists at multiple paths"""
|
||||
|
||||
collection_name = 'sandwiches.pbj'
|
||||
|
@ -201,7 +190,9 @@ def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collect
|
|||
mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name)
|
||||
|
||||
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name])
|
||||
gc.execute_list_collection()
|
||||
tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
|
||||
gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)
|
||||
|
||||
out, err = capsys.readouterr()
|
||||
out_lines = out.splitlines()
|
||||
|
@ -219,7 +210,7 @@ def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collect
|
|||
assert out_lines[9] == 'sandwiches.pbj 1.0.0 '
|
||||
|
||||
|
||||
def test_execute_list_collection_specific_invalid_fqcn(mocker):
|
||||
def test_execute_list_collection_specific_invalid_fqcn(mocker, tmp_path_factory):
|
||||
"""Test an invalid fully qualified collection name (FQCN)"""
|
||||
|
||||
collection_name = 'no.good.name'
|
||||
|
@ -229,11 +220,13 @@ def test_execute_list_collection_specific_invalid_fqcn(mocker):
|
|||
mocker.patch('os.path.isdir', return_value=True)
|
||||
|
||||
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name])
|
||||
tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
|
||||
with pytest.raises(AnsibleError, match='Invalid collection name'):
|
||||
gc.execute_list_collection()
|
||||
gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)
|
||||
|
||||
|
||||
def test_execute_list_collection_no_valid_paths(mocker, capsys):
|
||||
def test_execute_list_collection_no_valid_paths(mocker, capsys, tmp_path_factory):
|
||||
"""Test listing collections when no valid paths are given"""
|
||||
|
||||
cliargs()
|
||||
|
@ -244,8 +237,11 @@ def test_execute_list_collection_no_valid_paths(mocker, capsys):
|
|||
mocker.patch('ansible.cli.galaxy.display.columns', 79)
|
||||
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list'])
|
||||
|
||||
tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
|
||||
|
||||
with pytest.raises(AnsibleOptionsError, match=r'None of the provided paths were usable.'):
|
||||
gc.execute_list_collection()
|
||||
gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)
|
||||
|
||||
out, err = capsys.readouterr()
|
||||
|
||||
|
@ -253,7 +249,7 @@ def test_execute_list_collection_no_valid_paths(mocker, capsys):
|
|||
assert 'exists, but it\nis not a directory.' in err
|
||||
|
||||
|
||||
def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_collection_objects):
|
||||
def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_collection_objects, tmp_path_factory):
|
||||
"""Test listing all collections when one invalid path is given"""
|
||||
|
||||
cliargs()
|
||||
|
@ -263,7 +259,9 @@ def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_collectio
|
|||
mocker.patch('ansible.utils.color.ANSIBLE_COLOR', False)
|
||||
|
||||
gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', '-p', 'nope'])
|
||||
gc.execute_list_collection()
|
||||
tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections')
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
|
||||
gc.execute_list_collection(artifacts_manager=concrete_artifact_cm)
|
||||
|
||||
out, err = capsys.readouterr()
|
||||
out_lines = out.splitlines()
|
||||
|
|
|
@ -8,18 +8,16 @@ __metaclass__ = type
|
|||
import pytest
|
||||
|
||||
from ansible.cli.galaxy import _get_collection_widths
|
||||
from ansible.galaxy.dependency_resolution.dataclasses import Requirement
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def collection_objects(mocker):
|
||||
collection_ham = mocker.MagicMock(latest_version='1.5.0')
|
||||
collection_ham.__str__.return_value = 'sandwiches.ham'
|
||||
def collection_objects():
|
||||
collection_ham = Requirement('sandwiches.ham', '1.5.0', None, 'galaxy')
|
||||
|
||||
collection_pbj = mocker.MagicMock(latest_version='2.5')
|
||||
collection_pbj.__str__.return_value = 'sandwiches.pbj'
|
||||
collection_pbj = Requirement('sandwiches.pbj', '2.5', None, 'galaxy')
|
||||
|
||||
collection_reuben = mocker.MagicMock(latest_version='4')
|
||||
collection_reuben.__str__.return_value = 'sandwiches.reuben'
|
||||
collection_reuben = Requirement('sandwiches.reuben', '4', None, 'galaxy')
|
||||
|
||||
return [collection_ham, collection_pbj, collection_reuben]
|
||||
|
||||
|
@ -29,8 +27,7 @@ def test_get_collection_widths(collection_objects):
|
|||
|
||||
|
||||
def test_get_collection_widths_single_collection(mocker):
|
||||
mocked_collection = mocker.MagicMock(latest_version='3.0.0')
|
||||
mocked_collection.__str__.return_value = 'sandwiches.club'
|
||||
mocked_collection = Requirement('sandwiches.club', '3.0.0', None, 'galaxy')
|
||||
# Make this look like it is not iterable
|
||||
mocker.patch('ansible.cli.galaxy.is_iterable', return_value=False)
|
||||
|
||||
|
|
|
@ -21,6 +21,7 @@ from __future__ import (absolute_import, division, print_function)
|
|||
__metaclass__ = type
|
||||
|
||||
import ansible
|
||||
from io import BytesIO
|
||||
import json
|
||||
import os
|
||||
import pytest
|
||||
|
@ -33,6 +34,7 @@ import yaml
|
|||
import ansible.constants as C
|
||||
from ansible import context
|
||||
from ansible.cli.galaxy import GalaxyCLI
|
||||
from ansible.galaxy import collection
|
||||
from ansible.galaxy.api import GalaxyAPI
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils._text import to_bytes, to_native, to_text
|
||||
|
@ -630,7 +632,12 @@ def test_invalid_collection_name_init(name):
|
|||
])
|
||||
def test_invalid_collection_name_install(name, expected, tmp_path_factory):
|
||||
install_path = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
|
||||
expected = "Invalid collection name '%s', name must be in the format <namespace>.<collection>" % expected
|
||||
|
||||
# FIXME: we should add the collection name in the error message
|
||||
# Used to be: expected = "Invalid collection name '%s', name must be in the format <namespace>.<collection>" % expected
|
||||
expected = "Neither the collection requirement entry key 'name', nor 'source' point to a concrete resolvable collection artifact. "
|
||||
expected += r"Also 'name' is not an FQCN\. A valid collection name must be in the format <namespace>\.<collection>\. "
|
||||
expected += r"Please make sure that the namespace and the collection name contain characters from \[a\-zA\-Z0\-9_\] only\."
|
||||
|
||||
gc = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', name, '-p', os.path.join(install_path, 'install')])
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
|
@ -758,17 +765,17 @@ def test_collection_install_with_names(collection_install):
|
|||
in mock_warning.call_args[0][0]
|
||||
|
||||
assert mock_install.call_count == 1
|
||||
assert mock_install.call_args[0][0] == [('namespace.collection', '*', None, None),
|
||||
('namespace2.collection', '1.0.1', None, None)]
|
||||
requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_install.call_args[0][0]]
|
||||
assert requirements == [('namespace.collection', '*', None, 'galaxy'),
|
||||
('namespace2.collection', '1.0.1', None, 'galaxy')]
|
||||
assert mock_install.call_args[0][1] == collection_path
|
||||
assert len(mock_install.call_args[0][2]) == 1
|
||||
assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
|
||||
assert mock_install.call_args[0][2][0].validate_certs is True
|
||||
assert mock_install.call_args[0][3] is True
|
||||
assert mock_install.call_args[0][4] is False
|
||||
assert mock_install.call_args[0][5] is False
|
||||
assert mock_install.call_args[0][6] is False
|
||||
assert mock_install.call_args[0][7] is False
|
||||
assert mock_install.call_args[0][3] is False # ignore_errors
|
||||
assert mock_install.call_args[0][4] is False # no_deps
|
||||
assert mock_install.call_args[0][5] is False # force
|
||||
assert mock_install.call_args[0][6] is False # force_deps
|
||||
|
||||
|
||||
def test_collection_install_with_requirements_file(collection_install):
|
||||
|
@ -795,17 +802,16 @@ collections:
|
|||
in mock_warning.call_args[0][0]
|
||||
|
||||
assert mock_install.call_count == 1
|
||||
assert mock_install.call_args[0][0] == [('namespace.coll', '*', None, None),
|
||||
('namespace2.coll', '>2.0.1', None, None)]
|
||||
requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_install.call_args[0][0]]
|
||||
assert requirements == [('namespace.coll', '*', None, 'galaxy'),
|
||||
('namespace2.coll', '>2.0.1', None, 'galaxy')]
|
||||
assert mock_install.call_args[0][1] == collection_path
|
||||
assert len(mock_install.call_args[0][2]) == 1
|
||||
assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
|
||||
assert mock_install.call_args[0][2][0].validate_certs is True
|
||||
assert mock_install.call_args[0][3] is True
|
||||
assert mock_install.call_args[0][4] is False
|
||||
assert mock_install.call_args[0][5] is False
|
||||
assert mock_install.call_args[0][6] is False
|
||||
assert mock_install.call_args[0][7] is False
|
||||
assert mock_install.call_args[0][3] is False # ignore_errors
|
||||
assert mock_install.call_args[0][4] is False # no_deps
|
||||
assert mock_install.call_args[0][5] is False # force
|
||||
assert mock_install.call_args[0][6] is False # force_deps
|
||||
|
||||
|
||||
def test_collection_install_with_relative_path(collection_install, monkeypatch):
|
||||
|
@ -829,11 +835,10 @@ def test_collection_install_with_relative_path(collection_install, monkeypatch):
|
|||
assert len(mock_install.call_args[0][2]) == 1
|
||||
assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
|
||||
assert mock_install.call_args[0][2][0].validate_certs is True
|
||||
assert mock_install.call_args[0][3] is True
|
||||
assert mock_install.call_args[0][4] is False
|
||||
assert mock_install.call_args[0][5] is False
|
||||
assert mock_install.call_args[0][6] is False
|
||||
assert mock_install.call_args[0][7] is False
|
||||
assert mock_install.call_args[0][3] is False # ignore_errors
|
||||
assert mock_install.call_args[0][4] is False # no_deps
|
||||
assert mock_install.call_args[0][5] is False # force
|
||||
assert mock_install.call_args[0][6] is False # force_deps
|
||||
|
||||
assert mock_req.call_count == 1
|
||||
assert mock_req.call_args[0][0] == os.path.abspath(requirements_file)
|
||||
|
@ -860,11 +865,10 @@ def test_collection_install_with_unexpanded_path(collection_install, monkeypatch
|
|||
assert len(mock_install.call_args[0][2]) == 1
|
||||
assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
|
||||
assert mock_install.call_args[0][2][0].validate_certs is True
|
||||
assert mock_install.call_args[0][3] is True
|
||||
assert mock_install.call_args[0][4] is False
|
||||
assert mock_install.call_args[0][5] is False
|
||||
assert mock_install.call_args[0][6] is False
|
||||
assert mock_install.call_args[0][7] is False
|
||||
assert mock_install.call_args[0][3] is False # ignore_errors
|
||||
assert mock_install.call_args[0][4] is False # no_deps
|
||||
assert mock_install.call_args[0][5] is False # force
|
||||
assert mock_install.call_args[0][6] is False # force_deps
|
||||
|
||||
assert mock_req.call_count == 1
|
||||
assert mock_req.call_args[0][0] == os.path.expanduser(os.path.expandvars(requirements_file))
|
||||
|
@ -882,22 +886,28 @@ def test_collection_install_in_collection_dir(collection_install, monkeypatch):
|
|||
assert mock_warning.call_count == 0
|
||||
|
||||
assert mock_install.call_count == 1
|
||||
assert mock_install.call_args[0][0] == [('namespace.collection', '*', None, None),
|
||||
('namespace2.collection', '1.0.1', None, None)]
|
||||
requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_install.call_args[0][0]]
|
||||
assert requirements == [('namespace.collection', '*', None, 'galaxy'),
|
||||
('namespace2.collection', '1.0.1', None, 'galaxy')]
|
||||
assert mock_install.call_args[0][1] == os.path.join(collections_path, 'ansible_collections')
|
||||
assert len(mock_install.call_args[0][2]) == 1
|
||||
assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
|
||||
assert mock_install.call_args[0][2][0].validate_certs is True
|
||||
assert mock_install.call_args[0][3] is True
|
||||
assert mock_install.call_args[0][4] is False
|
||||
assert mock_install.call_args[0][5] is False
|
||||
assert mock_install.call_args[0][6] is False
|
||||
assert mock_install.call_args[0][7] is False
|
||||
assert mock_install.call_args[0][3] is False # ignore_errors
|
||||
assert mock_install.call_args[0][4] is False # no_deps
|
||||
assert mock_install.call_args[0][5] is False # force
|
||||
assert mock_install.call_args[0][6] is False # force_deps
|
||||
|
||||
|
||||
def test_collection_install_with_url(collection_install):
|
||||
def test_collection_install_with_url(monkeypatch, collection_install):
|
||||
mock_install, dummy, output_dir = collection_install
|
||||
|
||||
mock_open = MagicMock(return_value=BytesIO())
|
||||
monkeypatch.setattr(collection.concrete_artifact_manager, 'open_url', mock_open)
|
||||
|
||||
mock_metadata = MagicMock(return_value={'namespace': 'foo', 'name': 'bar', 'version': 'v1.0.0'})
|
||||
monkeypatch.setattr(collection.concrete_artifact_manager, '_get_meta_from_tar', mock_metadata)
|
||||
|
||||
galaxy_args = ['ansible-galaxy', 'collection', 'install', 'https://foo/bar/foo-bar-v1.0.0.tar.gz',
|
||||
'--collections-path', output_dir]
|
||||
GalaxyCLI(args=galaxy_args).run()
|
||||
|
@ -906,16 +916,16 @@ def test_collection_install_with_url(collection_install):
|
|||
assert os.path.isdir(collection_path)
|
||||
|
||||
assert mock_install.call_count == 1
|
||||
assert mock_install.call_args[0][0] == [('https://foo/bar/foo-bar-v1.0.0.tar.gz', '*', None, None)]
|
||||
requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_install.call_args[0][0]]
|
||||
assert requirements == [('foo.bar', 'v1.0.0', 'https://foo/bar/foo-bar-v1.0.0.tar.gz', 'url')]
|
||||
assert mock_install.call_args[0][1] == collection_path
|
||||
assert len(mock_install.call_args[0][2]) == 1
|
||||
assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
|
||||
assert mock_install.call_args[0][2][0].validate_certs is True
|
||||
assert mock_install.call_args[0][3] is True
|
||||
assert mock_install.call_args[0][4] is False
|
||||
assert mock_install.call_args[0][5] is False
|
||||
assert mock_install.call_args[0][6] is False
|
||||
assert mock_install.call_args[0][7] is False
|
||||
assert mock_install.call_args[0][3] is False # ignore_errors
|
||||
assert mock_install.call_args[0][4] is False # no_deps
|
||||
assert mock_install.call_args[0][5] is False # force
|
||||
assert mock_install.call_args[0][6] is False # force_deps
|
||||
|
||||
|
||||
def test_collection_install_name_and_requirements_fail(collection_install):
|
||||
|
@ -951,17 +961,17 @@ def test_collection_install_path_with_ansible_collections(collection_install):
|
|||
% collection_path in mock_warning.call_args[0][0]
|
||||
|
||||
assert mock_install.call_count == 1
|
||||
assert mock_install.call_args[0][0] == [('namespace.collection', '*', None, None),
|
||||
('namespace2.collection', '1.0.1', None, None)]
|
||||
requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_install.call_args[0][0]]
|
||||
assert requirements == [('namespace.collection', '*', None, 'galaxy'),
|
||||
('namespace2.collection', '1.0.1', None, 'galaxy')]
|
||||
assert mock_install.call_args[0][1] == collection_path
|
||||
assert len(mock_install.call_args[0][2]) == 1
|
||||
assert mock_install.call_args[0][2][0].api_server == 'https://galaxy.ansible.com'
|
||||
assert mock_install.call_args[0][2][0].validate_certs is True
|
||||
assert mock_install.call_args[0][3] is True
|
||||
assert mock_install.call_args[0][4] is False
|
||||
assert mock_install.call_args[0][5] is False
|
||||
assert mock_install.call_args[0][6] is False
|
||||
assert mock_install.call_args[0][7] is False
|
||||
assert mock_install.call_args[0][3] is False # ignore_errors
|
||||
assert mock_install.call_args[0][4] is False # no_deps
|
||||
assert mock_install.call_args[0][5] is False # force
|
||||
assert mock_install.call_args[0][6] is False # force_deps
|
||||
|
||||
|
||||
def test_collection_install_ignore_certs(collection_install):
|
||||
|
@ -981,7 +991,8 @@ def test_collection_install_force(collection_install):
|
|||
'--force']
|
||||
GalaxyCLI(args=galaxy_args).run()
|
||||
|
||||
assert mock_install.call_args[0][6] is True
|
||||
# mock_install args: collections, output_path, apis, ignore_errors, no_deps, force, force_deps
|
||||
assert mock_install.call_args[0][5] is True
|
||||
|
||||
|
||||
def test_collection_install_force_deps(collection_install):
|
||||
|
@ -991,7 +1002,8 @@ def test_collection_install_force_deps(collection_install):
|
|||
'--force-with-deps']
|
||||
GalaxyCLI(args=galaxy_args).run()
|
||||
|
||||
assert mock_install.call_args[0][7] is True
|
||||
# mock_install args: collections, output_path, apis, ignore_errors, no_deps, force, force_deps
|
||||
assert mock_install.call_args[0][6] is True
|
||||
|
||||
|
||||
def test_collection_install_no_deps(collection_install):
|
||||
|
@ -1001,7 +1013,8 @@ def test_collection_install_no_deps(collection_install):
|
|||
'--no-deps']
|
||||
GalaxyCLI(args=galaxy_args).run()
|
||||
|
||||
assert mock_install.call_args[0][5] is True
|
||||
# mock_install args: collections, output_path, apis, ignore_errors, no_deps, force, force_deps
|
||||
assert mock_install.call_args[0][4] is True
|
||||
|
||||
|
||||
def test_collection_install_ignore(collection_install):
|
||||
|
@ -1011,7 +1024,8 @@ def test_collection_install_ignore(collection_install):
|
|||
'--ignore-errors']
|
||||
GalaxyCLI(args=galaxy_args).run()
|
||||
|
||||
assert mock_install.call_args[0][4] is True
|
||||
# mock_install args: collections, output_path, apis, ignore_errors, no_deps, force, force_deps
|
||||
assert mock_install.call_args[0][3] is True
|
||||
|
||||
|
||||
def test_collection_install_custom_server(collection_install):
|
||||
|
@ -1080,7 +1094,13 @@ collections:
|
|||
- version: 1.0.0
|
||||
'''], indirect=True)
|
||||
def test_parse_requirements_without_mandatory_name_key(requirements_cli, requirements_file):
|
||||
expected = "Collections requirement entry should contain the key name."
|
||||
# Used to be "Collections requirement entry should contain the key name."
|
||||
# Should we check that either source or name is provided before using the dep resolver?
|
||||
|
||||
expected = "Neither the collection requirement entry key 'name', nor 'source' point to a concrete resolvable collection artifact. "
|
||||
expected += r"Also 'name' is not an FQCN\. A valid collection name must be in the format <namespace>\.<collection>\. "
|
||||
expected += r"Please make sure that the namespace and the collection name contain characters from \[a\-zA\-Z0\-9_\] only\."
|
||||
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
requirements_cli._parse_requirements_file(requirements_file)
|
||||
|
||||
|
@ -1097,9 +1117,10 @@ collections:
|
|||
def test_parse_requirements(requirements_cli, requirements_file):
|
||||
expected = {
|
||||
'roles': [],
|
||||
'collections': [('namespace.collection1', '*', None, None), ('namespace.collection2', '*', None, None)]
|
||||
'collections': [('namespace.collection1', '*', None, 'galaxy'), ('namespace.collection2', '*', None, 'galaxy')]
|
||||
}
|
||||
actual = requirements_cli._parse_requirements_file(requirements_file)
|
||||
actual['collections'] = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in actual.get('collections', [])]
|
||||
|
||||
assert actual == expected
|
||||
|
||||
|
@ -1112,19 +1133,15 @@ collections:
|
|||
- namespace.collection2'''], indirect=True)
|
||||
def test_parse_requirements_with_extra_info(requirements_cli, requirements_file):
|
||||
actual = requirements_cli._parse_requirements_file(requirements_file)
|
||||
actual['collections'] = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in actual.get('collections', [])]
|
||||
|
||||
assert len(actual['roles']) == 0
|
||||
assert len(actual['collections']) == 2
|
||||
assert actual['collections'][0][0] == 'namespace.collection1'
|
||||
assert actual['collections'][0][1] == '>=1.0.0,<=2.0.0'
|
||||
assert actual['collections'][0][2].api_server == 'https://galaxy-dev.ansible.com'
|
||||
assert actual['collections'][0][2].name == 'explicit_requirement_namespace.collection1'
|
||||
assert actual['collections'][0][2].token is None
|
||||
assert actual['collections'][0][2].username is None
|
||||
assert actual['collections'][0][2].password is None
|
||||
assert actual['collections'][0][2].validate_certs is True
|
||||
|
||||
assert actual['collections'][1] == ('namespace.collection2', '*', None, None)
|
||||
assert actual['collections'][1] == ('namespace.collection2', '*', None, 'galaxy')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('requirements_file', ['''
|
||||
|
@ -1139,6 +1156,7 @@ collections:
|
|||
'''], indirect=True)
|
||||
def test_parse_requirements_with_roles_and_collections(requirements_cli, requirements_file):
|
||||
actual = requirements_cli._parse_requirements_file(requirements_file)
|
||||
actual['collections'] = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in actual.get('collections', [])]
|
||||
|
||||
assert len(actual['roles']) == 3
|
||||
assert actual['roles'][0].name == 'username.role_name'
|
||||
|
@ -1147,7 +1165,7 @@ def test_parse_requirements_with_roles_and_collections(requirements_cli, require
|
|||
assert actual['roles'][2].src == 'ssh://github.com/user/repo'
|
||||
|
||||
assert len(actual['collections']) == 1
|
||||
assert actual['collections'][0] == ('namespace.collection2', '*', None, None)
|
||||
assert actual['collections'][0] == ('namespace.collection2', '*', None, 'galaxy')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('requirements_file', ['''
|
||||
|
@ -1163,18 +1181,19 @@ def test_parse_requirements_with_collection_source(requirements_cli, requirement
|
|||
requirements_cli.api_servers.append(galaxy_api)
|
||||
|
||||
actual = requirements_cli._parse_requirements_file(requirements_file)
|
||||
actual['collections'] = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in actual.get('collections', [])]
|
||||
|
||||
assert actual['roles'] == []
|
||||
assert len(actual['collections']) == 3
|
||||
assert actual['collections'][0] == ('namespace.collection', '*', None, None)
|
||||
assert actual['collections'][0] == ('namespace.collection', '*', None, 'galaxy')
|
||||
|
||||
assert actual['collections'][1][0] == 'namespace2.collection2'
|
||||
assert actual['collections'][1][1] == '*'
|
||||
assert actual['collections'][1][2].api_server == 'https://galaxy-dev.ansible.com/'
|
||||
assert actual['collections'][1][2].name == 'explicit_requirement_namespace2.collection2'
|
||||
assert actual['collections'][1][2].token is None
|
||||
|
||||
assert actual['collections'][2] == ('namespace3.collection3', '*', galaxy_api, None)
|
||||
assert actual['collections'][2][0] == 'namespace3.collection3'
|
||||
assert actual['collections'][2][1] == '*'
|
||||
assert actual['collections'][2][2].api_server == 'https://config-server'
|
||||
|
||||
|
||||
@pytest.mark.parametrize('requirements_file', ['''
|
||||
|
@ -1230,7 +1249,8 @@ def test_install_implicit_role_with_collections(requirements_file, monkeypatch):
|
|||
cli.run()
|
||||
|
||||
assert mock_collection_install.call_count == 1
|
||||
assert mock_collection_install.call_args[0][0] == [('namespace.name', '*', None, None)]
|
||||
requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_collection_install.call_args[0][0]]
|
||||
assert requirements == [('namespace.name', '*', None, 'galaxy')]
|
||||
assert mock_collection_install.call_args[0][1] == cli._get_default_collection_path()
|
||||
|
||||
assert mock_role_install.call_count == 1
|
||||
|
@ -1328,8 +1348,8 @@ def test_install_collection_with_roles(requirements_file, monkeypatch):
|
|||
cli.run()
|
||||
|
||||
assert mock_collection_install.call_count == 1
|
||||
assert mock_collection_install.call_args[0][0] == [('namespace.name', '*', None, None)]
|
||||
assert mock_collection_install.call_args[0][1] == cli._get_default_collection_path()
|
||||
requirements = [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type,) for r in mock_collection_install.call_args[0][0]]
|
||||
assert requirements == [('namespace.name', '*', None, 'galaxy')]
|
||||
|
||||
assert mock_role_install.call_count == 0
|
||||
|
||||
|
|
|
@ -56,7 +56,7 @@ def collection_input(tmp_path_factory):
|
|||
def collection_artifact(monkeypatch, tmp_path_factory):
|
||||
''' Creates a temp collection artifact and mocked open_url instance for publishing tests '''
|
||||
mock_open = MagicMock()
|
||||
monkeypatch.setattr(collection, 'open_url', mock_open)
|
||||
monkeypatch.setattr(collection.concrete_artifact_manager, 'open_url', mock_open)
|
||||
|
||||
mock_uuid = MagicMock()
|
||||
mock_uuid.return_value.hex = 'uuid'
|
||||
|
@ -76,13 +76,13 @@ def collection_artifact(monkeypatch, tmp_path_factory):
|
|||
|
||||
|
||||
@pytest.fixture()
|
||||
def galaxy_yml(request, tmp_path_factory):
|
||||
def galaxy_yml_dir(request, tmp_path_factory):
|
||||
b_test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
|
||||
b_galaxy_yml = os.path.join(b_test_dir, b'galaxy.yml')
|
||||
with open(b_galaxy_yml, 'wb') as galaxy_obj:
|
||||
galaxy_obj.write(to_bytes(request.param))
|
||||
|
||||
yield b_galaxy_yml
|
||||
yield b_test_dir
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
|
@ -198,31 +198,12 @@ def manifest(manifest_info):
|
|||
yield fake_file, sha256(b_data).hexdigest()
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def mock_collection(galaxy_server):
|
||||
def create_mock_collection(namespace='ansible_namespace', name='collection', version='0.1.0', local=True, local_installed=True):
|
||||
b_path = None
|
||||
force = False
|
||||
|
||||
if local:
|
||||
mock_collection = collection.CollectionRequirement(namespace, name, b_path, galaxy_server, [version], version, force, skip=local_installed)
|
||||
else:
|
||||
download_url = 'https://galaxy.ansible.com/download/{0}-{1}-{2}.tar.gz'.format(namespace, name, version)
|
||||
digest = '19415a6a6df831df61cffde4a09d1d89ac8d8ca5c0586e85bea0b106d6dff29a'
|
||||
dependencies = {}
|
||||
metadata = api.CollectionVersionMetadata(namespace, name, version, download_url, digest, dependencies)
|
||||
mock_collection = collection.CollectionRequirement(namespace, name, b_path, galaxy_server, [version], version, force, metadata=metadata)
|
||||
|
||||
return mock_collection
|
||||
return create_mock_collection
|
||||
|
||||
|
||||
def test_build_collection_no_galaxy_yaml():
|
||||
fake_path = u'/fake/ÅÑŚÌβŁÈ/path'
|
||||
expected = to_native("The collection galaxy.yml path '%s/galaxy.yml' does not exist." % fake_path)
|
||||
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
collection.build_collection(fake_path, 'output', False)
|
||||
collection.build_collection(fake_path, u'output', False)
|
||||
|
||||
|
||||
def test_build_existing_output_file(collection_input):
|
||||
|
@ -234,7 +215,7 @@ def test_build_existing_output_file(collection_input):
|
|||
expected = "The output collection artifact '%s' already exists, but is a directory - aborting" \
|
||||
% to_native(existing_output_dir)
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
collection.build_collection(input_dir, output_dir, False)
|
||||
collection.build_collection(to_text(input_dir, errors='surrogate_or_strict'), to_text(output_dir, errors='surrogate_or_strict'), False)
|
||||
|
||||
|
||||
def test_build_existing_output_without_force(collection_input):
|
||||
|
@ -248,7 +229,7 @@ def test_build_existing_output_without_force(collection_input):
|
|||
expected = "The file '%s' already exists. You can use --force to re-create the collection artifact." \
|
||||
% to_native(existing_output)
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
collection.build_collection(input_dir, output_dir, False)
|
||||
collection.build_collection(to_text(input_dir, errors='surrogate_or_strict'), to_text(output_dir, errors='surrogate_or_strict'), False)
|
||||
|
||||
|
||||
def test_build_existing_output_with_force(collection_input):
|
||||
|
@ -259,55 +240,57 @@ def test_build_existing_output_with_force(collection_input):
|
|||
out_file.write("random garbage")
|
||||
out_file.flush()
|
||||
|
||||
collection.build_collection(input_dir, output_dir, True)
|
||||
collection.build_collection(to_text(input_dir, errors='surrogate_or_strict'), to_text(output_dir, errors='surrogate_or_strict'), True)
|
||||
|
||||
# Verify the file was replaced with an actual tar file
|
||||
assert tarfile.is_tarfile(existing_output)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('galaxy_yml', [b'namespace: value: broken'], indirect=True)
|
||||
def test_invalid_yaml_galaxy_file(galaxy_yml):
|
||||
expected = to_native(b"Failed to parse the galaxy.yml at '%s' with the following error:" % galaxy_yml)
|
||||
@pytest.mark.parametrize('galaxy_yml_dir', [b'namespace: value: broken'], indirect=True)
|
||||
def test_invalid_yaml_galaxy_file(galaxy_yml_dir):
|
||||
galaxy_file = os.path.join(galaxy_yml_dir, b'galaxy.yml')
|
||||
expected = to_native(b"Failed to parse the galaxy.yml at '%s' with the following error:" % galaxy_file)
|
||||
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
collection._get_galaxy_yml(galaxy_yml)
|
||||
collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('galaxy_yml', [b'namespace: test_namespace'], indirect=True)
|
||||
def test_missing_required_galaxy_key(galaxy_yml):
|
||||
@pytest.mark.parametrize('galaxy_yml_dir', [b'namespace: test_namespace'], indirect=True)
|
||||
def test_missing_required_galaxy_key(galaxy_yml_dir):
|
||||
galaxy_file = os.path.join(galaxy_yml_dir, b'galaxy.yml')
|
||||
expected = "The collection galaxy.yml at '%s' is missing the following mandatory keys: authors, name, " \
|
||||
"readme, version" % to_native(galaxy_yml)
|
||||
"readme, version" % to_native(galaxy_file)
|
||||
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
collection._get_galaxy_yml(galaxy_yml)
|
||||
collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('galaxy_yml', [b"""
|
||||
@pytest.mark.parametrize('galaxy_yml_dir', [b"""
|
||||
namespace: namespace
|
||||
name: collection
|
||||
authors: Jordan
|
||||
version: 0.1.0
|
||||
readme: README.md
|
||||
invalid: value"""], indirect=True)
|
||||
def test_warning_extra_keys(galaxy_yml, monkeypatch):
|
||||
def test_warning_extra_keys(galaxy_yml_dir, monkeypatch):
|
||||
display_mock = MagicMock()
|
||||
monkeypatch.setattr(Display, 'warning', display_mock)
|
||||
|
||||
collection._get_galaxy_yml(galaxy_yml)
|
||||
collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
|
||||
|
||||
assert display_mock.call_count == 1
|
||||
assert display_mock.call_args[0][0] == "Found unknown keys in collection galaxy.yml at '%s': invalid"\
|
||||
% to_text(galaxy_yml)
|
||||
assert display_mock.call_args[0][0] == "Found unknown keys in collection galaxy.yml at '%s/galaxy.yml': invalid"\
|
||||
% to_text(galaxy_yml_dir)
|
||||
|
||||
|
||||
@pytest.mark.parametrize('galaxy_yml', [b"""
|
||||
@pytest.mark.parametrize('galaxy_yml_dir', [b"""
|
||||
namespace: namespace
|
||||
name: collection
|
||||
authors: Jordan
|
||||
version: 0.1.0
|
||||
readme: README.md"""], indirect=True)
|
||||
def test_defaults_galaxy_yml(galaxy_yml):
|
||||
actual = collection._get_galaxy_yml(galaxy_yml)
|
||||
def test_defaults_galaxy_yml(galaxy_yml_dir):
|
||||
actual = collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
|
||||
|
||||
assert actual['namespace'] == 'namespace'
|
||||
assert actual['name'] == 'collection'
|
||||
|
@ -321,10 +304,10 @@ def test_defaults_galaxy_yml(galaxy_yml):
|
|||
assert actual['issues'] is None
|
||||
assert actual['tags'] == []
|
||||
assert actual['dependencies'] == {}
|
||||
assert actual['license_ids'] == []
|
||||
assert actual['license'] == []
|
||||
|
||||
|
||||
@pytest.mark.parametrize('galaxy_yml', [(b"""
|
||||
@pytest.mark.parametrize('galaxy_yml_dir', [(b"""
|
||||
namespace: namespace
|
||||
name: collection
|
||||
authors: Jordan
|
||||
|
@ -338,9 +321,9 @@ version: 0.1.0
|
|||
readme: README.md
|
||||
license:
|
||||
- MIT""")], indirect=True)
|
||||
def test_galaxy_yml_list_value(galaxy_yml):
|
||||
actual = collection._get_galaxy_yml(galaxy_yml)
|
||||
assert actual['license_ids'] == ['MIT']
|
||||
def test_galaxy_yml_list_value(galaxy_yml_dir):
|
||||
actual = collection.concrete_artifact_manager._get_meta_from_src_dir(galaxy_yml_dir)
|
||||
assert actual['license'] == ['MIT']
|
||||
|
||||
|
||||
def test_build_ignore_files_and_folders(collection_input, monkeypatch):
|
||||
|
@ -529,7 +512,7 @@ def test_build_with_symlink_inside_collection(collection_input):
|
|||
os.symlink(roles_target, roles_link)
|
||||
os.symlink(os.path.join(input_dir, 'README.md'), file_link)
|
||||
|
||||
collection.build_collection(input_dir, output_dir, False)
|
||||
collection.build_collection(to_text(input_dir, errors='surrogate_or_strict'), to_text(output_dir, errors='surrogate_or_strict'), False)
|
||||
|
||||
output_artifact = os.path.join(output_dir, 'ansible_namespace-collection-0.1.0.tar.gz')
|
||||
assert tarfile.is_tarfile(output_artifact)
|
||||
|
@ -603,6 +586,7 @@ def test_publish_with_wait(galaxy_server, collection_artifact, monkeypatch):
|
|||
|
||||
def test_find_existing_collections(tmp_path_factory, monkeypatch):
|
||||
test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
collection1 = os.path.join(test_dir, 'namespace1', 'collection1')
|
||||
collection2 = os.path.join(test_dir, 'namespace2', 'collection2')
|
||||
fake_collection1 = os.path.join(test_dir, 'namespace3', 'collection3')
|
||||
|
@ -631,32 +615,24 @@ def test_find_existing_collections(tmp_path_factory, monkeypatch):
|
|||
mock_warning = MagicMock()
|
||||
monkeypatch.setattr(Display, 'warning', mock_warning)
|
||||
|
||||
actual = collection.find_existing_collections(test_dir)
|
||||
actual = list(collection.find_existing_collections(test_dir, artifacts_manager=concrete_artifact_cm))
|
||||
|
||||
assert len(actual) == 2
|
||||
for actual_collection in actual:
|
||||
assert actual_collection.skip is True
|
||||
|
||||
if str(actual_collection) == 'namespace1.collection1':
|
||||
if '%s.%s' % (actual_collection.namespace, actual_collection.name) == 'namespace1.collection1':
|
||||
assert actual_collection.namespace == 'namespace1'
|
||||
assert actual_collection.name == 'collection1'
|
||||
assert actual_collection.b_path == to_bytes(collection1)
|
||||
assert actual_collection.api is None
|
||||
assert actual_collection.versions == set(['1.2.3'])
|
||||
assert actual_collection.latest_version == '1.2.3'
|
||||
assert actual_collection.dependencies == {}
|
||||
assert actual_collection.ver == '1.2.3'
|
||||
assert to_text(actual_collection.src) == collection1
|
||||
else:
|
||||
assert actual_collection.namespace == 'namespace2'
|
||||
assert actual_collection.name == 'collection2'
|
||||
assert actual_collection.b_path == to_bytes(collection2)
|
||||
assert actual_collection.api is None
|
||||
assert actual_collection.versions == set(['*'])
|
||||
assert actual_collection.latest_version == '*'
|
||||
assert actual_collection.dependencies == {}
|
||||
assert actual_collection.ver == '*'
|
||||
assert to_text(actual_collection.src) == collection2
|
||||
|
||||
assert mock_warning.call_count == 1
|
||||
assert mock_warning.mock_calls[0][1][0] == "Collection at '%s' does not have a MANIFEST.json file, cannot " \
|
||||
"detect version." % to_text(collection2)
|
||||
assert mock_warning.mock_calls[0][1][0] == "Collection at '%s' does not have a MANIFEST.json file, nor has it galaxy.yml: " \
|
||||
"cannot detect version." % to_text(collection2)
|
||||
|
||||
|
||||
def test_download_file(tmp_path_factory, monkeypatch):
|
||||
|
@ -668,9 +644,9 @@ def test_download_file(tmp_path_factory, monkeypatch):
|
|||
|
||||
mock_open = MagicMock()
|
||||
mock_open.return_value = BytesIO(data)
|
||||
monkeypatch.setattr(collection, 'open_url', mock_open)
|
||||
monkeypatch.setattr(collection.concrete_artifact_manager, 'open_url', mock_open)
|
||||
|
||||
expected = os.path.join(temp_dir, b'file')
|
||||
expected = temp_dir
|
||||
actual = collection._download_file('http://google.com/file', temp_dir, sha256_hash.hexdigest(), True)
|
||||
|
||||
assert actual.startswith(expected)
|
||||
|
@ -689,7 +665,7 @@ def test_download_file_hash_mismatch(tmp_path_factory, monkeypatch):
|
|||
|
||||
mock_open = MagicMock()
|
||||
mock_open.return_value = BytesIO(data)
|
||||
monkeypatch.setattr(collection, 'open_url', mock_open)
|
||||
monkeypatch.setattr(collection.concrete_artifact_manager, 'open_url', mock_open)
|
||||
|
||||
expected = "Mismatch artifact hash with downloaded file"
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
|
@ -772,7 +748,8 @@ def test_require_one_of_collections_requirements_with_collections():
|
|||
|
||||
requirements = cli._require_one_of_collections_requirements(collections, '')['collections']
|
||||
|
||||
assert requirements == [('namespace1.collection1', '*', None, None), ('namespace2.collection1', '1.0.0', None, None)]
|
||||
req_tuples = [('%s.%s' % (req.namespace, req.name), req.ver, req.src, req.type,) for req in requirements]
|
||||
assert req_tuples == [('namespace1.collection1', '*', None, 'galaxy'), ('namespace2.collection1', '1.0.0', None, 'galaxy')]
|
||||
|
||||
|
||||
@patch('ansible.cli.galaxy.GalaxyCLI._parse_requirements_file')
|
||||
|
@ -821,13 +798,13 @@ def test_execute_verify_with_defaults(mock_verify_collections):
|
|||
|
||||
assert mock_verify_collections.call_count == 1
|
||||
|
||||
requirements, search_paths, galaxy_apis, validate, ignore_errors = mock_verify_collections.call_args[0]
|
||||
print("Call args {0}".format(mock_verify_collections.call_args[0]))
|
||||
requirements, search_paths, galaxy_apis, ignore_errors = mock_verify_collections.call_args[0]
|
||||
|
||||
assert requirements == [('namespace.collection', '1.0.4', None, None)]
|
||||
assert [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type) for r in requirements] == [('namespace.collection', '1.0.4', None, 'galaxy')]
|
||||
for install_path in search_paths:
|
||||
assert install_path.endswith('ansible_collections')
|
||||
assert galaxy_apis[0].api_server == 'https://galaxy.ansible.com'
|
||||
assert validate is True
|
||||
assert ignore_errors is False
|
||||
|
||||
|
||||
|
@ -840,13 +817,12 @@ def test_execute_verify(mock_verify_collections):
|
|||
|
||||
assert mock_verify_collections.call_count == 1
|
||||
|
||||
requirements, search_paths, galaxy_apis, validate, ignore_errors = mock_verify_collections.call_args[0]
|
||||
requirements, search_paths, galaxy_apis, ignore_errors = mock_verify_collections.call_args[0]
|
||||
|
||||
assert requirements == [('namespace.collection', '1.0.4', None, None)]
|
||||
assert [('%s.%s' % (r.namespace, r.name), r.ver, r.src, r.type) for r in requirements] == [('namespace.collection', '1.0.4', None, 'galaxy')]
|
||||
for install_path in search_paths:
|
||||
assert install_path.endswith('ansible_collections')
|
||||
assert galaxy_apis[0].api_server == 'http://galaxy-dev.com'
|
||||
assert validate is False
|
||||
assert ignore_errors is True
|
||||
|
||||
|
||||
|
@ -863,8 +839,7 @@ def test_verify_file_hash_deleted_file(manifest_info):
|
|||
|
||||
with patch.object(builtins, 'open', mock_open(read_data=data)) as m:
|
||||
with patch.object(collection.os.path, 'isfile', MagicMock(return_value=False)) as mock_isfile:
|
||||
collection_req = collection.CollectionRequirement(namespace, name, './', server, [version], version, False)
|
||||
collection_req._verify_file_hash(b'path/', 'file', digest, error_queue)
|
||||
collection._verify_file_hash(b'path/', 'file', digest, error_queue)
|
||||
|
||||
assert mock_isfile.called_once
|
||||
|
||||
|
@ -887,8 +862,7 @@ def test_verify_file_hash_matching_hash(manifest_info):
|
|||
|
||||
with patch.object(builtins, 'open', mock_open(read_data=data)) as m:
|
||||
with patch.object(collection.os.path, 'isfile', MagicMock(return_value=True)) as mock_isfile:
|
||||
collection_req = collection.CollectionRequirement(namespace, name, './', server, [version], version, False)
|
||||
collection_req._verify_file_hash(b'path/', 'file', digest, error_queue)
|
||||
collection._verify_file_hash(b'path/', 'file', digest, error_queue)
|
||||
|
||||
assert mock_isfile.called_once
|
||||
|
||||
|
@ -910,8 +884,7 @@ def test_verify_file_hash_mismatching_hash(manifest_info):
|
|||
|
||||
with patch.object(builtins, 'open', mock_open(read_data=data)) as m:
|
||||
with patch.object(collection.os.path, 'isfile', MagicMock(return_value=True)) as mock_isfile:
|
||||
collection_req = collection.CollectionRequirement(namespace, name, './', server, [version], version, False)
|
||||
collection_req._verify_file_hash(b'path/', 'file', different_digest, error_queue)
|
||||
collection._verify_file_hash(b'path/', 'file', different_digest, error_queue)
|
||||
|
||||
assert mock_isfile.called_once
|
||||
|
||||
|
@ -972,355 +945,3 @@ def test_get_json_from_tar_file(tmp_tarfile):
|
|||
data = collection._get_json_from_tar_file(tfile.name, 'MANIFEST.json')
|
||||
|
||||
assert isinstance(data, dict)
|
||||
|
||||
|
||||
def test_verify_collection_not_installed(mock_collection):
|
||||
|
||||
local_collection = mock_collection(local_installed=False)
|
||||
remote_collection = mock_collection(local=False)
|
||||
|
||||
with patch.object(collection.display, 'display') as mocked_display:
|
||||
local_collection.verify(remote_collection, './', './')
|
||||
|
||||
assert mocked_display.called
|
||||
assert mocked_display.call_args[0][0] == "'%s.%s' has not been installed, nothing to verify" % (local_collection.namespace, local_collection.name)
|
||||
|
||||
|
||||
def test_verify_successful_debug_info(monkeypatch, mock_collection):
|
||||
local_collection = mock_collection()
|
||||
remote_collection = mock_collection(local=False)
|
||||
|
||||
monkeypatch.setattr(collection, '_get_tar_file_hash', MagicMock())
|
||||
monkeypatch.setattr(collection.CollectionRequirement, '_verify_file_hash', MagicMock())
|
||||
monkeypatch.setattr(collection, '_get_json_from_tar_file', MagicMock())
|
||||
|
||||
with patch.object(collection.display, 'vvv') as mock_display:
|
||||
local_collection.verify(remote_collection, './', './')
|
||||
|
||||
namespace = local_collection.namespace
|
||||
name = local_collection.name
|
||||
version = local_collection.latest_version
|
||||
|
||||
assert mock_display.call_count == 4
|
||||
assert mock_display.call_args_list[0][0][0] == "Verifying '%s.%s:%s'." % (namespace, name, version)
|
||||
assert mock_display.call_args_list[1][0][0] == "Installed collection found at './%s/%s'" % (namespace, name)
|
||||
located = "Remote collection found at 'https://galaxy.ansible.com/download/%s-%s-%s.tar.gz'" % (namespace, name, version)
|
||||
assert mock_display.call_args_list[2][0][0] == located
|
||||
verified = "Successfully verified that checksums for '%s.%s:%s' match the remote collection" % (namespace, name, version)
|
||||
assert mock_display.call_args_list[3][0][0] == verified
|
||||
|
||||
|
||||
def test_verify_different_versions(mock_collection):
|
||||
|
||||
local_collection = mock_collection(version='0.1.0')
|
||||
remote_collection = mock_collection(local=False, version='3.0.0')
|
||||
|
||||
with patch.object(collection.display, 'display') as mock_display:
|
||||
local_collection.verify(remote_collection, './', './')
|
||||
|
||||
namespace = local_collection.namespace
|
||||
name = local_collection.name
|
||||
installed_version = local_collection.latest_version
|
||||
compared_version = remote_collection.latest_version
|
||||
|
||||
msg = "%s.%s has the version '%s' but is being compared to '%s'" % (namespace, name, installed_version, compared_version)
|
||||
|
||||
assert mock_display.call_count == 1
|
||||
assert mock_display.call_args[0][0] == msg
|
||||
|
||||
|
||||
@patch.object(builtins, 'open', mock_open())
def test_verify_modified_manifest(monkeypatch, mock_collection, manifest_info):
    """A MANIFEST.json checksum mismatch is reported as modified content."""
    installed = mock_collection()
    remote = mock_collection(local=False)

    # Local MANIFEST.json hashes differently from the artifact's recorded hash.
    monkeypatch.setattr(collection, '_get_tar_file_hash', MagicMock(side_effect=['manifest_checksum']))
    monkeypatch.setattr(collection, '_consume_file', MagicMock(side_effect=['manifest_checksum_modified', 'files_manifest_checksum']))
    monkeypatch.setattr(collection, '_get_json_from_tar_file', MagicMock(side_effect=[manifest_info, {'files': []}]))
    monkeypatch.setattr(collection.os.path, 'isfile', MagicMock(return_value=True))

    with patch.object(collection.display, 'display') as mock_display, \
            patch.object(collection.display, 'vvv') as mock_debug:
        installed.verify(remote, './', './')

    ns, coll_name = installed.namespace, installed.name
    expected_lines = [
        'Collection %s.%s contains modified content in the following files:' % (ns, coll_name),
        '%s.%s' % (ns, coll_name),
        ' MANIFEST.json',
    ]

    assert mock_display.call_count == 3
    for shown, expected in zip(mock_display.call_args_list, expected_lines):
        assert shown[0][0] == expected

    # The -vvv output should show details (the checksums do not match)
    assert mock_debug.call_count == 5
    assert mock_debug.call_args_list[-1][0][0] == ' Expected: manifest_checksum\n Found: manifest_checksum_modified'
|
||||
|
||||
|
||||
@patch.object(builtins, 'open', mock_open())
def test_verify_modified_files_manifest(monkeypatch, mock_collection, manifest_info):
    """A FILES.json checksum mismatch is reported as modified content."""
    installed = mock_collection()
    remote = mock_collection(local=False)

    # MANIFEST.json matches, but the locally computed FILES.json hash differs.
    monkeypatch.setattr(collection, '_get_tar_file_hash', MagicMock(side_effect=['manifest_checksum']))
    monkeypatch.setattr(collection, '_consume_file', MagicMock(side_effect=['manifest_checksum', 'files_manifest_checksum_modified']))
    monkeypatch.setattr(collection, '_get_json_from_tar_file', MagicMock(side_effect=[manifest_info, {'files': []}]))
    monkeypatch.setattr(collection.os.path, 'isfile', MagicMock(return_value=True))

    with patch.object(collection.display, 'display') as mock_display, \
            patch.object(collection.display, 'vvv') as mock_debug:
        installed.verify(remote, './', './')

    ns, coll_name = installed.namespace, installed.name
    expected_lines = [
        'Collection %s.%s contains modified content in the following files:' % (ns, coll_name),
        '%s.%s' % (ns, coll_name),
        ' FILES.json',
    ]

    assert mock_display.call_count == 3
    for shown, expected in zip(mock_display.call_args_list, expected_lines):
        assert shown[0][0] == expected

    # The -vvv output should show details (the checksums do not match)
    assert mock_debug.call_count == 5
    assert mock_debug.call_args_list[-1][0][0] == ' Expected: files_manifest_checksum\n Found: files_manifest_checksum_modified'
|
||||
|
||||
|
||||
@patch.object(builtins, 'open', mock_open())
def test_verify_modified_files(monkeypatch, mock_collection, manifest_info, files_manifest_info):
    """A checksum mismatch on an individual file is reported as modified content."""
    installed = mock_collection()
    remote = mock_collection(local=False)

    # Both manifests match; the hash of the listed file (README.md) does not.
    monkeypatch.setattr(collection, '_get_tar_file_hash', MagicMock(side_effect=['manifest_checksum']))
    fake_hashes = ['manifest_checksum', 'files_manifest_checksum', 'individual_file_checksum_modified']
    monkeypatch.setattr(collection, '_consume_file', MagicMock(side_effect=fake_hashes))
    monkeypatch.setattr(collection, '_get_json_from_tar_file', MagicMock(side_effect=[manifest_info, files_manifest_info]))
    monkeypatch.setattr(collection.os.path, 'isfile', MagicMock(return_value=True))

    with patch.object(collection.display, 'display') as mock_display, \
            patch.object(collection.display, 'vvv') as mock_debug:
        installed.verify(remote, './', './')

    ns, coll_name = installed.namespace, installed.name
    expected_lines = [
        'Collection %s.%s contains modified content in the following files:' % (ns, coll_name),
        '%s.%s' % (ns, coll_name),
        ' README.md',
    ]

    assert mock_display.call_count == 3
    for shown, expected in zip(mock_display.call_args_list, expected_lines):
        assert shown[0][0] == expected

    # The -vvv output should show details (the checksums do not match)
    assert mock_debug.call_count == 5
    assert mock_debug.call_args_list[-1][0][0] == ' Expected: individual_file_checksum\n Found: individual_file_checksum_modified'
|
||||
|
||||
|
||||
@patch.object(builtins, 'open', mock_open())
def test_verify_identical(monkeypatch, mock_collection, manifest_info, files_manifest_info):
    """When every checksum matches, verification is silent except for a -vvv summary."""
    installed = mock_collection()
    remote = mock_collection(local=False)

    # All three hashes (MANIFEST.json, FILES.json, individual file) agree.
    monkeypatch.setattr(collection, '_get_tar_file_hash', MagicMock(side_effect=['manifest_checksum']))
    monkeypatch.setattr(collection, '_consume_file', MagicMock(side_effect=['manifest_checksum', 'files_manifest_checksum', 'individual_file_checksum']))
    monkeypatch.setattr(collection, '_get_json_from_tar_file', MagicMock(side_effect=[manifest_info, files_manifest_info]))
    monkeypatch.setattr(collection.os.path, 'isfile', MagicMock(return_value=True))

    with patch.object(collection.display, 'display') as mock_display, \
            patch.object(collection.display, 'vvv') as mock_debug:
        installed.verify(remote, './', './')

    # Successful verification is quiet
    assert mock_display.call_count == 0

    success_msg = "Successfully verified that checksums for '%s.%s:%s' match the remote collection" % (
        installed.namespace,
        installed.name,
        installed.latest_version,
    )

    # The -vvv output ends with the success summary.
    assert mock_debug.call_count == 4
    assert mock_debug.call_args_list[-1][0][0] == success_msg
|
||||
|
||||
|
||||
@patch.object(os.path, 'isdir', return_value=True)
def test_verify_collections_no_version(mock_isdir, mock_collection, monkeypatch):
    """A wildcard local version (i.e. no MANIFEST.json) cannot be verified."""
    ns = 'ansible_namespace'
    coll_name = 'collection'
    wildcard_version = '*'  # Occurs if MANIFEST.json does not exist

    installed = mock_collection(namespace=ns, name=coll_name, version=wildcard_version)
    monkeypatch.setattr(collection.CollectionRequirement, 'from_path', MagicMock(return_value=installed))

    with pytest.raises(AnsibleError) as err:
        collection.verify_collections(
            [('%s.%s' % (ns, coll_name), wildcard_version, None)],
            './', installed.api, False, False,
        )

    expected = (
        'Collection %s.%s does not appear to have a MANIFEST.json. ' % (ns, coll_name)
        + 'A MANIFEST.json is expected if the collection has been built and installed via ansible-galaxy.'
    )
    assert err.value.message == expected
|
||||
|
||||
|
||||
@patch.object(collection.CollectionRequirement, 'verify')
def test_verify_collections_not_installed(mock_verify, mock_collection, monkeypatch):
    """verify_collections raises when the collection is absent from every collection path."""
    ns = 'ansible_namespace'
    coll_name = 'collection'
    requested_version = '1.0.0'

    missing_local = mock_collection(local_installed=False)

    # The remote lookup succeeds; there is simply nothing on disk to compare against.
    remote_lookup = MagicMock(return_value=mock_collection(local=False))
    monkeypatch.setattr(collection.CollectionRequirement, 'from_name', remote_lookup)

    requirements = [('%s.%s' % (ns, coll_name), requested_version, None, None)]

    with patch.object(collection, '_download_file'):
        with pytest.raises(AnsibleError) as err:
            collection.verify_collections(requirements, './', [missing_local.api], False, False)

    assert err.value.message == "Collection %s.%s is not installed in any of the collection paths." % (ns, coll_name)
|
||||
|
||||
|
||||
@patch.object(collection.CollectionRequirement, 'verify')
def test_verify_collections_not_installed_ignore_errors(mock_verify, mock_collection, monkeypatch):
    """--ignore-errors turns a missing local install into a warning instead of a failure."""
    ns = 'ansible_namespace'
    coll_name = 'collection'
    requested_version = '1.0.0'

    missing_local = mock_collection(local_installed=False)

    remote_lookup = MagicMock(return_value=mock_collection(local=False))
    monkeypatch.setattr(collection.CollectionRequirement, 'from_name', remote_lookup)

    requirements = [('%s.%s' % (ns, coll_name), requested_version, None)]

    with patch.object(collection, '_download_file'):
        with patch.object(Display, 'warning') as mock_warning:
            # ignore_errors=True: the error must be swallowed and warned about.
            collection.verify_collections(requirements, './', [missing_local.api], False, True)

    skip_message = "Failed to verify collection %s.%s but skipping due to --ignore-errors being set." % (ns, coll_name)
    original_err = "Error: Collection %s.%s is not installed in any of the collection paths." % (ns, coll_name)

    assert mock_warning.called
    assert mock_warning.call_args[0][0] == skip_message + " " + original_err
|
||||
|
||||
|
||||
@patch.object(os.path, 'isdir', return_value=True)
@patch.object(collection.CollectionRequirement, 'verify')
def test_verify_collections_no_remote(mock_verify, mock_isdir, mock_collection, monkeypatch):
    """With no galaxy servers configured, verification of an installed collection fails."""
    ns = 'ansible_namespace'
    coll_name = 'collection'
    requested_version = '1.0.0'

    # First isfile probe (tarball?) is False, second (local install marker) is True.
    monkeypatch.setattr(os.path, 'isfile', MagicMock(side_effect=[False, True]))
    monkeypatch.setattr(collection.CollectionRequirement, 'from_path', MagicMock(return_value=mock_collection()))

    requirements = [('%s.%s' % (ns, coll_name), requested_version, None)]

    with pytest.raises(AnsibleError) as err:
        # An empty API list means no server can supply the remote collection.
        collection.verify_collections(requirements, './', [], False, False)

    assert err.value.message == "Failed to find remote collection %s.%s:%s on any of the galaxy servers" % (ns, coll_name, requested_version)
|
||||
|
||||
|
||||
@patch.object(os.path, 'isdir', return_value=True)
@patch.object(collection.CollectionRequirement, 'verify')
def test_verify_collections_no_remote_ignore_errors(mock_verify, mock_isdir, mock_collection, monkeypatch):
    """--ignore-errors turns a missing remote collection into a warning."""
    ns = 'ansible_namespace'
    coll_name = 'collection'
    requested_version = '1.0.0'

    monkeypatch.setattr(os.path, 'isfile', MagicMock(side_effect=[False, True]))
    monkeypatch.setattr(collection.CollectionRequirement, 'from_path', MagicMock(return_value=mock_collection()))

    requirements = [('%s.%s' % (ns, coll_name), requested_version, None)]

    with patch.object(Display, 'warning') as mock_warning:
        # No galaxy servers + ignore_errors=True: expect a warning, not an exception.
        collection.verify_collections(requirements, './', [], False, True)

    skip_message = "Failed to verify collection %s.%s but skipping due to --ignore-errors being set." % (ns, coll_name)
    original_err = "Error: Failed to find remote collection %s.%s:%s on any of the galaxy servers" % (ns, coll_name, requested_version)

    assert mock_warning.called
    assert mock_warning.call_args[0][0] == skip_message + " " + original_err
|
||||
|
||||
|
||||
def test_verify_collections_tarfile(monkeypatch):
    """A tarball filename is rejected by the namespace.name validation."""
    monkeypatch.setattr(os.path, 'isfile', MagicMock(return_value=True))

    invalid_format = 'ansible_namespace-collection-0.1.0.tar.gz'

    with pytest.raises(AnsibleError) as err:
        collection.verify_collections([(invalid_format, '*', None)], './', [], False, False)

    expected = "'%s' is not a valid collection name. The format namespace.name is expected." % invalid_format
    assert err.value.message == expected
|
||||
|
||||
|
||||
def test_verify_collections_path(monkeypatch):
    """A filesystem path is rejected by the namespace.name validation."""
    monkeypatch.setattr(os.path, 'isfile', MagicMock(return_value=False))

    invalid_format = 'collections/collection_namespace/collection_name'

    with pytest.raises(AnsibleError) as err:
        collection.verify_collections([(invalid_format, '*', None)], './', [], False, False)

    expected = "'%s' is not a valid collection name. The format namespace.name is expected." % invalid_format
    assert err.value.message == expected
|
||||
|
||||
|
||||
def test_verify_collections_url(monkeypatch):
    """A download URL is rejected by the namespace.name validation."""
    monkeypatch.setattr(os.path, 'isfile', MagicMock(return_value=False))

    invalid_format = 'https://galaxy.ansible.com/download/ansible_namespace-collection-0.1.0.tar.gz'

    with pytest.raises(AnsibleError) as err:
        collection.verify_collections([(invalid_format, '*', None)], './', [], False, False)

    expected = "'%s' is not a valid collection name. The format namespace.name is expected." % invalid_format
    assert err.value.message == expected
|
||||
|
||||
|
||||
@patch.object(os.path, 'isdir', return_value=True)
@patch.object(collection.CollectionRequirement, 'verify')
def test_verify_collections_name(mock_verify, mock_isdir, mock_collection, monkeypatch):
    """Happy path: the remote is located by name and its artifact downloaded once."""
    installed = mock_collection()
    monkeypatch.setattr(collection.CollectionRequirement, 'from_path', MagicMock(return_value=installed))

    # isfile probes: not a tarball, local install present, then no cached artifact.
    monkeypatch.setattr(os.path, 'isfile', MagicMock(side_effect=[False, True, False]))

    located_remote_from_name = MagicMock(return_value=mock_collection(local=False))
    monkeypatch.setattr(collection.CollectionRequirement, 'from_name', located_remote_from_name)

    with patch.object(collection, '_download_file') as mock_download_file:
        requirements = [(
            '%s.%s' % (installed.namespace, installed.name),
            '%s' % installed.latest_version,
            None,
        )]
        collection.verify_collections(requirements, './', [installed.api], False, False)

        assert mock_download_file.call_count == 1
        assert located_remote_from_name.call_count == 1
|
||||
|
|
|
@ -24,12 +24,24 @@ import ansible.module_utils.six.moves.urllib.error as urllib_error
|
|||
from ansible import context
|
||||
from ansible.cli.galaxy import GalaxyCLI
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.galaxy import collection, api
|
||||
from ansible.galaxy import collection, api, dependency_resolution
|
||||
from ansible.galaxy.dependency_resolution.dataclasses import Candidate, Requirement
|
||||
from ansible.module_utils._text import to_bytes, to_native, to_text
|
||||
from ansible.utils import context_objects as co
|
||||
from ansible.utils.display import Display
|
||||
|
||||
|
||||
class RequirementCandidates():
    """Captures the candidate list returned by a wrapped resolver callable."""

    def __init__(self):
        # The most recent return value of the wrapped function.
        self.candidates = []

    def func_wrapper(self, func):
        """Return a proxy around ``func`` that records its return value."""
        def run(*args, **kwargs):
            result = func(*args, **kwargs)
            self.candidates = result
            return result
        return run
|
||||
|
||||
|
||||
def call_galaxy_cli(args):
|
||||
orig = co.GlobalCLIArgs._Singleton__instance
|
||||
co.GlobalCLIArgs._Singleton__instance = None
|
||||
|
@ -160,16 +172,14 @@ def galaxy_server():
|
|||
|
||||
|
||||
def test_build_requirement_from_path(collection_artifact):
|
||||
actual = collection.CollectionRequirement.from_path(collection_artifact[0], True)
|
||||
tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
|
||||
actual = Requirement.from_dir_path_as_unknown(collection_artifact[0], concrete_artifact_cm)
|
||||
|
||||
assert actual.namespace == u'ansible_namespace'
|
||||
assert actual.name == u'collection'
|
||||
assert actual.b_path == collection_artifact[0]
|
||||
assert actual.api is None
|
||||
assert actual.skip is True
|
||||
assert actual.versions == set([u'*'])
|
||||
assert actual.latest_version == u'*'
|
||||
assert actual.dependencies == {}
|
||||
assert actual.src == collection_artifact[0]
|
||||
assert actual.ver == u'0.1.0'
|
||||
|
||||
|
||||
@pytest.mark.parametrize('version', ['1.1.1', '1.1.0', '1.0.0'])
|
||||
|
@ -188,17 +198,15 @@ def test_build_requirement_from_path_with_manifest(version, collection_artifact)
|
|||
with open(manifest_path, 'wb') as manifest_obj:
|
||||
manifest_obj.write(to_bytes(manifest_value))
|
||||
|
||||
actual = collection.CollectionRequirement.from_path(collection_artifact[0], True)
|
||||
tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
|
||||
actual = Requirement.from_dir_path_as_unknown(collection_artifact[0], concrete_artifact_cm)
|
||||
|
||||
# While the folder name suggests a different collection, we treat MANIFEST.json as the source of truth.
|
||||
assert actual.namespace == u'namespace'
|
||||
assert actual.name == u'name'
|
||||
assert actual.b_path == collection_artifact[0]
|
||||
assert actual.api is None
|
||||
assert actual.skip is True
|
||||
assert actual.versions == set([to_text(version)])
|
||||
assert actual.latest_version == to_text(version)
|
||||
assert actual.dependencies == {'ansible_namespace.collection': '*'}
|
||||
assert actual.src == collection_artifact[0]
|
||||
assert actual.ver == to_text(version)
|
||||
|
||||
|
||||
def test_build_requirement_from_path_invalid_manifest(collection_artifact):
|
||||
|
@ -206,12 +214,19 @@ def test_build_requirement_from_path_invalid_manifest(collection_artifact):
|
|||
with open(manifest_path, 'wb') as manifest_obj:
|
||||
manifest_obj.write(b"not json")
|
||||
|
||||
expected = "Collection file at '%s' does not contain a valid json string." % to_native(manifest_path)
|
||||
tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
|
||||
|
||||
expected = "Collection tar file member MANIFEST.json does not contain a valid json string."
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
collection.CollectionRequirement.from_path(collection_artifact[0], True)
|
||||
Requirement.from_dir_path_as_unknown(collection_artifact[0], concrete_artifact_cm)
|
||||
|
||||
|
||||
def test_build_requirement_from_path_no_version(collection_artifact, monkeypatch):
|
||||
def test_build_artifact_from_path_no_version(collection_artifact, monkeypatch):
|
||||
mock_display = MagicMock()
|
||||
monkeypatch.setattr(Display, 'display', mock_display)
|
||||
|
||||
# a collection artifact should always contain a valid version
|
||||
manifest_path = os.path.join(collection_artifact[0], b'MANIFEST.json')
|
||||
manifest_value = json.dumps({
|
||||
'collection_info': {
|
||||
|
@ -224,40 +239,56 @@ def test_build_requirement_from_path_no_version(collection_artifact, monkeypatch
|
|||
with open(manifest_path, 'wb') as manifest_obj:
|
||||
manifest_obj.write(to_bytes(manifest_value))
|
||||
|
||||
tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
|
||||
|
||||
expected = (
|
||||
'^Collection metadata file at `.*` is expected to have a valid SemVer '
|
||||
'version value but got {empty_unicode_string!r}$'.
|
||||
format(empty_unicode_string=u'')
|
||||
)
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
Requirement.from_dir_path_as_unknown(collection_artifact[0], concrete_artifact_cm)
|
||||
|
||||
|
||||
def test_build_requirement_from_path_no_version(collection_artifact, monkeypatch):
|
||||
mock_display = MagicMock()
|
||||
monkeypatch.setattr(Display, 'display', mock_display)
|
||||
|
||||
actual = collection.CollectionRequirement.from_path(collection_artifact[0], True)
|
||||
# version may be falsey/arbitrary strings for collections in development
|
||||
manifest_path = os.path.join(collection_artifact[0], b'galaxy.yml')
|
||||
metadata = {
|
||||
'authors': ['Ansible'],
|
||||
'readme': 'README.md',
|
||||
'namespace': 'namespace',
|
||||
'name': 'name',
|
||||
'version': '',
|
||||
'dependencies': {},
|
||||
}
|
||||
with open(manifest_path, 'wb') as manifest_obj:
|
||||
manifest_obj.write(to_bytes(yaml.safe_dump(metadata)))
|
||||
|
||||
tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
|
||||
actual = Requirement.from_dir_path_as_unknown(collection_artifact[0], concrete_artifact_cm)
|
||||
|
||||
# While the folder name suggests a different collection, we treat MANIFEST.json as the source of truth.
|
||||
assert actual.namespace == u'namespace'
|
||||
assert actual.name == u'name'
|
||||
assert actual.b_path == collection_artifact[0]
|
||||
assert actual.api is None
|
||||
assert actual.skip is True
|
||||
assert actual.versions == set(['*'])
|
||||
assert actual.latest_version == u'*'
|
||||
assert actual.dependencies == {}
|
||||
|
||||
assert mock_display.call_count == 1
|
||||
|
||||
actual_warn = ' '.join(mock_display.mock_calls[0][1][0].split('\n'))
|
||||
expected_warn = "Collection at '%s' does not have a valid version set, falling back to '*'. Found version: ''" \
|
||||
% to_text(collection_artifact[0])
|
||||
assert expected_warn in actual_warn
|
||||
assert actual.src == collection_artifact[0]
|
||||
assert actual.ver == u'*'
|
||||
|
||||
|
||||
def test_build_requirement_from_tar(collection_artifact):
|
||||
actual = collection.CollectionRequirement.from_tar(collection_artifact[1], True, True)
|
||||
tmp_path = os.path.join(os.path.split(collection_artifact[1])[0], b'temp')
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False)
|
||||
|
||||
actual = Requirement.from_requirement_dict({'name': to_text(collection_artifact[1])}, concrete_artifact_cm)
|
||||
|
||||
assert actual.namespace == u'ansible_namespace'
|
||||
assert actual.name == u'collection'
|
||||
assert actual.b_path == collection_artifact[1]
|
||||
assert actual.api is None
|
||||
assert actual.skip is False
|
||||
assert actual.versions == set([u'0.1.0'])
|
||||
assert actual.latest_version == u'0.1.0'
|
||||
assert actual.dependencies == {}
|
||||
assert actual.src == to_text(collection_artifact[1])
|
||||
assert actual.ver == u'0.1.0'
|
||||
|
||||
|
||||
def test_build_requirement_from_tar_fail_not_tar(tmp_path_factory):
|
||||
|
@ -266,9 +297,11 @@ def test_build_requirement_from_tar_fail_not_tar(tmp_path_factory):
|
|||
with open(test_file, 'wb') as test_obj:
|
||||
test_obj.write(b"\x00\x01\x02\x03")
|
||||
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
|
||||
expected = "Collection artifact at '%s' is not a valid tar file." % to_native(test_file)
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
collection.CollectionRequirement.from_tar(test_file, True, True)
|
||||
Requirement.from_requirement_dict({'name': to_text(test_file)}, concrete_artifact_cm)
|
||||
|
||||
|
||||
def test_build_requirement_from_tar_no_manifest(tmp_path_factory):
|
||||
|
@ -289,9 +322,11 @@ def test_build_requirement_from_tar_no_manifest(tmp_path_factory):
|
|||
tar_info.mode = 0o0644
|
||||
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
|
||||
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
|
||||
expected = "Collection at '%s' does not contain the required file MANIFEST.json." % to_native(tar_path)
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
collection.CollectionRequirement.from_tar(tar_path, True, True)
|
||||
Requirement.from_requirement_dict({'name': to_text(tar_path)}, concrete_artifact_cm)
|
||||
|
||||
|
||||
def test_build_requirement_from_tar_no_files(tmp_path_factory):
|
||||
|
@ -311,9 +346,9 @@ def test_build_requirement_from_tar_no_files(tmp_path_factory):
|
|||
tar_info.mode = 0o0644
|
||||
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
|
||||
|
||||
expected = "Collection at '%s' does not contain the required file FILES.json." % to_native(tar_path)
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
collection.CollectionRequirement.from_tar(tar_path, True, True)
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
with pytest.raises(KeyError, match='namespace'):
|
||||
Requirement.from_requirement_dict({'name': to_text(tar_path)}, concrete_artifact_cm)
|
||||
|
||||
|
||||
def test_build_requirement_from_tar_invalid_manifest(tmp_path_factory):
|
||||
|
@ -329,95 +364,128 @@ def test_build_requirement_from_tar_invalid_manifest(tmp_path_factory):
|
|||
tar_info.mode = 0o0644
|
||||
tfile.addfile(tarinfo=tar_info, fileobj=b_io)
|
||||
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
|
||||
expected = "Collection tar file member MANIFEST.json does not contain a valid json string."
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
collection.CollectionRequirement.from_tar(tar_path, True, True)
|
||||
Requirement.from_requirement_dict({'name': to_text(tar_path)}, concrete_artifact_cm)
|
||||
|
||||
|
||||
def test_build_requirement_from_name(galaxy_server, monkeypatch):
|
||||
def test_build_requirement_from_name(galaxy_server, monkeypatch, tmp_path_factory):
|
||||
mock_get_versions = MagicMock()
|
||||
mock_get_versions.return_value = ['2.1.9', '2.1.10']
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
|
||||
|
||||
actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '*', True, True)
|
||||
mock_version_metadata = MagicMock(
|
||||
namespace='namespace', name='collection',
|
||||
version='2.1.10', artifact_sha256='', dependencies={}
|
||||
)
|
||||
monkeypatch.setattr(api.GalaxyAPI, 'get_collection_version_metadata', mock_version_metadata)
|
||||
|
||||
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
|
||||
collections = ['namespace.collection']
|
||||
requirements_file = None
|
||||
|
||||
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', collections[0]])
|
||||
requirements = cli._require_one_of_collections_requirements(
|
||||
collections, requirements_file, artifacts_manager=concrete_artifact_cm
|
||||
)['collections']
|
||||
actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, True, False)['namespace.collection']
|
||||
|
||||
assert actual.namespace == u'namespace'
|
||||
assert actual.name == u'collection'
|
||||
assert actual.b_path is None
|
||||
assert actual.api == galaxy_server
|
||||
assert actual.skip is False
|
||||
assert actual.versions == set([u'2.1.9', u'2.1.10'])
|
||||
assert actual.latest_version == u'2.1.10'
|
||||
assert actual.dependencies == {}
|
||||
assert actual.ver == u'2.1.10'
|
||||
assert actual.src == galaxy_server
|
||||
|
||||
assert mock_get_versions.call_count == 1
|
||||
assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
|
||||
|
||||
|
||||
def test_build_requirement_from_name_with_prerelease(galaxy_server, monkeypatch):
|
||||
def test_build_requirement_from_name_with_prerelease(galaxy_server, monkeypatch, tmp_path_factory):
|
||||
mock_get_versions = MagicMock()
|
||||
mock_get_versions.return_value = ['1.0.1', '2.0.1-beta.1', '2.0.1']
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
|
||||
|
||||
actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '*', True, True)
|
||||
mock_get_info = MagicMock()
|
||||
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.1', None, None, {})
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
|
||||
|
||||
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
|
||||
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection'])
|
||||
requirements = cli._require_one_of_collections_requirements(
|
||||
['namespace.collection'], None, artifacts_manager=concrete_artifact_cm
|
||||
)['collections']
|
||||
actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, True, False)['namespace.collection']
|
||||
|
||||
assert actual.namespace == u'namespace'
|
||||
assert actual.name == u'collection'
|
||||
assert actual.b_path is None
|
||||
assert actual.api == galaxy_server
|
||||
assert actual.skip is False
|
||||
assert actual.versions == set([u'1.0.1', u'2.0.1'])
|
||||
assert actual.latest_version == u'2.0.1'
|
||||
assert actual.dependencies == {}
|
||||
assert actual.src == galaxy_server
|
||||
assert actual.ver == u'2.0.1'
|
||||
|
||||
assert mock_get_versions.call_count == 1
|
||||
assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
|
||||
|
||||
|
||||
def test_build_requirment_from_name_with_prerelease_explicit(galaxy_server, monkeypatch):
|
||||
def test_build_requirment_from_name_with_prerelease_explicit(galaxy_server, monkeypatch, tmp_path_factory):
|
||||
mock_get_versions = MagicMock()
|
||||
mock_get_versions.return_value = ['1.0.1', '2.0.1-beta.1', '2.0.1']
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
|
||||
|
||||
mock_get_info = MagicMock()
|
||||
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.1-beta.1', None, None,
|
||||
{})
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
|
||||
|
||||
actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '2.0.1-beta.1', True,
|
||||
True)
|
||||
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
|
||||
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:2.0.1-beta.1'])
|
||||
requirements = cli._require_one_of_collections_requirements(
|
||||
['namespace.collection:2.0.1-beta.1'], None, artifacts_manager=concrete_artifact_cm
|
||||
)['collections']
|
||||
actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, True, False)['namespace.collection']
|
||||
|
||||
assert actual.namespace == u'namespace'
|
||||
assert actual.name == u'collection'
|
||||
assert actual.b_path is None
|
||||
assert actual.api == galaxy_server
|
||||
assert actual.skip is False
|
||||
assert actual.versions == set([u'2.0.1-beta.1'])
|
||||
assert actual.latest_version == u'2.0.1-beta.1'
|
||||
assert actual.dependencies == {}
|
||||
assert actual.src == galaxy_server
|
||||
assert actual.ver == u'2.0.1-beta.1'
|
||||
|
||||
assert mock_get_info.call_count == 1
|
||||
assert mock_get_info.mock_calls[0][1] == ('namespace', 'collection', '2.0.1-beta.1')
|
||||
|
||||
|
||||
def test_build_requirement_from_name_second_server(galaxy_server, monkeypatch):
|
||||
def test_build_requirement_from_name_second_server(galaxy_server, monkeypatch, tmp_path_factory):
|
||||
mock_get_versions = MagicMock()
|
||||
mock_get_versions.return_value = ['1.0.1', '1.0.2', '1.0.3']
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
|
||||
|
||||
mock_get_info = MagicMock()
|
||||
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '1.0.3', None, None, {})
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
|
||||
|
||||
broken_server = copy.copy(galaxy_server)
|
||||
broken_server.api_server = 'https://broken.com/'
|
||||
mock_version_list = MagicMock()
|
||||
mock_version_list.return_value = []
|
||||
monkeypatch.setattr(broken_server, 'get_collection_versions', mock_version_list)
|
||||
|
||||
actual = collection.CollectionRequirement.from_name('namespace.collection', [broken_server, galaxy_server],
|
||||
'>1.0.1', False, True)
|
||||
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
|
||||
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:>1.0.1'])
|
||||
requirements = cli._require_one_of_collections_requirements(
|
||||
['namespace.collection:>1.0.1'], None, artifacts_manager=concrete_artifact_cm
|
||||
)['collections']
|
||||
actual = collection._resolve_depenency_map(requirements, [broken_server, galaxy_server], concrete_artifact_cm, None, True, False)['namespace.collection']
|
||||
|
||||
assert actual.namespace == u'namespace'
|
||||
assert actual.name == u'collection'
|
||||
assert actual.b_path is None
|
||||
# assert actual.api == galaxy_server
|
||||
assert actual.skip is False
|
||||
assert actual.versions == set([u'1.0.2', u'1.0.3'])
|
||||
assert actual.latest_version == u'1.0.3'
|
||||
assert actual.dependencies == {}
|
||||
assert actual.src == galaxy_server
|
||||
assert actual.ver == u'1.0.3'
|
||||
|
||||
assert mock_version_list.call_count == 1
|
||||
assert mock_version_list.mock_calls[0][1] == ('namespace', 'collection')
|
||||
|
@ -426,53 +494,91 @@ def test_build_requirement_from_name_second_server(galaxy_server, monkeypatch):
|
|||
assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
|
||||
|
||||
|
||||
def test_build_requirement_from_name_missing(galaxy_server, monkeypatch):
|
||||
def test_build_requirement_from_name_missing(galaxy_server, monkeypatch, tmp_path_factory):
|
||||
mock_open = MagicMock()
|
||||
mock_open.return_value = []
|
||||
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_open)
|
||||
|
||||
expected = "Failed to find collection namespace.collection:*"
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server, galaxy_server], '*', False,
|
||||
True)
|
||||
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
|
||||
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:>1.0.1'])
|
||||
requirements = cli._require_one_of_collections_requirements(
|
||||
['namespace.collection'], None, artifacts_manager=concrete_artifact_cm
|
||||
)['collections']
|
||||
|
||||
expected = "Failed to resolve the requested dependencies map. Could not satisfy the following requirements:\n* namespace.collection:* (direct request)"
|
||||
with pytest.raises(AnsibleError, match=re.escape(expected)):
|
||||
collection._resolve_depenency_map(requirements, [galaxy_server, galaxy_server], concrete_artifact_cm, None, False, True)
|
||||
|
||||
|
||||
def test_build_requirement_from_name_401_unauthorized(galaxy_server, monkeypatch):
|
||||
def test_build_requirement_from_name_401_unauthorized(galaxy_server, monkeypatch, tmp_path_factory):
|
||||
mock_open = MagicMock()
|
||||
mock_open.side_effect = api.GalaxyError(urllib_error.HTTPError('https://galaxy.server.com', 401, 'msg', {},
|
||||
StringIO()), "error")
|
||||
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_open)
|
||||
|
||||
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
|
||||
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:>1.0.1'])
|
||||
requirements = cli._require_one_of_collections_requirements(
|
||||
['namespace.collection'], None, artifacts_manager=concrete_artifact_cm
|
||||
)['collections']
|
||||
|
||||
expected = "error (HTTP Code: 401, Message: msg)"
|
||||
with pytest.raises(api.GalaxyError, match=re.escape(expected)):
|
||||
collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server, galaxy_server], '*', False)
|
||||
collection._resolve_depenency_map(requirements, [galaxy_server, galaxy_server], concrete_artifact_cm, None, False, False)
|
||||
|
||||
|
||||
def test_build_requirement_from_name_single_version(galaxy_server, monkeypatch):
|
||||
def test_build_requirement_from_name_single_version(galaxy_server, monkeypatch, tmp_path_factory):
|
||||
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
multi_api_proxy = collection.galaxy_api_proxy.MultiGalaxyAPIProxy([galaxy_server], concrete_artifact_cm)
|
||||
dep_provider = dependency_resolution.providers.CollectionDependencyProvider(apis=multi_api_proxy, concrete_artifacts_manager=concrete_artifact_cm)
|
||||
|
||||
matches = RequirementCandidates()
|
||||
mock_find_matches = MagicMock(side_effect=matches.func_wrapper(dep_provider.find_matches), autospec=True)
|
||||
monkeypatch.setattr(dependency_resolution.providers.CollectionDependencyProvider, 'find_matches', mock_find_matches)
|
||||
|
||||
mock_get_versions = MagicMock()
|
||||
mock_get_versions.return_value = ['2.0.0']
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
|
||||
|
||||
mock_get_info = MagicMock()
|
||||
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.0', None, None,
|
||||
{})
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
|
||||
|
||||
actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '2.0.0', True,
|
||||
True)
|
||||
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:==2.0.0'])
|
||||
requirements = cli._require_one_of_collections_requirements(
|
||||
['namespace.collection:==2.0.0'], None, artifacts_manager=concrete_artifact_cm
|
||||
)['collections']
|
||||
|
||||
actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True)['namespace.collection']
|
||||
|
||||
assert actual.namespace == u'namespace'
|
||||
assert actual.name == u'collection'
|
||||
assert actual.b_path is None
|
||||
assert actual.api == galaxy_server
|
||||
assert actual.skip is False
|
||||
assert actual.versions == set([u'2.0.0'])
|
||||
assert actual.latest_version == u'2.0.0'
|
||||
assert actual.dependencies == {}
|
||||
assert actual.src == galaxy_server
|
||||
assert actual.ver == u'2.0.0'
|
||||
assert [c.ver for c in matches.candidates] == [u'2.0.0']
|
||||
|
||||
assert mock_get_info.call_count == 1
|
||||
assert mock_get_info.mock_calls[0][1] == ('namespace', 'collection', '2.0.0')
|
||||
|
||||
|
||||
def test_build_requirement_from_name_multiple_versions_one_match(galaxy_server, monkeypatch):
|
||||
def test_build_requirement_from_name_multiple_versions_one_match(galaxy_server, monkeypatch, tmp_path_factory):
|
||||
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
multi_api_proxy = collection.galaxy_api_proxy.MultiGalaxyAPIProxy([galaxy_server], concrete_artifact_cm)
|
||||
dep_provider = dependency_resolution.providers.CollectionDependencyProvider(apis=multi_api_proxy, concrete_artifacts_manager=concrete_artifact_cm)
|
||||
|
||||
matches = RequirementCandidates()
|
||||
mock_find_matches = MagicMock(side_effect=matches.func_wrapper(dep_provider.find_matches), autospec=True)
|
||||
monkeypatch.setattr(dependency_resolution.providers.CollectionDependencyProvider, 'find_matches', mock_find_matches)
|
||||
|
||||
mock_get_versions = MagicMock()
|
||||
mock_get_versions.return_value = ['2.0.0', '2.0.1', '2.0.2']
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
|
||||
|
@ -482,17 +588,18 @@ def test_build_requirement_from_name_multiple_versions_one_match(galaxy_server,
|
|||
{})
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
|
||||
|
||||
actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '>=2.0.1,<2.0.2',
|
||||
True, True)
|
||||
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:>=2.0.1,<2.0.2'])
|
||||
requirements = cli._require_one_of_collections_requirements(
|
||||
['namespace.collection:>=2.0.1,<2.0.2'], None, artifacts_manager=concrete_artifact_cm
|
||||
)['collections']
|
||||
|
||||
actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True)['namespace.collection']
|
||||
|
||||
assert actual.namespace == u'namespace'
|
||||
assert actual.name == u'collection'
|
||||
assert actual.b_path is None
|
||||
assert actual.api == galaxy_server
|
||||
assert actual.skip is False
|
||||
assert actual.versions == set([u'2.0.1'])
|
||||
assert actual.latest_version == u'2.0.1'
|
||||
assert actual.dependencies == {}
|
||||
assert actual.src == galaxy_server
|
||||
assert actual.ver == u'2.0.1'
|
||||
assert [c.ver for c in matches.candidates] == [u'2.0.1']
|
||||
|
||||
assert mock_get_versions.call_count == 1
|
||||
assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
|
||||
|
@ -501,122 +608,118 @@ def test_build_requirement_from_name_multiple_versions_one_match(galaxy_server,
|
|||
assert mock_get_info.mock_calls[0][1] == ('namespace', 'collection', '2.0.1')
|
||||
|
||||
|
||||
def test_build_requirement_from_name_multiple_version_results(galaxy_server, monkeypatch):
|
||||
def test_build_requirement_from_name_multiple_version_results(galaxy_server, monkeypatch, tmp_path_factory):
|
||||
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
multi_api_proxy = collection.galaxy_api_proxy.MultiGalaxyAPIProxy([galaxy_server], concrete_artifact_cm)
|
||||
dep_provider = dependency_resolution.providers.CollectionDependencyProvider(apis=multi_api_proxy, concrete_artifacts_manager=concrete_artifact_cm)
|
||||
|
||||
matches = RequirementCandidates()
|
||||
mock_find_matches = MagicMock(side_effect=matches.func_wrapper(dep_provider.find_matches), autospec=True)
|
||||
monkeypatch.setattr(dependency_resolution.providers.CollectionDependencyProvider, 'find_matches', mock_find_matches)
|
||||
|
||||
mock_get_info = MagicMock()
|
||||
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.5', None, None, {})
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
|
||||
|
||||
mock_get_versions = MagicMock()
|
||||
mock_get_versions.return_value = ['1.0.1', '1.0.2', '1.0.3']
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
|
||||
|
||||
mock_get_versions.return_value = ['2.0.0', '2.0.1', '2.0.2', '2.0.3', '2.0.4', '2.0.5']
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
|
||||
|
||||
actual = collection.CollectionRequirement.from_name('namespace.collection', [galaxy_server], '!=2.0.2',
|
||||
True, True)
|
||||
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:!=2.0.2'])
|
||||
requirements = cli._require_one_of_collections_requirements(
|
||||
['namespace.collection:!=2.0.2'], None, artifacts_manager=concrete_artifact_cm
|
||||
)['collections']
|
||||
|
||||
actual = collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True)['namespace.collection']
|
||||
|
||||
assert actual.namespace == u'namespace'
|
||||
assert actual.name == u'collection'
|
||||
assert actual.b_path is None
|
||||
assert actual.api == galaxy_server
|
||||
assert actual.skip is False
|
||||
assert actual.versions == set([u'2.0.0', u'2.0.1', u'2.0.3', u'2.0.4', u'2.0.5'])
|
||||
assert actual.latest_version == u'2.0.5'
|
||||
assert actual.dependencies == {}
|
||||
assert actual.src == galaxy_server
|
||||
assert actual.ver == u'2.0.5'
|
||||
# should be ordered latest to earliest
|
||||
assert [c.ver for c in matches.candidates] == [u'2.0.5', u'2.0.4', u'2.0.3', u'2.0.1', u'2.0.0']
|
||||
|
||||
assert mock_get_versions.call_count == 1
|
||||
assert mock_get_versions.mock_calls[0][1] == ('namespace', 'collection')
|
||||
|
||||
|
||||
@pytest.mark.parametrize('versions, requirement, expected_filter, expected_latest', [
|
||||
[['1.0.0', '1.0.1'], '*', ['1.0.0', '1.0.1'], '1.0.1'],
|
||||
[['1.0.0', '1.0.5', '1.1.0'], '>1.0.0,<1.1.0', ['1.0.5'], '1.0.5'],
|
||||
[['1.0.0', '1.0.5', '1.1.0'], '>1.0.0,<=1.0.5', ['1.0.5'], '1.0.5'],
|
||||
[['1.0.0', '1.0.5', '1.1.0'], '>=1.1.0', ['1.1.0'], '1.1.0'],
|
||||
[['1.0.0', '1.0.5', '1.1.0'], '!=1.1.0', ['1.0.0', '1.0.5'], '1.0.5'],
|
||||
[['1.0.0', '1.0.5', '1.1.0'], '==1.0.5', ['1.0.5'], '1.0.5'],
|
||||
[['1.0.0', '1.0.5', '1.1.0'], '1.0.5', ['1.0.5'], '1.0.5'],
|
||||
[['1.0.0', '2.0.0', '3.0.0'], '>=2', ['2.0.0', '3.0.0'], '3.0.0'],
|
||||
])
|
||||
def test_add_collection_requirements(versions, requirement, expected_filter, expected_latest):
|
||||
req = collection.CollectionRequirement('namespace', 'name', None, 'https://galaxy.com', versions, requirement,
|
||||
False)
|
||||
assert req.versions == set(expected_filter)
|
||||
assert req.latest_version == expected_latest
|
||||
def test_candidate_with_conflict(monkeypatch, tmp_path_factory, galaxy_server):
|
||||
|
||||
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
|
||||
mock_get_info = MagicMock()
|
||||
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '2.0.5', None, None, {})
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
|
||||
|
||||
mock_get_versions = MagicMock()
|
||||
mock_get_versions.return_value = ['2.0.5']
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
|
||||
|
||||
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection:!=2.0.5'])
|
||||
requirements = cli._require_one_of_collections_requirements(
|
||||
['namespace.collection:!=2.0.5'], None, artifacts_manager=concrete_artifact_cm
|
||||
)['collections']
|
||||
|
||||
expected = "Failed to resolve the requested dependencies map. Could not satisfy the following requirements:\n"
|
||||
expected += "* namespace.collection:!=2.0.5 (direct request)"
|
||||
with pytest.raises(AnsibleError, match=re.escape(expected)):
|
||||
collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True)
|
||||
|
||||
|
||||
def test_add_collection_requirement_to_unknown_installed_version(monkeypatch):
|
||||
def test_dep_candidate_with_conflict(monkeypatch, tmp_path_factory, galaxy_server):
|
||||
test_dir = to_bytes(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections Input'))
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
|
||||
mock_get_info_return = [
|
||||
api.CollectionVersionMetadata('parent', 'collection', '2.0.5', None, None, {'namespace.collection': '!=1.0.0'}),
|
||||
api.CollectionVersionMetadata('namespace', 'collection', '1.0.0', None, None, {}),
|
||||
]
|
||||
mock_get_info = MagicMock(side_effect=mock_get_info_return)
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
|
||||
|
||||
mock_get_versions = MagicMock(side_effect=[['2.0.5'], ['1.0.0']])
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
|
||||
|
||||
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'parent.collection:2.0.5'])
|
||||
requirements = cli._require_one_of_collections_requirements(
|
||||
['parent.collection:2.0.5'], None, artifacts_manager=concrete_artifact_cm
|
||||
)['collections']
|
||||
|
||||
expected = "Failed to resolve the requested dependencies map. Could not satisfy the following requirements:\n"
|
||||
expected += "* namespace.collection:!=1.0.0 (dependency of parent.collection:2.0.5)"
|
||||
with pytest.raises(AnsibleError, match=re.escape(expected)):
|
||||
collection._resolve_depenency_map(requirements, [galaxy_server], concrete_artifact_cm, None, False, True)
|
||||
|
||||
|
||||
def test_install_installed_collection(monkeypatch, tmp_path_factory, galaxy_server):
|
||||
|
||||
mock_installed_collections = MagicMock(return_value=[Candidate('namespace.collection', '1.2.3', None, 'dir')])
|
||||
|
||||
monkeypatch.setattr(collection, 'find_existing_collections', mock_installed_collections)
|
||||
|
||||
test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(test_dir, validate_certs=False)
|
||||
|
||||
mock_display = MagicMock()
|
||||
monkeypatch.setattr(Display, 'display', mock_display)
|
||||
|
||||
req = collection.CollectionRequirement('namespace', 'name', None, 'https://galaxy.com', ['*'], '*', False,
|
||||
skip=True)
|
||||
mock_get_info = MagicMock()
|
||||
mock_get_info.return_value = api.CollectionVersionMetadata('namespace', 'collection', '1.2.3', None, None, {})
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_version_metadata', mock_get_info)
|
||||
|
||||
req.add_requirement('parent.collection', '1.0.0')
|
||||
assert req.latest_version == '*'
|
||||
mock_get_versions = MagicMock(return_value=['1.2.3', '1.3.0'])
|
||||
monkeypatch.setattr(galaxy_server, 'get_collection_versions', mock_get_versions)
|
||||
|
||||
assert mock_display.call_count == 1
|
||||
cli = GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection'])
|
||||
cli.run()
|
||||
|
||||
actual_warn = ' '.join(mock_display.mock_calls[0][1][0].split('\n'))
|
||||
assert "Failed to validate the collection requirement 'namespace.name:1.0.0' for parent.collection" in actual_warn
|
||||
|
||||
|
||||
def test_add_collection_wildcard_requirement_to_unknown_installed_version():
|
||||
req = collection.CollectionRequirement('namespace', 'name', None, 'https://galaxy.com', ['*'], '*', False,
|
||||
skip=True)
|
||||
req.add_requirement(str(req), '*')
|
||||
|
||||
assert req.versions == set('*')
|
||||
assert req.latest_version == '*'
|
||||
|
||||
|
||||
def test_add_collection_requirement_with_conflict(galaxy_server):
|
||||
expected = "Cannot meet requirement ==1.0.2 for dependency namespace.name from source '%s'. Available versions " \
|
||||
"before last requirement added: 1.0.0, 1.0.1\n" \
|
||||
"Requirements from:\n" \
|
||||
"\tbase - 'namespace.name:==1.0.2'" % galaxy_server.api_server
|
||||
with pytest.raises(AnsibleError, match=expected):
|
||||
collection.CollectionRequirement('namespace', 'name', None, galaxy_server, ['1.0.0', '1.0.1'], '==1.0.2',
|
||||
False)
|
||||
|
||||
|
||||
def test_add_requirement_to_existing_collection_with_conflict(galaxy_server):
|
||||
req = collection.CollectionRequirement('namespace', 'name', None, galaxy_server, ['1.0.0', '1.0.1'], '*', False)
|
||||
|
||||
expected = "Cannot meet dependency requirement 'namespace.name:1.0.2' for collection namespace.collection2 from " \
|
||||
"source '%s'. Available versions before last requirement added: 1.0.0, 1.0.1\n" \
|
||||
"Requirements from:\n" \
|
||||
"\tbase - 'namespace.name:*'\n" \
|
||||
"\tnamespace.collection2 - 'namespace.name:1.0.2'" % galaxy_server.api_server
|
||||
with pytest.raises(AnsibleError, match=re.escape(expected)):
|
||||
req.add_requirement('namespace.collection2', '1.0.2')
|
||||
|
||||
|
||||
def test_add_requirement_to_installed_collection_with_conflict():
|
||||
source = 'https://galaxy.ansible.com'
|
||||
req = collection.CollectionRequirement('namespace', 'name', None, source, ['1.0.0', '1.0.1'], '*', False,
|
||||
skip=True)
|
||||
|
||||
expected = "Cannot meet requirement namespace.name:1.0.2 as it is already installed at version '1.0.1'. " \
|
||||
"Use --force to overwrite"
|
||||
with pytest.raises(AnsibleError, match=re.escape(expected)):
|
||||
req.add_requirement(None, '1.0.2')
|
||||
|
||||
|
||||
def test_add_requirement_to_installed_collection_with_conflict_as_dep():
|
||||
source = 'https://galaxy.ansible.com'
|
||||
req = collection.CollectionRequirement('namespace', 'name', None, source, ['1.0.0', '1.0.1'], '*', False,
|
||||
skip=True)
|
||||
|
||||
expected = "Cannot meet requirement namespace.name:1.0.2 as it is already installed at version '1.0.1'. " \
|
||||
"Use --force-with-deps to overwrite"
|
||||
with pytest.raises(AnsibleError, match=re.escape(expected)):
|
||||
req.add_requirement('namespace.collection2', '1.0.2')
|
||||
|
||||
|
||||
def test_install_skipped_collection(monkeypatch):
|
||||
mock_display = MagicMock()
|
||||
monkeypatch.setattr(Display, 'display', mock_display)
|
||||
|
||||
req = collection.CollectionRequirement('namespace', 'name', None, 'source', ['1.0.0'], '*', False, skip=True)
|
||||
req.install(None, None)
|
||||
|
||||
assert mock_display.call_count == 1
|
||||
assert mock_display.mock_calls[0][1][0] == "Skipping 'namespace.name' as it is already installed"
|
||||
expected = "Nothing to do. All requested collections are already installed. If you want to reinstall them, consider using `--force`."
|
||||
assert mock_display.mock_calls[1][1][0] == expected
|
||||
|
||||
|
||||
def test_install_collection(collection_artifact, monkeypatch):
|
||||
|
@ -624,15 +727,17 @@ def test_install_collection(collection_artifact, monkeypatch):
|
|||
monkeypatch.setattr(Display, 'display', mock_display)
|
||||
|
||||
collection_tar = collection_artifact[1]
|
||||
output_path = os.path.join(os.path.split(collection_tar)[0], b'output')
|
||||
collection_path = os.path.join(output_path, b'ansible_namespace', b'collection')
|
||||
os.makedirs(os.path.join(collection_path, b'delete_me')) # Create a folder to verify the install cleans out the dir
|
||||
|
||||
temp_path = os.path.join(os.path.split(collection_tar)[0], b'temp')
|
||||
os.makedirs(temp_path)
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
|
||||
|
||||
req = collection.CollectionRequirement.from_tar(collection_tar, True, True)
|
||||
req.install(to_text(output_path), temp_path)
|
||||
output_path = os.path.join(os.path.split(collection_tar)[0])
|
||||
collection_path = os.path.join(output_path, b'ansible_namespace', b'collection')
|
||||
os.makedirs(os.path.join(collection_path, b'delete_me')) # Create a folder to verify the install cleans out the dir
|
||||
|
||||
candidate = Candidate('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file')
|
||||
collection.install(candidate, to_text(output_path), concrete_artifact_cm)
|
||||
|
||||
# Ensure the temp directory is empty, nothing is left behind
|
||||
assert os.listdir(temp_path) == []
|
||||
|
@ -649,33 +754,29 @@ def test_install_collection(collection_artifact, monkeypatch):
|
|||
assert mock_display.call_count == 2
|
||||
assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
|
||||
% to_text(collection_path)
|
||||
assert mock_display.mock_calls[1][1][0] == "ansible_namespace.collection (0.1.0) was installed successfully"
|
||||
assert mock_display.mock_calls[1][1][0] == "ansible_namespace.collection:0.1.0 was installed successfully"
|
||||
|
||||
|
||||
def test_install_collection_with_download(galaxy_server, collection_artifact, monkeypatch):
|
||||
collection_tar = collection_artifact[1]
|
||||
output_path = os.path.join(os.path.split(collection_tar)[0], b'output')
|
||||
collection_path = os.path.join(output_path, b'ansible_namespace', b'collection')
|
||||
collection_path, collection_tar = collection_artifact
|
||||
shutil.rmtree(collection_path)
|
||||
|
||||
collections_dir = ('%s' % os.path.sep).join(to_text(collection_path).split('%s' % os.path.sep)[:-2])
|
||||
|
||||
temp_path = os.path.join(os.path.split(collection_tar)[0], b'temp')
|
||||
os.makedirs(temp_path)
|
||||
|
||||
mock_display = MagicMock()
|
||||
monkeypatch.setattr(Display, 'display', mock_display)
|
||||
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
|
||||
|
||||
mock_download = MagicMock()
|
||||
mock_download.return_value = collection_tar
|
||||
monkeypatch.setattr(collection, '_download_file', mock_download)
|
||||
monkeypatch.setattr(concrete_artifact_cm, 'get_galaxy_artifact_path', mock_download)
|
||||
|
||||
monkeypatch.setattr(galaxy_server, '_available_api_versions', {'v2': 'v2/'})
|
||||
temp_path = os.path.join(os.path.split(collection_tar)[0], b'temp')
|
||||
os.makedirs(temp_path)
|
||||
|
||||
meta = api.CollectionVersionMetadata('ansible_namespace', 'collection', '0.1.0', 'https://downloadme.com',
|
||||
'myhash', {})
|
||||
req = collection.CollectionRequirement('ansible_namespace', 'collection', None, galaxy_server,
|
||||
['0.1.0'], '*', False, metadata=meta)
|
||||
req.install(to_text(output_path), temp_path)
|
||||
|
||||
# Ensure the temp directory is empty, nothing is left behind
|
||||
assert os.listdir(temp_path) == []
|
||||
req = Requirement('ansible_namespace.collection', '0.1.0', 'https://downloadme.com', 'galaxy')
|
||||
collection.install(req, to_text(collections_dir), concrete_artifact_cm)
|
||||
|
||||
actual_files = os.listdir(collection_path)
|
||||
actual_files.sort()
|
||||
|
@ -685,13 +786,11 @@ def test_install_collection_with_download(galaxy_server, collection_artifact, mo
|
|||
assert mock_display.call_count == 2
|
||||
assert mock_display.mock_calls[0][1][0] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" \
|
||||
% to_text(collection_path)
|
||||
assert mock_display.mock_calls[1][1][0] == "ansible_namespace.collection (0.1.0) was installed successfully"
|
||||
assert mock_display.mock_calls[1][1][0] == "ansible_namespace.collection:0.1.0 was installed successfully"
|
||||
|
||||
assert mock_download.call_count == 1
|
||||
assert mock_download.mock_calls[0][1][0] == 'https://downloadme.com'
|
||||
assert mock_download.mock_calls[0][1][1] == temp_path
|
||||
assert mock_download.mock_calls[0][1][2] == 'myhash'
|
||||
assert mock_download.mock_calls[0][1][3] is True
|
||||
assert mock_download.mock_calls[0][1][0].src == 'https://downloadme.com'
|
||||
assert mock_download.mock_calls[0][1][0].type == 'galaxy'
|
||||
|
||||
|
||||
def test_install_collections_from_tar(collection_artifact, monkeypatch):
|
||||
|
@ -702,8 +801,10 @@ def test_install_collections_from_tar(collection_artifact, monkeypatch):
|
|||
mock_display = MagicMock()
|
||||
monkeypatch.setattr(Display, 'display', mock_display)
|
||||
|
||||
collection.install_collections([(to_text(collection_tar), '*', None, None)], to_text(temp_path),
|
||||
[u'https://galaxy.ansible.com'], True, False, False, False, False)
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
|
||||
|
||||
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file')]
|
||||
collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, concrete_artifact_cm)
|
||||
|
||||
assert os.path.isdir(collection_path)
|
||||
|
||||
|
@ -734,9 +835,12 @@ def test_install_collections_existing_without_force(collection_artifact, monkeyp
|
|||
mock_display = MagicMock()
|
||||
monkeypatch.setattr(Display, 'display', mock_display)
|
||||
|
||||
# If we don't delete collection_path it will think the original build skeleton is installed so we expect a skip
|
||||
collection.install_collections([(to_text(collection_tar), '*', None, None)], to_text(temp_path),
|
||||
[u'https://galaxy.ansible.com'], True, False, False, False, False)
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
|
||||
|
||||
assert os.path.isdir(collection_path)
|
||||
|
||||
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file')]
|
||||
collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, concrete_artifact_cm)
|
||||
|
||||
assert os.path.isdir(collection_path)
|
||||
|
||||
|
@ -746,11 +850,9 @@ def test_install_collections_existing_without_force(collection_artifact, monkeyp
|
|||
|
||||
# Filter out the progress cursor display calls.
|
||||
display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2] and len(m[1]) == 1]
|
||||
assert len(display_msgs) == 3
|
||||
assert len(display_msgs) == 1
|
||||
|
||||
assert display_msgs[0] == "Process install dependency map"
|
||||
assert display_msgs[1] == "Starting collection install process"
|
||||
assert display_msgs[2] == "Skipping 'ansible_namespace.collection' as it is already installed"
|
||||
assert display_msgs[0] == 'Nothing to do. All requested collections are already installed. If you want to reinstall them, consider using `--force`.'
|
||||
|
||||
for msg in display_msgs:
|
||||
assert 'WARNING' not in msg
|
||||
|
@ -768,8 +870,9 @@ def test_install_missing_metadata_warning(collection_artifact, monkeypatch):
|
|||
if os.path.isfile(b_path):
|
||||
os.unlink(b_path)
|
||||
|
||||
collection.install_collections([(to_text(collection_tar), '*', None, None)], to_text(temp_path),
|
||||
[u'https://galaxy.ansible.com'], True, False, False, False, False)
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
|
||||
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file')]
|
||||
collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, concrete_artifact_cm)
|
||||
|
||||
display_msgs = [m[1][0] for m in mock_display.mock_calls if 'newline' not in m[2] and len(m[1]) == 1]
|
||||
|
||||
|
@ -788,8 +891,9 @@ def test_install_collection_with_circular_dependency(collection_artifact, monkey
|
|||
mock_display = MagicMock()
|
||||
monkeypatch.setattr(Display, 'display', mock_display)
|
||||
|
||||
collection.install_collections([(to_text(collection_tar), '*', None, None)], to_text(temp_path),
|
||||
[u'https://galaxy.ansible.com'], True, False, False, False, False)
|
||||
concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(temp_path, validate_certs=False)
|
||||
requirements = [Requirement('ansible_namespace.collection', '0.1.0', to_text(collection_tar), 'file')]
|
||||
collection.install_collections(requirements, to_text(temp_path), [], False, False, False, False, False, concrete_artifact_cm)
|
||||
|
||||
assert os.path.isdir(collection_path)
|
||||
|
||||
|
@ -811,4 +915,4 @@ def test_install_collection_with_circular_dependency(collection_artifact, monkey
|
|||
assert display_msgs[0] == "Process install dependency map"
|
||||
assert display_msgs[1] == "Starting collection install process"
|
||||
assert display_msgs[2] == "Installing 'ansible_namespace.collection:0.1.0' to '%s'" % to_text(collection_path)
|
||||
assert display_msgs[3] == "ansible_namespace.collection (0.1.0) was installed successfully"
|
||||
assert display_msgs[3] == "ansible_namespace.collection:0.1.0 was installed successfully"
|
||||
|
|
Loading…
Reference in a new issue