support hard coded module_defaults.yml groups for collections (#69919)
* Only allow groups which were hard-coded in module_defaults.yml; only load action groups from the collection if module_defaults contains a potential group for the action.
* Fix tests using modules that override those whitelisted in lib/ansible/config/module_defaults.yml. Third-party modules should not be using group/ - use the action name instead.
* Add externalized module_defaults tests; add the missing group and collections.
* Add changelog.
* Fix import in tests.
* Update with requested changes.
* Don't traceback, since we don't validate the contents of module_defaults.

Co-authored-by: Matt Davis <mrd@redhat.com>
This commit is contained in:
parent a862ff2d43
commit 51f6d129cb

31 changed files with 341 additions and 1613 deletions
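In plain terms: a group/<name> entry in module_defaults only applies to a relocated action when one of that action's candidate (redirected) names appears in the owning collection's action_groups metadata, and the collection metadata is only consulted at all when module_defaults actually contains a group/ key. A minimal sketch of that rule follows (not the shipped implementation; the module and group names are illustrative):

# Minimal sketch of the rule described above; names and values are illustrative.
def group_defaults_apply(module_defaults, group_name, group_members, redirected_names):
    """Should the defaults under 'group/<group_name>' apply to this action?"""
    if 'group/%s' % group_name not in module_defaults:
        # no potential group for this action, so no collection metadata is loaded
        return False
    # group_members stands in for the collection's action_groups metadata
    return any(name in group_members for name in redirected_names)


# a short module name that redirects into a collection keeps both spellings
module_defaults = {'group/aws': {'region': 'us-east-2'}}
candidates = ['aws_s3', 'amazon.aws.aws_s3']
members = {'amazon.aws.aws_s3'}  # hypothetical action_groups entry
print(group_defaults_apply(module_defaults, 'aws', members, candidates))  # True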
@@ -0,0 +1,3 @@
bugfixes:
- module_defaults - support short group names for content relocated to collections
- module_defaults - support candidate action names for relocated content

@@ -289,12 +289,6 @@ class ConfigManager(object):

        # update constants
        self.update_config_data()
        try:
            self.update_module_defaults_groups()
        except Exception as e:
            # Since this is a 2.7 preview feature, we want to have it fail as gracefully as possible when there are issues.
            sys.stderr.write('Could not load module_defaults_groups: %s: %s\n\n' % (type(e).__name__, e))
            self.module_defaults_groups = {}

    def _read_config_yaml_file(self, yml_file):
        # TODO: handle relative paths as relative to the directory containing the current playbook instead of CWD

@@ -525,14 +519,6 @@ class ConfigManager(object):

        self._plugins[plugin_type][name] = defs

    def update_module_defaults_groups(self):
        defaults_config = self._read_config_yaml_file(
            '%s/module_defaults.yml' % os.path.join(os.path.dirname(__file__))
        )
        if defaults_config.get('version') not in ('1', '1.0', 1, 1.0):
            raise AnsibleError('module_defaults.yml has an invalid version "%s" for configuration. Could be a bad install.' % defaults_config.get('version'))
        self.module_defaults_groups = defaults_config.get('groupings', {})

    def update_config_data(self, defs=None, configfile=None):
        ''' really: update constants '''
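For context, the removed update_module_defaults_groups expected lib/ansible/config/module_defaults.yml to carry a version marker plus a groupings map of action name to group names. A rough sketch of that shape and the version check, with illustrative module names (not the file's actual contents):

# Sketch of the structure the removed loader consumed; module names are examples only.
legacy_defaults_config = {
    'version': '1.0',
    'groupings': {
        # action name -> list of group names it belongs to
        'cloudformation': ['aws'],
        'ec2_instance': ['aws'],
        'docker_container': ['docker'],
    },
}

if legacy_defaults_config.get('version') not in ('1', '1.0', 1, 1.0):
    raise ValueError('unsupported module_defaults.yml version')

module_defaults_groups = legacy_defaults_config.get('groupings', {})
print(module_defaults_groups.get('cloudformation', []))  # ['aws']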
File diff suppressed because it is too large
@@ -38,7 +38,7 @@ from ansible.executor.interpreter_discovery import InterpreterDiscoveryRequiredE
from ansible.executor.powershell import module_manifest as ps_manifest
from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
from ansible.plugins.loader import module_utils_loader
from ansible.utils.collection_loader._collection_finder import _get_collection_metadata
from ansible.utils.collection_loader._collection_finder import _get_collection_metadata, AnsibleCollectionRef

# Must import strategy and use write_locks from there
# If we import write_locks directly then we end up binding a

@@ -1319,7 +1319,23 @@ def modify_module(module_name, module_path, module_args, templar, task_vars=None
    return (b_module_data, module_style, shebang)


def get_action_args_with_defaults(action, args, defaults, templar):
def get_action_args_with_defaults(action, args, defaults, templar, redirected_names=None):
    group_collection_map = {
        'acme': ['community.crypto'],
        'aws': ['amazon.aws', 'community.aws'],
        'azure': ['azure.azcollection'],
        'cpm': ['wti.remote'],
        'docker': ['community.general'],
        'gcp': ['google.cloud'],
        'k8s': ['community.kubernetes', 'community.general'],
        'os': ['openstack.cloud'],
        'ovirt': ['ovirt.ovirt', 'community.general'],
        'vmware': ['community.vmware'],
        'testgroup': ['testns.testcoll', 'testns.othercoll', 'testns.boguscoll']
    }

    if not redirected_names:
        redirected_names = [action]

    tmp_args = {}
    module_defaults = {}

@@ -1334,11 +1350,24 @@ def get_action_args_with_defaults(action, args, defaults, templar):
    module_defaults = templar.template(module_defaults)

    # deal with configured group defaults first
    if action in C.config.module_defaults_groups:
        for group in C.config.module_defaults_groups.get(action, []):
            tmp_args.update((module_defaults.get('group/{0}'.format(group)) or {}).copy())
    for default in module_defaults:
        if not default.startswith('group/'):
            continue

        group_name = default.split('group/')[-1]

        for collection_name in group_collection_map.get(group_name, []):
            try:
                action_group = _get_collection_metadata(collection_name).get('action_groups', {})
            except ValueError:
                # The collection may not be installed
                continue

            if any(name for name in redirected_names if name in action_group):
                tmp_args.update((module_defaults.get('group/%s' % group_name) or {}).copy())

    # handle specific action defaults
    for action in redirected_names:
        if action in module_defaults:
            tmp_args.update(module_defaults[action].copy())
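The walk below condenses what the new loop does for a single group/<name> key: consult the candidate collections for that group, skip any that are not installed, and apply the group's defaults when one of the action's candidate names is a member. The metadata dict is a stand-in for _get_collection_metadata(), and the collection and action names are illustrative:

# Simplified stand-in for the group lookup; fake_metadata replaces _get_collection_metadata().
group_collection_map = {'aws': ['amazon.aws', 'community.aws']}

fake_metadata = {
    # collection -> metadata derived from its meta/runtime.yml
    'amazon.aws': {'action_groups': {'amazon.aws.aws_s3': ['aws']}},
    # 'community.aws' intentionally missing: an uninstalled collection is simply skipped
}


def group_matches(group_name, redirected_names):
    for collection_name in group_collection_map.get(group_name, []):
        action_groups = fake_metadata.get(collection_name, {}).get('action_groups', {})
        if any(name in action_groups for name in redirected_names):
            return True
    return False


print(group_matches('aws', ['aws_s3', 'amazon.aws.aws_s3']))   # True
print(group_matches('aws', ['ping', 'ansible.builtin.ping']))  # False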
@@ -631,7 +631,9 @@ class TaskExecutor:
        self._handler = self._get_action_handler(connection=self._connection, templar=templar)

        # Apply default params for action/module, if present
        self._task.args = get_action_args_with_defaults(self._task.action, self._task.args, self._task.module_defaults, templar)
        self._task.args = get_action_args_with_defaults(
            self._task.action, self._task.args, self._task.module_defaults, templar, self._task._ansible_internal_redirect_list
        )

        # And filter out any fields which were set to default(omit), and got the omit token value
        omit_token = variables.get('omit')
@@ -119,6 +119,8 @@ class ModuleArgsParser:
        self._task_attrs.update(['local_action', 'static'])
        self._task_attrs = frozenset(self._task_attrs)

        self.internal_redirect_list = []

    def _split_module_string(self, module_string):
        '''
        when module names are expressed like:

@@ -266,6 +268,8 @@ class ModuleArgsParser:
        delegate_to = self._task_ds.get('delegate_to', Sentinel)
        args = dict()

        self.internal_redirect_list = []

        # This is the standard YAML form for command-type modules. We grab
        # the args and pass them in as additional arguments, which can/will
        # be overwritten via dict updates from the other arg sources below

@@ -294,8 +298,24 @@ class ModuleArgsParser:

        # walk the filtered input dictionary to see if we recognize a module name
        for item, value in iteritems(non_task_ds):
            if item in BUILTIN_TASKS or skip_action_validation or action_loader.has_plugin(item, collection_list=self._collection_list) or \
                    module_loader.has_plugin(item, collection_list=self._collection_list):
            is_action_candidate = False
            if item in BUILTIN_TASKS:
                is_action_candidate = True
            elif skip_action_validation:
                is_action_candidate = True
            else:
                # If the plugin is resolved and redirected smuggle the list of candidate names via the task attribute 'internal_redirect_list'
                context = action_loader.find_plugin_with_context(item, collection_list=self._collection_list)
                if not context.resolved:
                    context = module_loader.find_plugin_with_context(item, collection_list=self._collection_list)
                    if context.resolved and context.redirect_list:
                        self.internal_redirect_list = context.redirect_list
                elif context.redirect_list:
                    self.internal_redirect_list = context.redirect_list

                is_action_candidate = bool(self.internal_redirect_list)

            if is_action_candidate:
                # finding more than one module name is a problem
                if action is not None:
                    raise AnsibleParserError("conflicting action statements: %s, %s" % (action, item), obj=self._task_ds)
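The parser change above exists to capture the loader's redirect chain as the task's candidate action names. A stand-in illustration of that selection logic (the context objects here are fakes; the real ones come from find_plugin_with_context()):

# Fakes standing in for the load-context objects returned by the loaders.
class FakeContext:
    def __init__(self, resolved, redirect_list):
        self.resolved = resolved
        self.redirect_list = redirect_list


def candidate_names(action_ctx, module_ctx):
    """Mirror the parser: try the action loader first, then fall back to the module loader."""
    internal_redirect_list = []
    if not action_ctx.resolved:
        if module_ctx.resolved and module_ctx.redirect_list:
            internal_redirect_list = module_ctx.redirect_list
    elif action_ctx.redirect_list:
        internal_redirect_list = action_ctx.redirect_list
    return internal_redirect_list


# e.g. a short name that redirects into a collection keeps both spellings
action_ctx = FakeContext(resolved=False, redirect_list=[])
module_ctx = FakeContext(resolved=True, redirect_list=['aws_s3', 'amazon.aws.aws_s3'])
print(candidate_names(action_ctx, module_ctx))  # ['aws_s3', 'amazon.aws.aws_s3']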
@@ -91,6 +91,10 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
    def __init__(self, block=None, role=None, task_include=None):
        ''' constructors a task, without the Task.load classmethod, it will be pretty blank '''

        # This is a reference of all the candidate action names for transparent execution of module_defaults with redirected content
        # This isn't a FieldAttribute to prevent it from being set via the playbook
        self._ansible_internal_redirect_list = []

        self._role = role
        self._parent = None

@@ -220,6 +224,8 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
                raise
            # But if it wasn't, we can add the yaml object now to get more detail
            raise AnsibleParserError(to_native(e), obj=ds, orig_exc=e)
        else:
            self._ansible_internal_redirect_list = args_parser.internal_redirect_list[:]

        # the command/shell/script modules used to support the `cmd` arg,
        # which corresponds to what we now call _raw_params, so move that

@@ -394,6 +400,9 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
    def copy(self, exclude_parent=False, exclude_tasks=False):
        new_me = super(Task, self).copy()

        # if the task has an associated list of candidate names, copy it to the new object too
        new_me._ansible_internal_redirect_list = self._ansible_internal_redirect_list[:]

        new_me._parent = None
        if self._parent and not exclude_parent:
            new_me._parent = self._parent.copy(exclude_tasks=exclude_tasks)

@@ -415,6 +424,9 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
        if self._role:
            data['role'] = self._role.serialize()

        if self._ansible_internal_redirect_list:
            data['_ansible_internal_redirect_list'] = self._ansible_internal_redirect_list[:]

        return data

    def deserialize(self, data):

@@ -443,6 +455,8 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
            self._role = r
            del data['role']

        self._ansible_internal_redirect_list = data.get('_ansible_internal_redirect_list', [])

        super(Task, self).deserialize(data)

    def set_loader(self, loader):
@@ -42,7 +42,7 @@ class ActionModule(ActionBase):
        mod_args = dict((k, v) for k, v in mod_args.items() if v is not None)

        # handle module defaults
        mod_args = get_action_args_with_defaults(fact_module, mod_args, self._task.module_defaults, self._templar)
        mod_args = get_action_args_with_defaults(fact_module, mod_args, self._task.module_defaults, self._templar, self._task._ansible_internal_redirect_list)

        return mod_args

@@ -66,7 +66,9 @@ class ActionModule(ActionBase):
            del new_module_args['use']

        # get defaults for specific module
        new_module_args = get_action_args_with_defaults(module, new_module_args, self._task.module_defaults, self._templar)
        new_module_args = get_action_args_with_defaults(
            module, new_module_args, self._task.module_defaults, self._templar, self._task._ansible_internal_redirect_list
        )

        display.vvvv("Running %s" % module)
        result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async_val))

@@ -73,7 +73,9 @@ class ActionModule(ActionBase):
                self._display.warning('Ignoring "%s" as it is not used in "%s"' % (unused, module))

        # get defaults for specific module
        new_module_args = get_action_args_with_defaults(module, new_module_args, self._task.module_defaults, self._templar)
        new_module_args = get_action_args_with_defaults(
            module, new_module_args, self._task.module_defaults, self._templar, self._task._ansible_internal_redirect_list
        )

        self._display.vvvv("Running %s" % module)
        result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async_val))
@@ -587,6 +587,10 @@ class PluginLoader:
                else:
                    # 'ansible.builtin' should be handled here. This means only internal, or builtin, paths are searched.
                    plugin_load_context = self._find_fq_plugin(candidate_name, suffix, plugin_load_context=plugin_load_context)

                    if candidate_name != plugin_load_context.original_name and candidate_name not in plugin_load_context.redirect_list:
                        plugin_load_context.redirect_list.append(candidate_name)

                if plugin_load_context.resolved or plugin_load_context.pending_redirect:  # if we got an answer or need to chase down a redirect, return
                    return plugin_load_context
            except (AnsiblePluginRemovedError, AnsiblePluginCircularRedirect, AnsibleCollectionUnsupportedVersionError):
@@ -531,6 +531,15 @@ class _AnsibleCollectionPkgLoader(_AnsibleCollectionPkgLoaderBase):
        # if redirect.startswith('..'):
        #     redirect = redirect[2:]

        action_groups = meta_dict.pop('action_groups', {})
        meta_dict['action_groups'] = {}
        for group_name in action_groups:
            for action_name in action_groups[group_name]:
                if action_name in meta_dict['action_groups']:
                    meta_dict['action_groups'][action_name].append(group_name)
                else:
                    meta_dict['action_groups'][action_name] = [group_name]

        return meta_dict
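The loader hunk above inverts the action_groups mapping declared in a collection's meta/runtime.yml, turning group -> [actions] into action -> [groups] so membership checks by action name become a plain dict lookup. A minimal sketch of that inversion, using the test collection's data:

# group -> [actions], as declared in meta/runtime.yml
declared = {
    'testgroup': ['testns.testcoll.echo1', 'testns.testcoll.echo2'],
}

# invert into action -> [groups], mirroring the loader hunk above
inverted = {}
for group_name, actions in declared.items():
    for action_name in actions:
        inverted.setdefault(action_name, []).append(group_name)

print(inverted)
# {'testns.testcoll.echo1': ['testgroup'], 'testns.testcoll.echo2': ['testgroup']}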
@@ -2,7 +2,17 @@

- name: Integration test for AWS Step Function state machine module
  module_defaults:
    group/aws:
    iam_role:
      aws_access_key: "{{ aws_access_key }}"
      aws_secret_key: "{{ aws_secret_key }}"
      security_token: "{{ security_token | default(omit) }}"
      region: "{{ aws_region }}"
    aws_step_functions_state_machine:
      aws_access_key: "{{ aws_access_key }}"
      aws_secret_key: "{{ aws_secret_key }}"
      security_token: "{{ security_token | default(omit) }}"
      region: "{{ aws_region }}"
    aws_step_functions_state_machine_execution:
      aws_access_key: "{{ aws_access_key }}"
      aws_secret_key: "{{ aws_secret_key }}"
      security_token: "{{ security_token | default(omit) }}"

@@ -1,17 +1,25 @@
---
- name: set up aws connection info
  set_fact:
    aws_connection_info: &aws_connection_info
      aws_access_key: "{{ aws_access_key | default(omit) }}"
      aws_secret_key: "{{ aws_secret_key | default(omit) }}"
      security_token: "{{ security_token | default(omit) }}"
      region: "{{ aws_region | default(omit) }}"
  no_log: yes

- module_defaults:
    group/aws:
      aws_access_key: '{{ aws_access_key | default(omit) }}'
      aws_secret_key: '{{ aws_secret_key | default(omit) }}'
      security_token: '{{ security_token | default(omit) }}'
      region: '{{ aws_region | default(omit) }}'
    cloudformation:
      <<: *aws_connection_info
    cloudformation_info:
      <<: *aws_connection_info

  block:

    # ==== Env setup ==========================================================
    - name: list available AZs
      aws_az_info:
        <<: *aws_connection_info
      register: region_azs

    - name: pick an AZ for testing

@@ -24,6 +32,7 @@
        cidr_block: "{{ vpc_cidr }}"
        tags:
          Name: Cloudformation testing
        <<: *aws_connection_info
      register: testing_vpc

    - name: Create a test subnet

@@ -31,6 +40,7 @@
        vpc_id: "{{ testing_vpc.vpc.id }}"
        cidr: "{{ subnet_cidr }}"
        az: "{{ availability_zone }}"
        <<: *aws_connection_info
      register: testing_subnet

    - name: Find AMI to use

@@ -38,6 +48,7 @@
        owners: 'amazon'
        filters:
          name: '{{ ec2_ami_name }}'
        <<: *aws_connection_info
      register: ec2_amis

    - name: Set fact with latest AMI

@@ -453,6 +464,7 @@
        vpc_id: "{{ testing_vpc.vpc.id }}"
        cidr: "{{ subnet_cidr }}"
        state: absent
        <<: *aws_connection_info
      ignore_errors: yes

    - name: Delete test VPC

@@ -460,4 +472,5 @@
        name: "{{ vpc_name }}"
        cidr_block: "{{ vpc_cidr }}"
        state: absent
        <<: *aws_connection_info
      ignore_errors: yes
@@ -9,7 +9,7 @@
  gather_facts: no
  tasks:
    - module_defaults:
        group/aws:
        ec2_ami_info:
          aws_access_key: "{{ aws_access_key }}"
          aws_secret_key: "{{ aws_secret_key }}"
          security_token: "{{ security_token | default(omit) }}"

@@ -1,9 +1,19 @@
- name: set up aws connection info
  set_fact:
    aws_connection_info: &aws_connection_info
      aws_access_key: "{{ aws_access_key }}"
      aws_secret_key: "{{ aws_secret_key }}"
      security_token: "{{ security_token | default(omit) }}"
      region: "{{ aws_region }}"
  no_log: yes

- name: "remove Instances"
  ec2_instance:
    state: absent
    filters:
      vpc-id: "{{ testing_vpc.vpc.id }}"
    wait: yes
    <<: *aws_connection_info
  ignore_errors: yes
  retries: 10

@@ -11,12 +21,14 @@
  ec2_eni_info:
    filters:
      vpc-id: "{{ testing_vpc.vpc.id }}"
    <<: *aws_connection_info
  register: enis

- name: "delete all ENIs"
  ec2_eni:
    state: absent
    eni_id: "{{ item.id }}"
    <<: *aws_connection_info
  until: removed is not failed
  with_items: "{{ enis.network_interfaces }}"
  ignore_errors: yes

@@ -28,6 +40,7 @@
    name: "{{ resource_prefix }}-sg"
    description: a security group for ansible tests
    vpc_id: "{{ testing_vpc.vpc.id }}"
    <<: *aws_connection_info
  register: removed
  until: removed is not failed
  ignore_errors: yes

@@ -45,6 +58,7 @@
    subnets:
      - "{{ testing_subnet_a.subnet.id }}"
      - "{{ testing_subnet_b.subnet.id }}"
    <<: *aws_connection_info
  register: removed
  until: removed is not failed
  ignore_errors: yes

@@ -54,6 +68,7 @@
  ec2_vpc_igw:
    state: absent
    vpc_id: "{{ testing_vpc.vpc.id }}"
    <<: *aws_connection_info
  register: removed
  until: removed is not failed
  ignore_errors: yes

@@ -64,6 +79,7 @@
    state: absent
    vpc_id: "{{ testing_vpc.vpc.id }}"
    cidr: "{{ subnet_a_cidr }}"
    <<: *aws_connection_info
  register: removed
  until: removed is not failed
  ignore_errors: yes

@@ -74,6 +90,7 @@
    state: absent
    vpc_id: "{{ testing_vpc.vpc.id }}"
    cidr: "{{ subnet_b_cidr }}"
    <<: *aws_connection_info
  register: removed
  until: removed is not failed
  ignore_errors: yes

@@ -87,6 +104,7 @@
    tags:
      Name: Ansible Testing VPC
    tenancy: default
    <<: *aws_connection_info
  register: removed
  until: removed is not failed
  ignore_errors: yes

@@ -1,7 +1,17 @@
- name: set up aws connection info
  set_fact:
    aws_connection_info: &aws_connection_info
      aws_access_key: "{{ aws_access_key }}"
      aws_secret_key: "{{ aws_secret_key }}"
      security_token: "{{ security_token | default(omit) }}"
      region: "{{ aws_region }}"
  no_log: yes

- run_once: '{{ setup_run_once | default("no") | bool }}'
  block:
    - name: "fetch AZ availability"
      aws_az_info:
        <<: *aws_connection_info
      register: az_info
    - name: "Assert that we have multiple AZs available to us"
      assert:

@@ -20,12 +30,14 @@
        tags:
          Name: Ansible ec2_instance Testing VPC
        tenancy: default
        <<: *aws_connection_info
      register: testing_vpc

    - name: "Create internet gateway for use in testing"
      ec2_vpc_igw:
        state: present
        vpc_id: "{{ testing_vpc.vpc.id }}"
        <<: *aws_connection_info
      register: igw

    - name: "Create default subnet in zone A"

@@ -36,6 +48,7 @@
        az: "{{ subnet_a_az }}"
        resource_tags:
          Name: "{{ resource_prefix }}-subnet-a"
        <<: *aws_connection_info
      register: testing_subnet_a

    - name: "Create secondary subnet in zone B"

@@ -46,6 +59,7 @@
        az: "{{ subnet_b_az }}"
        resource_tags:
          Name: "{{ resource_prefix }}-subnet-b"
        <<: *aws_connection_info
      register: testing_subnet_b

    - name: "create routing rules"

@@ -60,6 +74,7 @@
        subnets:
          - "{{ testing_subnet_a.subnet.id }}"
          - "{{ testing_subnet_b.subnet.id }}"
        <<: *aws_connection_info

    - name: "create a security group with the vpc"
      ec2_group:

@@ -76,4 +91,5 @@
        from_port: 80
        to_port: 80
        cidr_ip: 0.0.0.0/0
        <<: *aws_connection_info
      register: sg

@@ -17,13 +17,27 @@
# - EC2_REGION -> AWS_REGION
#

- name: "Wrap up all tests and setup AWS credentials"
  module_defaults:
    group/aws:
- name: set up aws connection info
  set_fact:
    aws_connection_info: &aws_connection_info
      aws_access_key: "{{ aws_access_key }}"
      aws_secret_key: "{{ aws_secret_key }}"
      security_token: "{{ security_token | default(omit) }}"
      region: "{{ aws_region }}"
  no_log: yes

- name: "Wrap up all tests and setup AWS credentials"
  module_defaults:
    ec2_instance:
      <<: *aws_connection_info
    ec2_instance_info:
      <<: *aws_connection_info
    ec2_key:
      <<: *aws_connection_info
    ec2_eni:
      <<: *aws_connection_info
    iam_role:
      <<: *aws_connection_info
  block:
    - debug:
        msg: "{{ inventory_hostname }} start: {{ lookup('pipe','date') }}"
@@ -0,0 +1,8 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible_collections.testns.testcoll.plugins.action.echoaction import ActionModule as BaseAM


class ActionModule(BaseAM):
    pass

@@ -0,0 +1,13 @@
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible_collections.testns.testcoll.plugins.module_utils.echo_impl import do_echo


def main():
    do_echo()


if __name__ == '__main__':
    main()

@@ -0,0 +1,9 @@
action_groups:
  testgroup:
    - testns.testcoll.echo1
    - testns.testcoll.echo2
    # note we can define defaults for an action
    - testns.testcoll.echoaction
    # note we can define defaults in this group for actions/modules in another collection
    - testns.othercoll.other_echoaction
    - testns.othercoll.other_echo1

@@ -0,0 +1,19 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible.plugins.action import ActionBase


class ActionModule(ActionBase):
    TRANSFERS_FILES = False
    _VALID_ARGS = frozenset()

    def run(self, tmp=None, task_vars=None):
        if task_vars is None:
            task_vars = dict()

        result = super(ActionModule, self).run(None, task_vars)

        result = dict(changed=False, args_in=self._task.args)

        return result

@@ -0,0 +1,15 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json
from ansible.module_utils import basic
from ansible.module_utils.basic import _load_params, AnsibleModule


def do_echo():
    p = _load_params()
    d = json.loads(basic._ANSIBLE_ARGS)
    d['ANSIBLE_MODULE_ARGS'] = {}
    basic._ANSIBLE_ARGS = json.dumps(d).encode('utf-8')
    module = AnsibleModule(argument_spec={})
    module.exit_json(args_in=p)

@@ -0,0 +1,13 @@
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible_collections.testns.testcoll.plugins.module_utils.echo_impl import do_echo


def main():
    do_echo()


if __name__ == '__main__':
    main()

@@ -0,0 +1,13 @@
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible_collections.testns.testcoll.plugins.module_utils.echo_impl import do_echo


def main():
    do_echo()


if __name__ == '__main__':
    main()
test/integration/targets/module_defaults/runme.sh (new executable file, 5 lines)

@@ -0,0 +1,5 @@
#!/usr/bin/env bash

set -eux

ansible-playbook test_defaults.yml "$@"
@@ -87,30 +87,3 @@
- assert:
    that:
      foo.msg == "Hello world!"
- name: Module group defaults block
  module_defaults:
    group/test:
      arg1: "test1"
      arg2: "test2"
  block:
    - test_module_defaults:
      register: result
    - assert:
        that:
          - "result.test_module_defaults.arg1 == 'test1'"
          - "result.test_module_defaults.arg2 == 'test2'"
          - "result.test_module_defaults.arg3 == 'default3'"
- name: Module group defaults block
  module_defaults:
    group/test:
      arg1: "test1"
      arg2: "test2"
      arg3: "test3"
  block:
    - test_module_defaults:
      register: result
    - assert:
        that:
          - "result.test_module_defaults.arg1 == 'test1'"
          - "result.test_module_defaults.arg2 == 'test2'"
          - "result.test_module_defaults.arg3 == 'test3'"
test/integration/targets/module_defaults/test_defaults.yml (new file, 60 lines)

@@ -0,0 +1,60 @@
- hosts: localhost
  gather_facts: no
  collections:
    - testns.testcoll
    - testns.othercoll
  module_defaults:
    testns.testcoll.echoaction:
      explicit_module_default: from playbook
    testns.testcoll.echo1:
      explicit_module_default: from playbook
    group/testgroup:
      group_module_default: from playbook
  tasks:
    - testns.testcoll.echoaction:
        task_arg: from task
      register: echoaction_fq
    - echoaction:
        task_arg: from task
      register: echoaction_unq
    - testns.testcoll.echo1:
        task_arg: from task
      register: echo1_fq
    - echo1:
        task_arg: from task
      register: echo1_unq
    - testns.testcoll.echo2:
        task_arg: from task
      register: echo2_fq
    - echo2:
        task_arg: from task
      register: echo2_unq
    - testns.othercoll.other_echoaction:
        task_arg: from task
      register: other_echoaction_fq
    - other_echoaction:
        task_arg: from task
      register: other_echoaction_unq
    - testns.othercoll.other_echo1:
        task_arg: from task
      register: other_echo1_fq
    - other_echo1:
        task_arg: from task
      register: other_echo1_unq

    - debug: var=echo1_fq

    - assert:
        that:
          - "echoaction_fq.args_in == {'task_arg': 'from task', 'explicit_module_default': 'from playbook', 'group_module_default': 'from playbook' }"
          - "echoaction_unq.args_in == {'task_arg': 'from task', 'explicit_module_default': 'from playbook', 'group_module_default': 'from playbook' }"
          - "echo1_fq.args_in == {'task_arg': 'from task', 'explicit_module_default': 'from playbook', 'group_module_default': 'from playbook' }"
          - "echo1_unq.args_in == {'task_arg': 'from task', 'explicit_module_default': 'from playbook', 'group_module_default': 'from playbook' }"
          - "echo2_fq.args_in == {'task_arg': 'from task', 'group_module_default': 'from playbook' }"
          - "echo2_unq.args_in == {'task_arg': 'from task', 'group_module_default': 'from playbook' }"
          - "other_echoaction_fq.args_in == {'task_arg': 'from task', 'group_module_default': 'from playbook' }"
          - "other_echoaction_unq.args_in == {'task_arg': 'from task', 'group_module_default': 'from playbook' }"
          - "other_echo1_fq.args_in == {'task_arg': 'from task', 'group_module_default': 'from playbook' }"
          - "other_echo1_unq.args_in == {'task_arg': 'from task', 'group_module_default': 'from playbook' }"

    - include_tasks: tasks/main.yml
@@ -220,7 +220,7 @@ class VcenterEnvironment(CloudEnvironment):
            env_vars=env_vars,
            ansible_vars=ansible_vars,
            module_defaults={
                'group/vmware': {
                'vmware_guest': {
                    'hostname': ansible_vars['vcenter_hostname'],
                    'username': ansible_vars['vcenter_username'],
                    'password': ansible_vars['vcenter_password'],
@@ -57,7 +57,6 @@ lib/ansible/cli/console.py pylint:blacklisted-name
lib/ansible/cli/scripts/ansible_cli_stub.py shebang
lib/ansible/cli/scripts/ansible_connection_cli_stub.py shebang
lib/ansible/config/base.yml no-unwanted-files
lib/ansible/config/module_defaults.yml no-unwanted-files
lib/ansible/executor/playbook_executor.py pylint:blacklisted-name
lib/ansible/executor/powershell/async_watchdog.ps1 pslint:PSCustomUseLiteralPath
lib/ansible/executor/powershell/async_wrapper.ps1 pslint:PSCustomUseLiteralPath
@@ -19,12 +19,13 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from units.compat import unittest
from units.compat.mock import MagicMock
from units.compat.mock import MagicMock, patch

from ansible import constants as C
from ansible.plugins.action.gather_facts import ActionModule
from ansible.playbook.task import Task
from ansible.template import Templar
import ansible.executor.module_common as module_common

from units.mock.loader import DictDataLoader

@@ -48,6 +49,7 @@ class TestNetworkFacts(unittest.TestCase):
        self.task_vars = {'ansible_network_os': 'ios'}
        self.task.action = 'gather_facts'
        self.task.async_val = False
        self.task._ansible_internal_redirect_list = []
        self.task.args = {'gather_subset': 'min'}
        self.task.module_defaults = [{'ios_facts': {'gather_subset': 'min'}}]

@@ -63,9 +65,11 @@ class TestNetworkFacts(unittest.TestCase):
        facts_modules = C.config.get_config_value('FACTS_MODULES', variables=self.task_vars)
        self.assertEqual(facts_modules, ['ios_facts'])

    def test_network_gather_facts_fqcn(self):
    @patch.object(module_common, '_get_collection_metadata', return_value={})
    def test_network_gather_facts_fqcn(self, mock_collection_metadata):
        self.fqcn_task_vars = {'ansible_network_os': 'cisco.ios.ios'}
        self.task.action = 'gather_facts'
        self.task._ansible_internal_redirect_list = ['cisco.ios.ios_facts']
        self.task.async_val = False
        self.task.args = {'gather_subset': 'min'}
        self.task.module_defaults = [{'cisco.ios.ios_facts': {'gather_subset': 'min'}}]