Merge remote-tracking branch 'ansible/devel' into issue_13438
commit cdc3c88018
21 changed files with 119 additions and 84 deletions
@@ -4,6 +4,7 @@ Ansible Changes By Release
## 2.1 TBD - ACTIVE DEVELOPMENT

####New Modules:
* aws: ec2_vpc_net_facts
* cloudstack: cs_volume

####New Filters:
@@ -37,9 +38,9 @@ Ansible Changes By Release
* New ssh configuration variables(`ansible_ssh_common_args`, `ansible_ssh_extra_args`) can be used to configure a
per-group or per-host ssh ProxyCommand or set any other ssh options.
`ansible_ssh_extra_args` is used to set options that are accepted only by ssh (not sftp or scp, which have their own analogous settings).
* ansible-pull can now verify the code it runs when using git as a source repository, using git's code signing and verification features.
* Backslashes used when specifying parameters in jinja2 expressions in YAML dicts sometimes needed to be escaped twice.
This has been fixed so that escaping once works. Here's an example of how playbooks need to be modified:
* ansible-pull can now verify the code it runs when using git as a source repository, using git's code signing and verification features.

```
# Syntax in 1.9.x
@@ -111,9 +111,8 @@ If you use boto profiles to manage multiple AWS accounts, you can pass ``--profi
aws_access_key_id = <prod access key>
aws_secret_access_key = <prod secret key>

You can then run ``ec2.py --profile prod`` to get the inventory for the prod account, or run playbooks with: ``ansible-playbook -i 'ec2.py --profile prod' myplaybook.yml``.

Alternatively, use the ``AWS_PROFILE`` variable - e.g. ``AWS_PROFILE=prod ansible-playbook -i ec2.py myplaybook.yml``
You can then run ``ec2.py --profile prod`` to get the inventory for the prod account, this option is not supported by ``anisble-playbook`` though.
But you can use the ``AWS_PROFILE`` variable - e.g. ``AWS_PROFILE=prod ansible-playbook -i ec2.py myplaybook.yml``

Since each region requires its own API call, if you are only using a small set of regions, feel free to edit ``ec2.ini`` and list only the regions you are interested in. There are other config options in ``ec2.ini`` including cache control, and destination variables.
@@ -793,8 +793,8 @@ Basically, anything that goes into "role defaults" (the defaults folder inside t

.. rubric:: Footnotes

.. [1] Tasks in each role will see their own role's defaults tasks outside of roles will the last role's defaults
.. [2] Variables defined in inventory file or provided by dynamic inventory
.. [1] Tasks in each role will see their own role's defaults. Tasks defined outside of a role will see the last role's defaults.
.. [2] Variables defined in inventory file or provided by dynamic inventory.

.. note:: Within a any section, redefining a var will overwrite the previous instance.
If multiple groups have the same variable, the last one loaded wins.
@@ -182,7 +182,7 @@
#no_log = False

# prevents logging of tasks, but only on the targets, data is still logged on the master/controller
#no_target_syslog = True
#no_target_syslog = False

# controls the compression level of variables sent to
# worker processes. At the default of 0, no compression
@@ -70,7 +70,7 @@ class AdHocCLI(CLI):
help="module name to execute (default=%s)" % C.DEFAULT_MODULE_NAME,
default=C.DEFAULT_MODULE_NAME)

self.options, self.args = self.parser.parse_args()
self.options, self.args = self.parser.parse_args(self.args[1:])

if len(self.args) != 1:
raise AnsibleOptionsError("Missing target hosts")
@@ -163,6 +163,9 @@ class AdHocCLI(CLI):
else:
cb = 'minimal'

if not C.DEFAULT_LOAD_CALLBACK_PLUGINS:
C.DEFAULT_CALLBACK_WHITELIST = []

if self.options.tree:
C.DEFAULT_CALLBACK_WHITELIST.append('tree')
C.TREE_DIR = self.options.tree
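The `parse_args(self.args[1:])` change above recurs in the CLI classes that follow. The reason: optparse's `parse_args()` falls back to `sys.argv[1:]` when called with no argument list, so each CLI now explicitly parses the argument list it was constructed with, minus the leading program/subcommand name. A minimal sketch of the difference using plain optparse (not Ansible code):

```
# Minimal illustration (not Ansible code): parse_args() with no arguments reads
# sys.argv[1:]; a CLI object that stores its own argv must pass that list
# explicitly, dropping the leading program name.
from optparse import OptionParser

parser = OptionParser()
parser.add_option("-m", dest="module_name", default="command")

stored_args = ["ansible", "all", "-m", "ping"]   # roughly what self.args holds

opts, rest = parser.parse_args(stored_args[1:])  # parse stored args, not sys.argv
print(opts.module_name, rest)                    # -> ping ['all']
```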
@@ -62,7 +62,7 @@ class DocCLI(CLI):
self.parser.add_option("-s", "--snippet", action="store_true", default=False, dest='show_snippet',
help='Show playbook snippet for specified module(s)')

self.options, self.args = self.parser.parse_args()
self.options, self.args = self.parser.parse_args(self.args[1:])
display.verbosity = self.options.verbosity

def run(self):
@@ -113,7 +113,7 @@ class GalaxyCLI(CLI):
help='Force overwriting an existing role')

# get options, args and galaxy object
self.options, self.args =self.parser.parse_args()
self.options, self.args =self.parser.parse_args(self.args[1:])
display.verbosity = self.options.verbosity
self.galaxy = Galaxy(self.options)
@@ -72,7 +72,7 @@ class PlaybookCLI(CLI):
parser.add_option('--start-at-task', dest='start_at_task',
help="start the playbook at the task matching this name")

self.options, self.args = parser.parse_args()
self.options, self.args = parser.parse_args(self.args[1:])


self.parser = parser
@@ -90,7 +90,7 @@ class PullCLI(CLI):
help='verify GPG signature of checked out commit, if it fails abort running the playbook.'
' This needs the corresponding VCS module to support such an operation')

self.options, self.args = self.parser.parse_args()
self.options, self.args = self.parser.parse_args(self.args[1:])

if not self.options.dest:
hostname = socket.getfqdn()
@@ -69,7 +69,7 @@ class VaultCLI(CLI):
elif self.action == "rekey":
self.parser.set_usage("usage: %prog rekey [options] file_name")

self.options, self.args = self.parser.parse_args()
self.options, self.args = self.parser.parse_args(self.args[1:])
display.verbosity = self.options.verbosity

can_output = ['encrypt', 'decrypt']
@@ -159,7 +159,7 @@ DEFAULT_VAR_COMPRESSION_LEVEL = get_config(p, DEFAULTS, 'var_compression_level',

# disclosure
DEFAULT_NO_LOG = get_config(p, DEFAULTS, 'no_log', 'ANSIBLE_NO_LOG', False, boolean=True)
DEFAULT_NO_TARGET_SYSLOG = get_config(p, DEFAULTS, 'no_target_syslog', 'ANSIBLE_NO_TARGET_SYSLOG', True, boolean=True)
DEFAULT_NO_TARGET_SYSLOG = get_config(p, DEFAULTS, 'no_target_syslog', 'ANSIBLE_NO_TARGET_SYSLOG', False, boolean=True)

# selinux
DEFAULT_SELINUX_SPECIAL_FS = get_config(p, 'selinux', 'special_context_filesystems', None, 'fuse, nfs, vboxsf, ramfs', islist=True)
@@ -178,25 +178,25 @@ class Inventory(object):
if self._restriction:
pattern_hash += u":%s" % to_unicode(self._restriction)

if pattern_hash in HOSTS_PATTERNS_CACHE:
return HOSTS_PATTERNS_CACHE[pattern_hash][:]
if pattern_hash not in HOSTS_PATTERNS_CACHE:

patterns = Inventory.split_host_pattern(pattern)
hosts = self._evaluate_patterns(patterns)
patterns = Inventory.split_host_pattern(pattern)
hosts = self._evaluate_patterns(patterns)

# mainly useful for hostvars[host] access
if not ignore_limits_and_restrictions:
# exclude hosts not in a subset, if defined
if self._subset:
subset = self._evaluate_patterns(self._subset)
hosts = [ h for h in hosts if h in subset ]
# mainly useful for hostvars[host] access
if not ignore_limits_and_restrictions:
# exclude hosts not in a subset, if defined
if self._subset:
subset = self._evaluate_patterns(self._subset)
hosts = [ h for h in hosts if h in subset ]

# exclude hosts mentioned in any restriction (ex: failed hosts)
if self._restriction is not None:
hosts = [ h for h in hosts if h in self._restriction ]
# exclude hosts mentioned in any restriction (ex: failed hosts)
if self._restriction is not None:
hosts = [ h for h in hosts if h in self._restriction ]

HOSTS_PATTERNS_CACHE[pattern_hash] = hosts[:]
return list(set(hosts))
HOSTS_PATTERNS_CACHE[pattern_hash] = list(set(hosts))

return HOSTS_PATTERNS_CACHE[pattern_hash][:]

@classmethod
def split_host_pattern(cls, pattern):
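The reworked caching above stores the deduplicated host list under the pattern hash and always hands back a copy of the cached entry, instead of caching the raw list and deduplicating on the way out. A minimal sketch of that pattern with hypothetical names (not the `Inventory` class itself):

```
# Minimal sketch of the caching pattern the hunk above moves to (hypothetical
# example, not Ansible code): dedupe once, cache the result, return a copy so
# callers cannot mutate the cached entry.
_PATTERN_CACHE = {}

def get_hosts_cached(pattern, evaluate):
    """evaluate(pattern) -> list of hosts; assumed expensive, so cache it."""
    if pattern not in _PATTERN_CACHE:
        hosts = evaluate(pattern)
        _PATTERN_CACHE[pattern] = list(set(hosts))   # dedupe once, then cache
    return _PATTERN_CACHE[pattern][:]                # copy keeps the cache pristine

hosts = get_hosts_cached("web:db", lambda p: ["web1", "web2", "db1", "web1"])
print(sorted(hosts))  # ['db1', 'web1', 'web2']
```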
@@ -80,7 +80,7 @@ def ios_module(**kwargs):
"""
spec = kwargs.get('argument_spec') or dict()

argument_spec = url_argument_spec()
argument_spec = shell_argument_spec()
argument_spec.update(IOS_COMMON_ARGS)
if kwargs.get('argument_spec'):
argument_spec.update(kwargs['argument_spec'])
@@ -150,21 +150,6 @@ class IosShell(object):
responses.append(response)
return responses

def ios_from_args(module):
"""Extracts the set of argumetns to build a valid IOS connection
"""
params = dict()
for arg, attrs in IOS_COMMON_ARGS.iteritems():
if module.params['device']:
params[arg] = module.params['device'].get(arg)
if arg not in params or module.params[arg]:
params[arg] = module.params[arg]
if params[arg] is None:
if attrs.get('required'):
module.fail_json(msg='argument %s is required' % arg)
params[arg] = attrs.get('default')
return params

def ios_connection(module):
"""Creates a connection to an IOS device based on the module arguments
"""
@@ -180,16 +165,16 @@ def ios_connection(module):
shell = IosShell()
shell.connect(host, port=port, username=username, password=password,
timeout=timeout)
shell.send('terminal length 0')
except paramiko.ssh_exception.AuthenticationException, exc:
module.fail_json(msg=exc.message)
except socket.error, exc:
module.fail_json(msg=exc.strerror, errno=exc.errno)

shell.send('terminal length 0')

if module.params['enable_mode']:
shell.authorize(module.params['enable_password'])

return shell
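The hunk above moves the `terminal length 0` write out of the try block so it only runs once the connection is known to be good. A rough, self-contained sketch of the same connect-then-configure flow using plain paramiko (Python 3 syntax, assumes paramiko is installed; the `IosShell` helper itself is not shown in this diff):

```
# Generic connect-then-configure sketch (not the IosShell class): fail fast on
# authentication or socket errors, only touch the channel afterwards.
import socket
import paramiko

def connect_cli(host, port=22, username=None, password=None, timeout=10):
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        client.connect(host, port=port, username=username,
                       password=password, timeout=timeout)
    except paramiko.ssh_exception.AuthenticationException as exc:
        raise SystemExit("authentication failed: %s" % exc)
    except socket.error as exc:
        raise SystemExit("connection failed: %s" % exc)
    # the channel is only used once the connection succeeded
    shell = client.invoke_shell()
    shell.send(b'terminal length 0\n')
    return shell
```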
@@ -32,16 +32,16 @@ from ansible.module_utils.nxapi import *

The nxapi module provides the following common argument spec:

* host (str) - [Required] The IPv4 address or FQDN of the network device
* host (str) - The IPv4 address or FQDN of the network device

* port (str) - Overrides the default port to use for the HTTP/S
connection. The default values are 80 for HTTP and
443 for HTTPS

* url_username (str) - [Required] The username to use to authenticate
* username (str) - The username to use to authenticate
the HTTP/S connection. Aliases: username

* url_password (str) - [Required] The password to use to authenticate
* password (str) - The password to use to authenticate
the HTTP/S connection. Aliases: password

* use_ssl (bool) - Specifies whether or not to use an encrypted (HTTPS)
@@ -51,6 +51,10 @@ The nxapi module provides the following common argument spec:
device. Valid values in `cli_show`, `cli_show_ascii`, 'cli_conf`
and `bash`. The default value is `cli_show_ascii`

* device (dict) - Used to send the entire set of connection parameters
as a dict object. This argument is mutually exclusive with the
host argument

In order to communicate with Cisco NXOS devices, the NXAPI feature
must be enabled and configured on the device.
@@ -58,34 +62,52 @@ must be enabled and configured on the device.

NXAPI_COMMAND_TYPES = ['cli_show', 'cli_show_ascii', 'cli_conf', 'bash']

def nxapi_argument_spec(spec=None):
"""Creates an argument spec for working with NXAPI
"""
arg_spec = url_argument_spec()
arg_spec.update(dict(
host=dict(required=True),
port=dict(),
url_username=dict(required=True, aliases=['username']),
url_password=dict(required=True, aliases=['password']),
use_ssl=dict(default=False, type='bool'),
command_type=dict(default='cli_show_ascii', choices=NXAPI_COMMAND_TYPES)
))
if spec:
arg_spec.update(spec)
return arg_spec
NXAPI_COMMON_ARGS = dict(
host=dict(),
port=dict(),
username=dict(),
password=dict(),
use_ssl=dict(default=False, type='bool'),
device=dict(),
command_type=dict(default='cli_show_ascii', choices=NXAPI_COMMAND_TYPES)
)

def nxapi_url(module):
def nxapi_module(**kwargs):
"""Append the common args to the argument_spec
"""
spec = kwargs.get('argument_spec') or dict()

argument_spec = url_argument_spec()
argument_spec.update(NXAPI_COMMON_ARGS)
if kwargs.get('argument_spec'):
argument_spec.update(kwargs['argument_spec'])
kwargs['argument_spec'] = argument_spec

module = AnsibleModule(**kwargs)

device = module.params.get('device') or dict()
for key, value in device.iteritems():
if key in NXAPI_COMMON_ARGS:
module.params[key] = value

params = json_dict_unicode_to_bytes(json.loads(MODULE_COMPLEX_ARGS))
for key, value in params.iteritems():
if key != 'device':
module.params[key] = value

return module

def nxapi_url(params):
"""Constructs a valid NXAPI url
"""
if module.params['use_ssl']:
if params['use_ssl']:
proto = 'https'
else:
proto = 'http'
host = module.params['host']
host = params['host']
url = '{}://{}'.format(proto, host)
port = module.params['port']
if module.params['port']:
url = '{}:{}'.format(url, module.params['port'])
if params['port']:
url = '{}:{}'.format(url, params['port'])
url = '{}/ins'.format(url)
return url
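After this rewrite `nxapi_url` works from a plain params dict rather than reaching for a module-level `module` object, so the function can be exercised on its own. A hypothetical usage sketch (hosts and ports invented for illustration):

```
# Condensed, standalone version of the params-based nxapi_url above, with
# invented example values.
def nxapi_url(params):
    proto = 'https' if params['use_ssl'] else 'http'
    url = '{}://{}'.format(proto, params['host'])
    if params['port']:
        url = '{}:{}'.format(url, params['port'])
    return '{}/ins'.format(url)

print(nxapi_url({'use_ssl': True, 'host': 'nxos-sw01.example.com', 'port': 8443}))
# -> https://nxos-sw01.example.com:8443/ins
print(nxapi_url({'use_ssl': False, 'host': '192.0.2.10', 'port': None}))
# -> http://192.0.2.10/ins
```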
@@ -109,7 +131,7 @@ def nxapi_body(commands, command_type, **kwargs):
def nxapi_command(module, commands, command_type=None, **kwargs):
"""Sends the list of commands to the device over NXAPI
"""
url = nxapi_url(module)
url = nxapi_url(module.params)

command_type = command_type or module.params['command_type']
@@ -118,6 +140,9 @@ def nxapi_command(module, commands, command_type=None, **kwargs):

headers = {'Content-Type': 'text/json'}

module.params['url_username'] = module.params['username']
module.params['url_password'] = module.params['password']

response, headers = fetch_url(module, url, data=data, headers=headers,
method='POST')
@@ -22,7 +22,7 @@ __metaclass__ = type
import yaml
from ansible.compat.six import PY3

from ansible.parsing.yaml.objects import AnsibleUnicode
from ansible.parsing.yaml.objects import AnsibleUnicode, AnsibleSequence, AnsibleMapping
from ansible.vars.hostvars import HostVars

class AnsibleDumper(yaml.SafeDumper):
@@ -50,3 +50,13 @@ AnsibleDumper.add_representer(
represent_hostvars,
)

AnsibleDumper.add_representer(
AnsibleSequence,
yaml.representer.SafeRepresenter.represent_list,
)

AnsibleDumper.add_representer(
AnsibleMapping,
yaml.representer.SafeRepresenter.represent_dict,
)
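The two added representers teach the dumper to emit `AnsibleSequence` and `AnsibleMapping` as ordinary YAML sequences and mappings; PyYAML's SafeDumper otherwise refuses to serialize unknown subclasses. A standalone sketch of the same registration using stand-in subclasses (not the Ansible classes):

```
# Standalone illustration of the representer registration above, using
# stand-in subclasses rather than AnsibleSequence/AnsibleMapping.
import yaml

class MySequence(list):
    pass

class MyMapping(dict):
    pass

class MyDumper(yaml.SafeDumper):
    pass

# Without these registrations, SafeDumper raises RepresenterError on the subclasses.
MyDumper.add_representer(MySequence, yaml.representer.SafeRepresenter.represent_list)
MyDumper.add_representer(MyMapping, yaml.representer.SafeRepresenter.represent_dict)

data = MyMapping(hosts=MySequence(["web1", "web2"]))
print(yaml.dump(data, Dumper=MyDumper, default_flow_style=False))
# hosts:
# - web1
# - web2
```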
@@ -90,16 +90,18 @@ class Become:

display.deprecated("Instead of su/su_user, use become/become_user and set become_method to 'su' (default is sudo)")

# if we are becoming someone else, but some fields are unset,
# make sure they're initialized to the default config values
if ds.get('become', False):
if ds.get('become_method', None) is None:
ds['become_method'] = C.DEFAULT_BECOME_METHOD
if ds.get('become_user', None) is None:
ds['become_user'] = C.DEFAULT_BECOME_USER

return ds

def set_become_defaults(self, become, become_method, become_user):
''' if we are becoming someone else, but some fields are unset,
make sure they're initialized to the default config values '''
if become:
if become_method is None:
become_method = C.DEFAULT_BECOME_METHOD
if become_user is None:
become_user = C.DEFAULT_BECOME_USER

def _get_attr_become(self):
'''
Override for the 'become' getattr fetcher, used from Base.
@@ -392,6 +392,9 @@ class PlayContext(Base):
if new_info.no_log is None:
new_info.no_log = C.DEFAULT_NO_LOG

# set become defaults if not previouslly set
task.set_become_defaults(new_info.become, new_info.become_method, new_info.become_user)

return new_info

def make_become_cmd(self, cmd, executable=None):
@@ -133,7 +133,10 @@ class Task(Base, Conditional, Taggable, Become):

def __repr__(self):
''' returns a human readable representation of the task '''
return "TASK: %s" % self.get_name()
if self.get_name() == 'meta ':
return "TASK: meta (%s)" % self.args['_raw_params']
else:
return "TASK: %s" % self.get_name()

def _preprocess_loop(self, ds, new_ds, k, v):
''' take a lookup plugin name and store it correctly '''
@@ -382,7 +382,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
module_args['_ansible_check_mode'] = True

# set no log in the module arguments, if required
if self._play_context.no_log or not C.DEFAULT_NO_TARGET_SYSLOG:
if self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG:
module_args['_ansible_no_log'] = True

# set debug in the module arguments, if required
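Read together with the `DEFAULT_NO_TARGET_SYSLOG` default flip in the constants hunk earlier in this diff, dropping the `not` keeps the default behaviour unchanged while making the option read the right way round: setting `no_target_syslog = True` now suppresses task logging on the targets. A small sketch of the resulting decision (hypothetical values, not Ansible code):

```
# Sketch of the flag logic after this diff (not Ansible code): _ansible_no_log
# is set when the play asks for no_log OR the controller-wide no_target_syslog
# option is enabled.
def module_no_log(play_no_log, default_no_target_syslog):
    return play_no_log or default_no_target_syslog

# Default config (no_target_syslog = False): only the play's no_log matters.
print(module_no_log(play_no_log=False, default_no_target_syslog=False))  # False
# Operator sets no_target_syslog = True: tasks stop logging on the targets.
print(module_no_log(play_no_log=False, default_no_target_syslog=True))   # True
```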
@@ -48,7 +48,7 @@ class ActionModule(ActionBase):
env_string = self._compute_environment_string()

module_args = self._task.args.copy()
if self._play_context.no_log or not C.DEFAULT_NO_TARGET_SYSLOG:
if self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG:
module_args['_ansible_no_log'] = True

# configure, upload, and chmod the target module
@@ -45,8 +45,12 @@ class ActionModule(ActionBase):
# If var is a list or dict, use the type as key to display
result[to_unicode(type(self._task.args['var']))] = results
else:
# If var name is same as result, try to template it
if results == self._task.args['var']:
results = "VARIABLE IS NOT DEFINED!"
try:
results = self._templar.template("{{" + results + "}}", convert_bare=True, fail_on_undefined=True)
except:
results = "VARIABLE IS NOT DEFINED!"
result[self._task.args['var']] = results
else:
result['msg'] = 'here we are'