Merge remote-tracking branch 'ansible/devel' into issue_13438
Commit cdc3c88018: 21 changed files with 119 additions and 84 deletions
@@ -4,6 +4,7 @@ Ansible Changes By Release
 ## 2.1 TBD - ACTIVE DEVELOPMENT
 
 ####New Modules:
+* aws: ec2_vpc_net_facts
 * cloudstack: cs_volume
 
 ####New Filters:
@@ -37,9 +38,9 @@ Ansible Changes By Release
 * New ssh configuration variables(`ansible_ssh_common_args`, `ansible_ssh_extra_args`) can be used to configure a
   per-group or per-host ssh ProxyCommand or set any other ssh options.
   `ansible_ssh_extra_args` is used to set options that are accepted only by ssh (not sftp or scp, which have their own analogous settings).
+* ansible-pull can now verify the code it runs when using git as a source repository, using git's code signing and verification features.
 * Backslashes used when specifying parameters in jinja2 expressions in YAML dicts sometimes needed to be escaped twice.
   This has been fixed so that escaping once works. Here's an example of how playbooks need to be modified:
-* ansible-pull can now verify the code it runs when using git as a source repository, using git's code signing and verification features.
 
 ```
 # Syntax in 1.9.x
@@ -111,9 +111,8 @@ If you use boto profiles to manage multiple AWS accounts, you can pass ``--profi
     aws_access_key_id = <prod access key>
     aws_secret_access_key = <prod secret key>
 
-You can then run ``ec2.py --profile prod`` to get the inventory for the prod account, or run playbooks with: ``ansible-playbook -i 'ec2.py --profile prod' myplaybook.yml``.
+You can then run ``ec2.py --profile prod`` to get the inventory for the prod account; this option is not supported by ``ansible-playbook``, though.
+But you can use the ``AWS_PROFILE`` variable - e.g. ``AWS_PROFILE=prod ansible-playbook -i ec2.py myplaybook.yml``
 
-Alternatively, use the ``AWS_PROFILE`` variable - e.g. ``AWS_PROFILE=prod ansible-playbook -i ec2.py myplaybook.yml``
 
 Since each region requires its own API call, if you are only using a small set of regions, feel free to edit ``ec2.ini`` and list only the regions you are interested in. There are other config options in ``ec2.ini`` including cache control, and destination variables.
 
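The same two invocation styles can be driven from any wrapper script. A minimal Python sketch (assuming a local ``ec2.py`` at the path shown and a boto profile named ``prod``; both are illustrative, not part of this patch):

```python
import os
import subprocess

# ec2.py understands --profile itself, so the inventory for one account
# can be pulled directly from the script.
subprocess.check_call(["./ec2.py", "--profile", "prod", "--list"])

# ansible-playbook does not accept --profile, so the boto profile is
# selected through the AWS_PROFILE environment variable instead.
env = dict(os.environ, AWS_PROFILE="prod")
subprocess.check_call(["ansible-playbook", "-i", "ec2.py", "myplaybook.yml"], env=env)
```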
@@ -793,8 +793,8 @@ Basically, anything that goes into "role defaults" (the defaults folder inside t
 .. rubric:: Footnotes
 
-.. [1] Tasks in each role will see their own role's defaults tasks outside of roles will the last role's defaults
-.. [2] Variables defined in inventory file or provided by dynamic inventory
+.. [1] Tasks in each role will see their own role's defaults. Tasks defined outside of a role will see the last role's defaults.
+.. [2] Variables defined in inventory file or provided by dynamic inventory.
 
 .. note:: Within any section, redefining a var will overwrite the previous instance.
    If multiple groups have the same variable, the last one loaded wins.
@@ -182,7 +182,7 @@
 #no_log = False
 
 # prevents logging of tasks, but only on the targets, data is still logged on the master/controller
-#no_target_syslog = True
+#no_target_syslog = False
 
 # controls the compression level of variables sent to
 # worker processes. At the default of 0, no compression
@@ -70,7 +70,7 @@ class AdHocCLI(CLI):
             help="module name to execute (default=%s)" % C.DEFAULT_MODULE_NAME,
             default=C.DEFAULT_MODULE_NAME)
 
-        self.options, self.args = self.parser.parse_args()
+        self.options, self.args = self.parser.parse_args(self.args[1:])
 
         if len(self.args) != 1:
             raise AnsibleOptionsError("Missing target hosts")
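This change (repeated in the other CLI classes below) hands optparse an explicit argument list instead of letting it read ``sys.argv`` implicitly; ``self.args[0]`` holds the program name, so slicing with ``[1:]`` keeps the behaviour the same while making the parser callable with a constructed argv. A standalone sketch of the pattern (the helper and the sample argv are illustrative, not Ansible's API):

```python
import optparse  # the CLI classes above are optparse-based

def build_parser():
    parser = optparse.OptionParser(usage="%prog [options] <host-pattern>")
    parser.add_option("-m", dest="module_name", default="command",
                      help="module name to execute (default=%default)")
    return parser

# argv[0] is the program name, just like self.args[0] in the CLI classes,
# so it is sliced off before parsing.
argv = ["ansible", "-m", "ping", "all"]
options, args = build_parser().parse_args(argv[1:])

assert options.module_name == "ping"
assert args == ["all"]  # the remaining positional arg is the host pattern
```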
@@ -163,6 +163,9 @@ class AdHocCLI(CLI):
         else:
             cb = 'minimal'
 
+        if not C.DEFAULT_LOAD_CALLBACK_PLUGINS:
+            C.DEFAULT_CALLBACK_WHITELIST = []
+
         if self.options.tree:
             C.DEFAULT_CALLBACK_WHITELIST.append('tree')
             C.TREE_DIR = self.options.tree
@@ -62,7 +62,7 @@ class DocCLI(CLI):
         self.parser.add_option("-s", "--snippet", action="store_true", default=False, dest='show_snippet',
             help='Show playbook snippet for specified module(s)')
 
-        self.options, self.args = self.parser.parse_args()
+        self.options, self.args = self.parser.parse_args(self.args[1:])
         display.verbosity = self.options.verbosity
 
     def run(self):
@@ -113,7 +113,7 @@ class GalaxyCLI(CLI):
             help='Force overwriting an existing role')
 
         # get options, args and galaxy object
-        self.options, self.args =self.parser.parse_args()
+        self.options, self.args =self.parser.parse_args(self.args[1:])
         display.verbosity = self.options.verbosity
         self.galaxy = Galaxy(self.options)
 
@@ -72,7 +72,7 @@ class PlaybookCLI(CLI):
         parser.add_option('--start-at-task', dest='start_at_task',
             help="start the playbook at the task matching this name")
 
-        self.options, self.args = parser.parse_args()
+        self.options, self.args = parser.parse_args(self.args[1:])
 
 
         self.parser = parser
@@ -90,7 +90,7 @@ class PullCLI(CLI):
             help='verify GPG signature of checked out commit, if it fails abort running the playbook.'
                 ' This needs the corresponding VCS module to support such an operation')
 
-        self.options, self.args = self.parser.parse_args()
+        self.options, self.args = self.parser.parse_args(self.args[1:])
 
         if not self.options.dest:
             hostname = socket.getfqdn()
@@ -69,7 +69,7 @@ class VaultCLI(CLI):
         elif self.action == "rekey":
             self.parser.set_usage("usage: %prog rekey [options] file_name")
 
-        self.options, self.args = self.parser.parse_args()
+        self.options, self.args = self.parser.parse_args(self.args[1:])
         display.verbosity = self.options.verbosity
 
         can_output = ['encrypt', 'decrypt']
@@ -159,7 +159,7 @@ DEFAULT_VAR_COMPRESSION_LEVEL = get_config(p, DEFAULTS, 'var_compression_level',
 
 # disclosure
 DEFAULT_NO_LOG = get_config(p, DEFAULTS, 'no_log', 'ANSIBLE_NO_LOG', False, boolean=True)
-DEFAULT_NO_TARGET_SYSLOG = get_config(p, DEFAULTS, 'no_target_syslog', 'ANSIBLE_NO_TARGET_SYSLOG', True, boolean=True)
+DEFAULT_NO_TARGET_SYSLOG = get_config(p, DEFAULTS, 'no_target_syslog', 'ANSIBLE_NO_TARGET_SYSLOG', False, boolean=True)
 
 # selinux
 DEFAULT_SELINUX_SPECIAL_FS = get_config(p, 'selinux', 'special_context_filesystems', None, 'fuse, nfs, vboxsf, ramfs', islist=True)
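The flipped default interacts with ``get_config``, which resolves a setting from the environment variable first, then the ini file, then the hard-coded default. A simplified, illustrative resolver (not Ansible's actual ``get_config``) showing why ``no_target_syslog`` now comes out ``False`` unless someone opts in:

```python
import os

def resolve_bool(env_var, ini_value, default):
    """Illustrative precedence: environment > ini setting > default."""
    raw = os.environ.get(env_var, ini_value)
    if raw is None:
        return default
    if isinstance(raw, bool):
        return raw
    return str(raw).strip().lower() in ("1", "true", "yes", "on")

# Nothing set anywhere: the new default keeps target-side syslog enabled.
print(resolve_bool("ANSIBLE_NO_TARGET_SYSLOG", None, False))    # False

# Opting in via ansible.cfg (no_target_syslog = True) or the env var.
print(resolve_bool("ANSIBLE_NO_TARGET_SYSLOG", "True", False))  # True
```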
@@ -178,8 +178,7 @@ class Inventory(object):
             if self._restriction:
                 pattern_hash += u":%s" % to_unicode(self._restriction)
 
-        if pattern_hash in HOSTS_PATTERNS_CACHE:
-            return HOSTS_PATTERNS_CACHE[pattern_hash][:]
+        if pattern_hash not in HOSTS_PATTERNS_CACHE:
 
             patterns = Inventory.split_host_pattern(pattern)
             hosts = self._evaluate_patterns(patterns)
@@ -195,8 +194,9 @@ class Inventory(object):
                 if self._restriction is not None:
                     hosts = [ h for h in hosts if h in self._restriction ]
 
-        HOSTS_PATTERNS_CACHE[pattern_hash] = hosts[:]
-        return list(set(hosts))
+            HOSTS_PATTERNS_CACHE[pattern_hash] = list(set(hosts))
+
+        return HOSTS_PATTERNS_CACHE[pattern_hash][:]
 
     @classmethod
     def split_host_pattern(cls, pattern):
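Taken together, the two inventory hunks change the cache from "store and return the raw host list" to "compute once, store a de-duplicated copy, and always return a fresh slice of the cached entry". A self-contained sketch of that pattern (the cache dict, ``expand`` function, and sample data are illustrative, not the real ``Inventory`` class):

```python
HOSTS_PATTERNS_CACHE = {}

def get_hosts(pattern, expand):
    """Return hosts for ``pattern``, computing and caching on first use."""
    if pattern not in HOSTS_PATTERNS_CACHE:
        hosts = expand(pattern)
        # de-duplicate before caching, mirroring list(set(hosts)) above
        HOSTS_PATTERNS_CACHE[pattern] = list(set(hosts))
    # return a copy ([:]) so callers cannot mutate the cached list
    return HOSTS_PATTERNS_CACHE[pattern][:]

def expand(pattern):
    return ["web1", "web2", "web1"]  # duplicate on purpose

first = get_hosts("webservers", expand)
first.append("rogue-host")                                   # mutating the copy...
assert "rogue-host" not in get_hosts("webservers", expand)   # ...does not poison the cache
assert sorted(get_hosts("webservers", expand)) == ["web1", "web2"]
```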
@@ -80,7 +80,7 @@ def ios_module(**kwargs):
     """
     spec = kwargs.get('argument_spec') or dict()
 
-    argument_spec = url_argument_spec()
+    argument_spec = shell_argument_spec()
     argument_spec.update(IOS_COMMON_ARGS)
     if kwargs.get('argument_spec'):
         argument_spec.update(kwargs['argument_spec'])
@@ -150,21 +150,6 @@ class IosShell(object):
             responses.append(response)
         return responses
 
-def ios_from_args(module):
-    """Extracts the set of argumetns to build a valid IOS connection
-    """
-    params = dict()
-    for arg, attrs in IOS_COMMON_ARGS.iteritems():
-        if module.params['device']:
-            params[arg] = module.params['device'].get(arg)
-        if arg not in params or module.params[arg]:
-            params[arg] = module.params[arg]
-        if params[arg] is None:
-            if attrs.get('required'):
-                module.fail_json(msg='argument %s is required' % arg)
-            params[arg] = attrs.get('default')
-    return params
-
 def ios_connection(module):
     """Creates a connection to an IOS device based on the module arguments
     """
@@ -180,16 +165,16 @@ def ios_connection(module):
         shell = IosShell()
         shell.connect(host, port=port, username=username, password=password,
                       timeout=timeout)
+        shell.send('terminal length 0')
     except paramiko.ssh_exception.AuthenticationException, exc:
         module.fail_json(msg=exc.message)
     except socket.error, exc:
         module.fail_json(msg=exc.strerror, errno=exc.errno)
 
-    shell.send('terminal length 0')
 
     if module.params['enable_mode']:
         shell.authorize(module.params['enable_password'])
 
     return shell
 
 
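Moving ``shell.send('terminal length 0')`` inside the ``try`` block means a failure while disabling pagination is reported through ``module.fail_json`` like any other connection error rather than escaping as an unhandled exception after the ``except`` clauses. A minimal sketch of the control-flow difference (the ``Shell``, ``ShellError``, and ``fail`` names are placeholders, not the module's real objects):

```python
class ShellError(Exception):
    pass

class Shell(object):
    def connect(self):
        pass
    def send(self, command):
        raise ShellError("timed out sending %r" % command)

def fail(msg):
    print("failed: %s" % msg)

def open_connection():
    shell = Shell()
    try:
        shell.connect()
        shell.send('terminal length 0')  # new placement: errors are caught below
    except ShellError as exc:
        fail(str(exc))
        return None
    return shell

open_connection()  # prints a clean failure instead of raising
```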
@@ -32,16 +32,16 @@ from ansible.module_utils.nxapi import *
 
 The nxapi module provides the following common argument spec:
 
-* host (str) - [Required] The IPv4 address or FQDN of the network device
+* host (str) - The IPv4 address or FQDN of the network device
 
 * port (str) - Overrides the default port to use for the HTTP/S
     connection. The default values are 80 for HTTP and
     443 for HTTPS
 
-* url_username (str) - [Required] The username to use to authenticate
+* username (str) - The username to use to authenticate
     the HTTP/S connection. Aliases: username
 
-* url_password (str) - [Required] The password to use to authenticate
+* password (str) - The password to use to authenticate
     the HTTP/S connection. Aliases: password
 
 * use_ssl (bool) - Specifies whether or not to use an encrypted (HTTPS)
@@ -51,6 +51,10 @@ The nxapi module provides the following common argument spec:
     device. Valid values in `cli_show`, `cli_show_ascii`, 'cli_conf`
     and `bash`. The default value is `cli_show_ascii`
 
+* device (dict) - Used to send the entire set of connection parameters
+    as a dict object. This argument is mutually exclusive with the
+    host argument
+
 In order to communicate with Cisco NXOS devices, the NXAPI feature
 must be enabled and configured on the device.
 
@@ -58,34 +62,52 @@ must be enabled and configured on the device.
 
 NXAPI_COMMAND_TYPES = ['cli_show', 'cli_show_ascii', 'cli_conf', 'bash']
 
-def nxapi_argument_spec(spec=None):
-    """Creates an argument spec for working with NXAPI
-    """
-    arg_spec = url_argument_spec()
-    arg_spec.update(dict(
-        host=dict(required=True),
-        port=dict(),
-        url_username=dict(required=True, aliases=['username']),
-        url_password=dict(required=True, aliases=['password']),
-        use_ssl=dict(default=False, type='bool'),
-        command_type=dict(default='cli_show_ascii', choices=NXAPI_COMMAND_TYPES)
-    ))
-    if spec:
-        arg_spec.update(spec)
-    return arg_spec
+NXAPI_COMMON_ARGS = dict(
+    host=dict(),
+    port=dict(),
+    username=dict(),
+    password=dict(),
+    use_ssl=dict(default=False, type='bool'),
+    device=dict(),
+    command_type=dict(default='cli_show_ascii', choices=NXAPI_COMMAND_TYPES)
+)
 
-def nxapi_url(module):
+def nxapi_module(**kwargs):
+    """Append the common args to the argument_spec
+    """
+    spec = kwargs.get('argument_spec') or dict()
+
+    argument_spec = url_argument_spec()
+    argument_spec.update(NXAPI_COMMON_ARGS)
+    if kwargs.get('argument_spec'):
+        argument_spec.update(kwargs['argument_spec'])
+    kwargs['argument_spec'] = argument_spec
+
+    module = AnsibleModule(**kwargs)
+
+    device = module.params.get('device') or dict()
+    for key, value in device.iteritems():
+        if key in NXAPI_COMMON_ARGS:
+            module.params[key] = value
+
+    params = json_dict_unicode_to_bytes(json.loads(MODULE_COMPLEX_ARGS))
+    for key, value in params.iteritems():
+        if key != 'device':
+            module.params[key] = value
+
+    return module
+
+def nxapi_url(params):
     """Constructs a valid NXAPI url
     """
-    if module.params['use_ssl']:
+    if params['use_ssl']:
         proto = 'https'
     else:
         proto = 'http'
-    host = module.params['host']
+    host = params['host']
     url = '{}://{}'.format(proto, host)
-    port = module.params['port']
-    if module.params['port']:
-        url = '{}:{}'.format(url, module.params['port'])
+    if params['port']:
+        url = '{}:{}'.format(url, params['port'])
     url = '{}/ins'.format(url)
     return url
 
@@ -109,7 +131,7 @@ def nxapi_body(commands, command_type, **kwargs):
 def nxapi_command(module, commands, command_type=None, **kwargs):
     """Sends the list of commands to the device over NXAPI
     """
-    url = nxapi_url(module)
+    url = nxapi_url(module.params)
 
     command_type = command_type or module.params['command_type']
 
@@ -118,6 +140,9 @@ def nxapi_command(module, commands, command_type=None, **kwargs):
 
     headers = {'Content-Type': 'text/json'}
 
+    module.params['url_username'] = module.params['username']
+    module.params['url_password'] = module.params['password']
+
     response, headers = fetch_url(module, url, data=data, headers=headers,
                                   method='POST')
 
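After this rework a module calls ``nxapi_module()`` instead of building its own spec, and the connection details may arrive either as top-level arguments or bundled in a single ``device`` dict that is folded back into ``module.params``; ``fetch_url`` still authenticates via ``url_username``/``url_password``, hence the remapping just above. A hedged sketch of the merge with plain dicts standing in for ``AnsibleModule`` (the task arguments are invented for illustration):

```python
NXAPI_COMMON_ARGS = ('host', 'port', 'username', 'password', 'use_ssl', 'command_type')

def merge_device_params(params):
    """Mirror the merge in nxapi_module: values inside ``device`` fill in
    the matching top-level connection parameters."""
    merged = dict(params)
    device = merged.get('device') or {}
    for key, value in device.items():
        if key in NXAPI_COMMON_ARGS:
            merged[key] = value
    return merged

# hypothetical task arguments using the new ``device`` form
task_args = {
    'device': {'host': '192.0.2.1', 'username': 'admin', 'password': 'secret'},
    'command_type': 'cli_show',
}
params = merge_device_params(task_args)
assert params['host'] == '192.0.2.1'

# fetch_url() authenticates via url_username/url_password, hence the remapping
params['url_username'] = params.get('username')
params['url_password'] = params.get('password')
```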
@@ -22,7 +22,7 @@ __metaclass__ = type
 import yaml
 from ansible.compat.six import PY3
 
-from ansible.parsing.yaml.objects import AnsibleUnicode
+from ansible.parsing.yaml.objects import AnsibleUnicode, AnsibleSequence, AnsibleMapping
 from ansible.vars.hostvars import HostVars
 
 class AnsibleDumper(yaml.SafeDumper):
@@ -50,3 +50,13 @@ AnsibleDumper.add_representer(
     represent_hostvars,
 )
 
+AnsibleDumper.add_representer(
+    AnsibleSequence,
+    yaml.representer.SafeRepresenter.represent_list,
+)
+
+AnsibleDumper.add_representer(
+    AnsibleMapping,
+    yaml.representer.SafeRepresenter.represent_dict,
+)
+
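Registering ``SafeRepresenter.represent_list`` and ``represent_dict`` for the Ansible subclasses lets ``yaml.dump`` serialize them like ordinary sequences and mappings instead of raising a RepresenterError for unknown types. A standalone PyYAML sketch of the same idea, with ``MySequence``/``MyMapping`` standing in for ``AnsibleSequence``/``AnsibleMapping``:

```python
import yaml

class MySequence(list):
    """Stand-in for a list subclass such as AnsibleSequence."""

class MyMapping(dict):
    """Stand-in for a dict subclass such as AnsibleMapping."""

class Dumper(yaml.SafeDumper):
    pass

# Without these registrations, SafeDumper refuses to serialize the
# subclasses because it only knows the exact builtin types.
Dumper.add_representer(MySequence, yaml.representer.SafeRepresenter.represent_list)
Dumper.add_representer(MyMapping, yaml.representer.SafeRepresenter.represent_dict)

data = MyMapping(hosts=MySequence(["web1", "web2"]))
print(yaml.dump(data, Dumper=Dumper, default_flow_style=False))
# hosts:
# - web1
# - web2
```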
@@ -90,16 +90,18 @@ class Become:
 
             display.deprecated("Instead of su/su_user, use become/become_user and set become_method to 'su' (default is sudo)")
 
-        # if we are becoming someone else, but some fields are unset,
-        # make sure they're initialized to the default config values
-        if ds.get('become', False):
-            if ds.get('become_method', None) is None:
-                ds['become_method'] = C.DEFAULT_BECOME_METHOD
-            if ds.get('become_user', None) is None:
-                ds['become_user'] = C.DEFAULT_BECOME_USER
 
         return ds
 
+    def set_become_defaults(self, become, become_method, become_user):
+        ''' if we are becoming someone else, but some fields are unset,
+            make sure they're initialized to the default config values '''
+        if become:
+            if become_method is None:
+                become_method = C.DEFAULT_BECOME_METHOD
+            if become_user is None:
+                become_user = C.DEFAULT_BECOME_USER
+
     def _get_attr_become(self):
         '''
         Override for the 'become' getattr fetcher, used from Base.
@@ -392,6 +392,9 @@ class PlayContext(Base):
         if new_info.no_log is None:
             new_info.no_log = C.DEFAULT_NO_LOG
 
+        # set become defaults if not previouslly set
+        task.set_become_defaults(new_info.become, new_info.become_method, new_info.become_user)
+
         return new_info
 
     def make_become_cmd(self, cmd, executable=None):
@@ -133,6 +133,9 @@ class Task(Base, Conditional, Taggable, Become):
 
     def __repr__(self):
         ''' returns a human readable representation of the task '''
+        if self.get_name() == 'meta ':
+            return "TASK: meta (%s)" % self.args['_raw_params']
+        else:
             return "TASK: %s" % self.get_name()
 
     def _preprocess_loop(self, ds, new_ds, k, v):
@@ -382,7 +382,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
             module_args['_ansible_check_mode'] = True
 
         # set no log in the module arguments, if required
-        if self._play_context.no_log or not C.DEFAULT_NO_TARGET_SYSLOG:
+        if self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG:
             module_args['_ansible_no_log'] = True
 
         # set debug in the module arguments, if required
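This condition change pairs with the ``no_target_syslog`` default flip above: with the old default of ``True``, the negated test meant changing the ansible.cfg option had the opposite of the intended effect, while the new default of ``False`` plus the un-negated test sets ``_ansible_no_log`` on the target exactly when the task is ``no_log`` or the operator asked to keep task data out of the target's syslog. A small truth-table sketch of the corrected logic (plain booleans, not the real ``PlayContext``):

```python
def suppress_target_syslog(task_no_log, no_target_syslog_setting):
    """Corrected condition from the hunk above."""
    return task_no_log or no_target_syslog_setting

# (task no_log, ansible.cfg no_target_syslog) -> pass _ansible_no_log to the module?
cases = [
    (False, False),   # default: log on the target as usual
    (False, True),    # operator opted out of target-side syslog
    (True,  False),   # sensitive task: always suppressed
    (True,  True),
]
for task_no_log, setting in cases:
    print(task_no_log, setting, "->", suppress_target_syslog(task_no_log, setting))
```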
@@ -48,7 +48,7 @@ class ActionModule(ActionBase):
         env_string = self._compute_environment_string()
 
         module_args = self._task.args.copy()
-        if self._play_context.no_log or not C.DEFAULT_NO_TARGET_SYSLOG:
+        if self._play_context.no_log or C.DEFAULT_NO_TARGET_SYSLOG:
             module_args['_ansible_no_log'] = True
 
         # configure, upload, and chmod the target module
@@ -45,7 +45,11 @@ class ActionModule(ActionBase):
                 # If var is a list or dict, use the type as key to display
                 result[to_unicode(type(self._task.args['var']))] = results
             else:
+                # If var name is same as result, try to template it
                 if results == self._task.args['var']:
+                    try:
+                        results = self._templar.template("{{" + results + "}}", convert_bare=True, fail_on_undefined=True)
+                    except:
                         results = "VARIABLE IS NOT DEFINED!"
                 result[self._task.args['var']] = results
         else:
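The debug change retries templating when the first render comes back unchanged (i.e. still the bare variable name) and falls back to "VARIABLE IS NOT DEFINED!" if that second pass fails. ``self._templar.template`` is Ansible's own wrapper, but the fallback can be sketched with plain Jinja2 (the ``facts`` dict and variable names are invented for illustration):

```python
from jinja2 import Environment, StrictUndefined, UndefinedError

env = Environment(undefined=StrictUndefined)
facts = {"ansible_hostname": "web1"}

def debug_var(name):
    try:
        # equivalent in spirit to templating "{{" + name + "}}"
        return env.from_string("{{ %s }}" % name).render(**facts)
    except UndefinedError:
        return "VARIABLE IS NOT DEFINED!"

print(debug_var("ansible_hostname"))   # web1
print(debug_var("not_a_real_var"))     # VARIABLE IS NOT DEFINED!
```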