* allow shells to have per host options, remote_tmp

added language to shell
removed module lang setting from general config, as plugins have it now
use get to avoid bad powershell plugin
more resilient tmp discovery, fall back to `pwd`
add shell to docs
fixed options for when frags are only options
added shell set_options handling in task_executor and fixed option fragments
normalize tmp dir usage

- pass tmpdir/tmp/temp options as env var to commands, making it default for tempfile
- adjusted ansiballz tmpdir
- default local tempfile usage to the configured local tmp
- set env temp in action

add options to powershell
shift temporary to internal envvar/params
ensure tempdir is set if we pass var
ensure basic and url use expected tempdir (see the sketch after this list)
ensure localhost uses local tmp
give /var/tmp priority, fewer permission issues
more consistent tempfile mgmt for ansiballz
made async_dir configurable
better action handling, allow for finally rm tmp
fixed tmp issue and no more tempdir in ballz
hostvarize world readable and admin users
always set shell tempdir
added comment to discourage use of exception/flow control
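The tempdir handoff listed above (export the per-host remote tmp as an env var so it becomes the default for tempfile) can be pictured with a short, self-contained sketch. The names below are simplified stand-ins, not this commit's code; the real logic is in the module_utils/basic.py and module_utils/urls.py hunks further down.

    # Hypothetical, trimmed illustration of the tempdir handoff described above.
    import os
    import tempfile

    def pick_module_tempdir():
        # The action plugin exports the per-host remote_tmp it created as
        # ANSIBLE_REMOTE_TEMP; fall back to the system default otherwise.
        return os.environ.get('ANSIBLE_REMOTE_TEMP') or tempfile.gettempdir()

    # Making it the default for tempfile means helpers such as mkstemp()
    # land in the managed tmp dir instead of /tmp.
    tempfile.tempdir = pick_module_tempdir()
    fd, path = tempfile.mkstemp(prefix='ansible-module-')
    os.close(fd)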

* Mostly revert expand_user as it's not quite working.

This was an additional feature anyhow.

Kept the use of pwd as a fallback but moved it to a second ssh
connection.  This is not optimal but getting that to work in a single
ssh connection was part of the problem holding this up.

(cherry picked from commit 395b714120522f15e4c90a346f5e8e8d79213aca)
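A rough sketch of the fallback that was kept, with run_remote standing in for the action plugin's low-level command execution and plain echo/pwd commands standing in for the shell plugin's expand_user()/pwd() helpers (both are simplifications, not the real API):

    # If expanding '~' remotely yields nothing, ask the remote shell for its
    # working directory in a second round trip instead of giving up.
    def expand_remote_path(path, run_remote):
        out = run_remote('echo %s' % path).strip()      # first connection: expand ~
        if not out:
            out = run_remote('pwd').strip() or path     # second connection: fall back to pwd
        return out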

* fixed script and other action plugins

ensure tmpdir deletion
allow for connections that don't support new options (legacy, 3rd party)
fixed tests
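For orientation, the reworked action plugins in this diff converge on one pattern: do the work inside try, turn early exits into the (temporary, internal) AnsibleAction exceptions, and always remove the shell tempdir in finally. Below is a stand-alone sketch with stub classes; it is not the real ansible.errors implementation, and the commit itself discourages using these exceptions outside core.

    # Stand-alone sketch of the cleanup pattern used by the reworked action plugins.
    # AnsibleAction/AnsibleActionFail are stand-ins for the real classes in
    # ansible.errors; remove_tmp_path stands in for ActionBase._remove_tmp_path.
    class AnsibleAction(Exception):
        def __init__(self, message='', result=None):
            super(AnsibleAction, self).__init__(message)
            self.result = dict(result or {})

    class AnsibleActionFail(AnsibleAction):
        def __init__(self, message='', result=None):
            super(AnsibleActionFail, self).__init__(message, result)
            self.result.update({'failed': True, 'msg': message})

    def run(task_args, tempdir, remove_tmp_path):
        result = {}
        try:
            if 'src' not in task_args:
                raise AnsibleActionFail('src is required')
            result['changed'] = True          # real work would happen here
        except AnsibleAction as e:
            result.update(e.result)           # skip/fail/done all funnel through here
        finally:
            remove_tmp_path(tempdir)          # tmpdir is deleted no matter what
        return result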
Brian Coca 2018-01-16 00:15:04 -05:00 committed by Toshio Kuratomi
parent eca3fcd214
commit bbd6b8bb42
44 changed files with 1010 additions and 972 deletions

View file

@@ -49,6 +49,7 @@ from six import iteritems, string_types
 from ansible.errors import AnsibleError
 from ansible.module_utils._text import to_bytes
+from ansible.plugins.loader import fragment_loader
 from ansible.utils import plugin_docs
 from ansible.utils.display import Display
@@ -235,7 +236,7 @@ def get_plugin_info(module_dir, limit_to=None, verbose=False):
        primary_category = module_categories[0]
    # use ansible core library to parse out doc metadata YAML and plaintext examples
-    doc, examples, returndocs, metadata = plugin_docs.get_docstring(module_path, verbose=verbose)
+    doc, examples, returndocs, metadata = plugin_docs.get_docstring(module_path, fragment_loader, verbose=verbose)
    # save all the information
    module_info[module] = {'path': module_path,

View file

@@ -22,7 +22,7 @@ and guidelines:
 * In the event of failure, a key of 'failed' should be included, along with a string explanation in 'msg'. Modules that raise tracebacks (stacktraces) are generally considered 'poor' modules, though Ansible can deal with these returns and will automatically convert anything unparseable into a failed result. If you are using the AnsibleModule common Python code, the 'failed' element will be included for you automatically when you call 'fail_json'.
-* Return codes from modules are actually not significant, but continue on with 0=success and non-zero=failure for reasons of future proofing.
+* Return codes from modules are used if 'failed' is missing, 0=success and non-zero=failure.
 * As results from many hosts will be aggregated at once, modules should return only relevant output. Returning the entire contents of a log file is generally bad form.
@@ -194,5 +194,4 @@ Avoid creating a module that does the work of other modules; this leads to code
 Avoid creating 'caches'. Ansible is designed without a central server or authority, so you cannot guarantee it will not run with different permissions, options or locations. If you need a central authority, have it on top of Ansible (for example, using bastion/cm/ci server or tower); do not try to build it into modules.
-Always use the hacking/test-module script when developing modules and it will warn
-you about these kind of things.
+Always use the hacking/test-module script when developing modules and it will warn you about these kind of things.

View file

@@ -44,7 +44,7 @@ from ansible.module_utils._text import to_native, to_text
 from ansible.module_utils.parsing.convert_bool import boolean
 from ansible.parsing.splitter import parse_kv
 from ansible.playbook.play import Play
-from ansible.plugins.loader import module_loader
+from ansible.plugins.loader import module_loader, fragment_loader
 from ansible.utils import plugin_docs
 from ansible.utils.color import stringc
@@ -356,7 +356,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
        if module_name in self.modules:
            in_path = module_loader.find_plugin(module_name)
            if in_path:
-                oc, a, _, _ = plugin_docs.get_docstring(in_path)
+                oc, a, _, _ = plugin_docs.get_docstring(in_path, fragment_loader)
                if oc:
                    display.display(oc['short_description'])
                    display.display('Parameters:')
@@ -388,7 +388,7 @@ class ConsoleCLI(CLI, cmd.Cmd):
    def module_args(self, module_name):
        in_path = module_loader.find_plugin(module_name)
-        oc, a, _, _ = plugin_docs.get_docstring(in_path)
+        oc, a, _, _ = plugin_docs.get_docstring(in_path, fragment_loader)
        return list(oc['options'].keys())

    def run(self):

View file

@@ -29,8 +29,9 @@ from ansible.module_utils._text import to_native
 from ansible.module_utils.six import string_types
 from ansible.parsing.yaml.dumper import AnsibleDumper
 from ansible.plugins.loader import module_loader, action_loader, lookup_loader, callback_loader, cache_loader, \
-    vars_loader, connection_loader, strategy_loader, inventory_loader
-from ansible.utils import plugin_docs
+    vars_loader, connection_loader, strategy_loader, inventory_loader, shell_loader, fragment_loader
+from ansible.utils.plugin_docs import BLACKLIST, get_docstring
 try:
     from __main__ import display
 except ImportError:
@@ -71,7 +72,7 @@ class DocCLI(CLI):
                               help='**For internal testing only** Show documentation for all plugins.')
        self.parser.add_option("-t", "--type", action="store", default='module', dest='type', type='choice',
                               help='Choose which plugin type (defaults to "module")',
-                               choices=['cache', 'callback', 'connection', 'inventory', 'lookup', 'module', 'strategy', 'vars'])
+                               choices=['cache', 'callback', 'connection', 'inventory', 'lookup', 'module', 'shell', 'strategy', 'vars'])
        super(DocCLI, self).parse()
@@ -101,6 +102,8 @@ class DocCLI(CLI):
            loader = vars_loader
        elif plugin_type == 'inventory':
            loader = inventory_loader
+        elif plugin_type == 'shell':
+            loader = shell_loader
        else:
            loader = module_loader
@@ -146,7 +149,6 @@ class DocCLI(CLI):
        # process command line list
        text = ''
        for plugin in self.args:
            try:
                # if the plugin lives in a non-python file (eg, win_X.ps1), require the corresponding python file for docs
                filename = loader.find_plugin(plugin, mod_type='.py', ignore_deprecated=True, check_aliases=True)
@@ -158,7 +160,7 @@ class DocCLI(CLI):
                continue
            try:
-                doc, plainexamples, returndocs, metadata = plugin_docs.get_docstring(filename, verbose=(self.options.verbosity > 0))
+                doc, plainexamples, returndocs, metadata = get_docstring(filename, fragment_loader, verbose=(self.options.verbosity > 0))
            except:
                display.vvv(traceback.format_exc())
                display.error("%s %s has a documentation error formatting or is missing documentation." % (plugin_type, plugin))
@@ -229,7 +231,7 @@ class DocCLI(CLI):
                plugin = os.path.splitext(plugin)[0]  # removes the extension
                plugin = plugin.lstrip('_')  # remove underscore from deprecated plugins
-                if plugin not in plugin_docs.BLACKLIST.get(bkey, ()):
+                if plugin not in BLACKLIST.get(bkey, ()):
                    self.plugin_list.add(plugin)
                    display.vvvv("Added %s" % plugin)
@@ -254,7 +256,7 @@ class DocCLI(CLI):
        doc = None
        try:
-            doc, plainexamples, returndocs, metadata = plugin_docs.get_docstring(filename)
+            doc, plainexamples, returndocs, metadata = get_docstring(filename, fragment_loader)
        except:
            display.warning("%s has a documentation formatting error" % plugin)

View file

@@ -1,18 +1,6 @@
 # Copyright (c) 2017 Ansible Project
 # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
 ---
-ALLOW_WORLD_READABLE_TMPFILES:
-    name: Allow world readable temporary files
-    default: False
-    description:
-        - This makes the temporary files created on the machine to be world readable and will issue a warning instead of failing the task.
-        - It is useful when becoming an unprivileged user.
-    env: []
-    ini:
-      - {key: allow_world_readable_tmpfiles, section: defaults}
-    type: boolean
-    yaml: {key: defaults.allow_world_readable_tmpfiles}
-    version_added: "2.1"
 ANSIBLE_COW_SELECTION:
    name: Cowsay filter selection
    default: default
@@ -744,15 +732,6 @@ DEFAULT_MODULE_COMPRESSION:
      - {key: module_compression, section: defaults}
    # vars:
    # - name: ansible_module_compression
-DEFAULT_MODULE_LANG:
-    name: Target language environment
-    default: "{{CONTROLER_LANG}}"
-    description: "Language locale setting to use for modules when they execute on the target, if empty it defaults to 'en_US.UTF-8'"
-    env: [{name: ANSIBLE_MODULE_LANG}]
-    ini:
-      - {key: module_lang, section: defaults}
-    # vars:
-    # - name: ansible_module_lang
 DEFAULT_MODULE_NAME:
    name: Default adhoc module
    default: command
@@ -768,16 +747,6 @@ DEFAULT_MODULE_PATH:
    ini:
      - {key: library, section: defaults}
    type: pathspec
-DEFAULT_MODULE_SET_LOCALE:
-    name: Target locale
-    default: False
-    description: Controls if we set locale for modules when executing on the target.
-    env: [{name: ANSIBLE_MODULE_SET_LOCALE}]
-    ini:
-      - {key: module_set_locale, section: defaults}
-    type: boolean
-    # vars:
-    # - name: ansible_module_locale
 DEFAULT_MODULE_UTILS_PATH:
    name: Module Utils Path
    description: Colon separated paths in which Ansible will search for Module utils files, which are shared by modules.
@@ -851,17 +820,6 @@ DEFAULT_REMOTE_PORT:
      - {key: remote_port, section: defaults}
    type: integer
    yaml: {key: defaults.remote_port}
-DEFAULT_REMOTE_TMP:
-    name: Target temporary directory
-    default: ~/.ansible/tmp
-    description:
-        - Temporary directory to use on targets when executing tasks.
-        - In some cases Ansible may still choose to use a system temporary dir to avoid permission issues.
-    env: [{name: ANSIBLE_REMOTE_TEMP}]
-    ini:
-      - {key: remote_tmp, section: defaults}
-    vars:
-      - name: ansible_remote_tmp
 DEFAULT_REMOTE_USER:
    name: Login/Remote User
    default:

View file

@@ -5,7 +5,7 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

-import os  # used to set lang and for backwards compat get_config
+import os
 from ast import literal_eval
 from jinja2 import Template
@@ -114,7 +114,6 @@ MAGIC_VARIABLE_MAPPING = dict(
    module_compression=('ansible_module_compression', ),
    shell=('ansible_shell_type', ),
    executable=('ansible_shell_executable', ),
-    remote_tmp_dir=('ansible_remote_tmp', ),

    # connection common
    remote_addr=('ansible_ssh_host', 'ansible_host'),

View file

@@ -252,11 +252,38 @@ class AnsibleFileNotFound(AnsibleRuntimeError):
                                                  suppress_extended_error=suppress_extended_error, orig_exc=orig_exc)


-class AnsibleActionSkip(AnsibleRuntimeError):
+# These Exceptions are temporary, using them as flow control until we can get a better solution.
+# DO NOT USE as they will probably be removed soon.
+class AnsibleAction(AnsibleRuntimeError):
+    ''' Base Exception for Action plugin flow control '''
+
+    def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None):
+        super(AnsibleAction, self).__init__(message=message, obj=obj, show_content=show_content,
+                                            suppress_extended_error=suppress_extended_error, orig_exc=orig_exc)
+        if result is None:
+            self.result = {}
+        else:
+            self.result = result
+
+
+class AnsibleActionSkip(AnsibleAction):
     ''' an action runtime skip'''
-    pass
+
+    def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None):
+        super(AnsibleActionSkip, self).__init__(message=message, obj=obj, show_content=show_content,
+                                                suppress_extended_error=suppress_extended_error, orig_exc=orig_exc, result=result)
+        self.result.update({'skipped': True, 'msg': message})


-class AnsibleActionFail(AnsibleRuntimeError):
+class AnsibleActionFail(AnsibleAction):
     ''' an action runtime failure'''
+
+    def __init__(self, message="", obj=None, show_content=True, suppress_extended_error=False, orig_exc=None, result=None):
+        super(AnsibleActionFail, self).__init__(message=message, obj=obj, show_content=show_content,
+                                                suppress_extended_error=suppress_extended_error, orig_exc=orig_exc, result=result)
+        self.result.update({'failed': True, 'msg': message})
+
+
+class AnsibleActionDone(AnsibleAction):
+    ''' an action runtime early exit'''
     pass

View file

@@ -486,6 +486,7 @@ class TaskExecutor:
            self._connection._play_context = self._play_context
            self._set_connection_options(variables, templar)
+            self._set_shell_options(variables, templar)

        # get handler
        self._handler = self._get_action_handler(connection=self._connection, templar=templar)
@@ -774,6 +775,15 @@ class TaskExecutor:
        # set options with 'templated vars' specific to this plugin
        self._connection.set_options(var_options=options)
+        self._set_shell_options(final_vars, templar)
+
+    def _set_shell_options(self, variables, templar):
+        option_vars = C.config.get_plugin_vars('shell', self._connection._shell._load_name)
+        options = {}
+        for k in option_vars:
+            if k in variables:
+                options[k] = templar.template(variables[k])
+        self._connection._shell.set_options(var_options=options)

    def _get_action_handler(self, connection, templar):
        '''

View file

@@ -97,6 +97,7 @@ class InventoryData(object):
                        'You can correct this by setting ansible_python_interpreter for localhost')
                new_host.set_variable("ansible_python_interpreter", py_interp)
            new_host.set_variable("ansible_connection", 'local')
+            new_host.set_variable("ansible_remote_tmp", C.DEFAULT_LOCAL_TMP)
            self.localhost = new_host

View file

@@ -37,9 +37,25 @@ FILE_ATTRIBUTES = {
    'Z': 'compresseddirty',
 }

-# ansible modules can be written in any language.  To simplify
-# development of Python modules, the functions available here can
-# be used to do many common tasks
+PASS_VARS = {
+    'check_mode': 'check_mode',
+    'debug': '_debug',
+    'diff': '_diff',
+    'module_name': '_name',
+    'no_log': 'no_log',
+    'selinux_special_fs': '_selinux_special_fs',
+    'shell_executable': '_shell',
+    'socket': '_socket_path',
+    'syslog_facility': '_syslog_facility',
+    'verbosity': '_verbosity',
+    'version': 'ansible_version',
+}
+
+PASS_BOOLS = ('no_log', 'debug', 'diff')
+
+# Ansible modules can be written in any language.
+# The functions available here can be used to do many common tasks,
+# to simplify development of Python modules.

 import locale
 import os
@@ -90,7 +106,7 @@ NoneType = type(None)
 try:
    from collections.abc import KeysView
    SEQUENCETYPE = (Sequence, frozenset, KeysView)
-except:
+except ImportError:
    SEQUENCETYPE = (Sequence, frozenset)

 try:
@@ -826,11 +842,12 @@ class AnsibleModule(object):
        self._clean = {}

        self.aliases = {}
-        self._legal_inputs = ['_ansible_check_mode', '_ansible_no_log', '_ansible_debug', '_ansible_diff', '_ansible_verbosity',
-                              '_ansible_selinux_special_fs', '_ansible_module_name', '_ansible_version', '_ansible_syslog_facility',
-                              '_ansible_socket', '_ansible_shell_executable']
+        self._legal_inputs = ['_ansible_%s' % k for k in PASS_VARS]
        self._options_context = list()

+        # set tempdir to remote tmp
+        self.tempdir = os.environ.get('ANSIBLE_REMOTE_TEMP', None)
+
        if add_file_common_args:
            for k, v in FILE_COMMON_ARGUMENTS.items():
                if k not in self.argument_spec:
@@ -1634,44 +1651,17 @@ class AnsibleModule(object):
        for (k, v) in list(param.items()):

-            if k == '_ansible_check_mode' and v:
-                self.check_mode = True
-            elif k == '_ansible_no_log':
-                self.no_log = self.boolean(v)
-            elif k == '_ansible_debug':
-                self._debug = self.boolean(v)
-            elif k == '_ansible_diff':
-                self._diff = self.boolean(v)
-            elif k == '_ansible_verbosity':
-                self._verbosity = v
-            elif k == '_ansible_selinux_special_fs':
-                self._selinux_special_fs = v
-            elif k == '_ansible_syslog_facility':
-                self._syslog_facility = v
-            elif k == '_ansible_version':
-                self.ansible_version = v
-            elif k == '_ansible_module_name':
-                self._name = v
-            elif k == '_ansible_socket':
-                self._socket_path = v
-            elif k == '_ansible_shell_executable' and v:
-                self._shell = v
-            elif check_invalid_arguments and k not in legal_inputs:
+            if check_invalid_arguments and k not in legal_inputs:
                unsupported_parameters.add(k)

+            elif k.startswith('_ansible_'):
+                # handle setting internal properties from internal ansible vars
+                key = k.replace('_ansible_', '')
+                if key in PASS_BOOLS:
+                    setattr(self, PASS_VARS[key], self.boolean(v))
+                else:
+                    setattr(self, PASS_VARS[key], v)
                # clean up internal params:
-            if k.startswith('_ansible_'):
                del self.params[k]

        if unsupported_parameters:
@@ -2202,7 +2192,7 @@ class AnsibleModule(object):
        except:
            # we don't have access to the cwd, probably because of sudo.
            # Try and move to a neutral location to prevent errors
-            for cwd in [os.path.expandvars('$HOME'), tempfile.gettempdir()]:
+            for cwd in [self.tempdir, os.path.expandvars('$HOME'), tempfile.gettempdir()]:
                try:
                    if os.access(cwd, os.F_OK | os.R_OK):
                        os.chdir(cwd)

View file

@@ -973,6 +973,9 @@ def fetch_url(module, url, data=None, headers=None, method=None,
    if not HAS_URLPARSE:
        module.fail_json(msg='urlparse is not installed')

+    # ensure we use proper tempdir
+    tempfile.tempdir = module.tempdir
+
    # Get validate_certs from the module params
    validate_certs = module.params.get('validate_certs', True)

View file

@@ -97,32 +97,32 @@ notes:
 '''

 EXAMPLES = r'''
-# Example from Ansible Playbooks
-- copy:
+- name: example copying file with owner and permissions
+  copy:
    src: /srv/myfiles/foo.conf
    dest: /etc/foo.conf
    owner: foo
    group: foo
    mode: 0644

-# The same example as above, but using a symbolic mode equivalent to 0644
-- copy:
+- name: The same example as above, but using a symbolic mode equivalent to 0644
+  copy:
    src: /srv/myfiles/foo.conf
    dest: /etc/foo.conf
    owner: foo
    group: foo
    mode: u=rw,g=r,o=r

-# Another symbolic mode example, adding some permissions and removing others
-- copy:
+- name: Another symbolic mode example, adding some permissions and removing others
+  copy:
    src: /srv/myfiles/foo.conf
    dest: /etc/foo.conf
    owner: foo
    group: foo
    mode: u+rw,g-wx,o-rwx

-# Copy a new "ntp.conf file into place, backing up the original if it differs from the copied version
-- copy:
+- name: Copy a new "ntp.conf file into place, backing up the original if it differs from the copied version
+  copy:
    src: /mine/ntp.conf
    dest: /etc/ntp.conf
    owner: root
@@ -130,33 +130,23 @@ EXAMPLES = r'''
    mode: 0644
    backup: yes

-# Copy a new "sudoers" file into place, after passing validation with visudo
-- copy:
+- name: Copy a new "sudoers" file into place, after passing validation with visudo
+  copy:
    src: /mine/sudoers
    dest: /etc/sudoers
    validate: /usr/sbin/visudo -cf %s

-# Copy a "sudoers" file on the remote machine for editing
-- copy:
+- name: Copy a "sudoers" file on the remote machine for editing
+  copy:
    src: /etc/sudoers
    dest: /etc/sudoers.edit
    remote_src: yes
    validate: /usr/sbin/visudo -cf %s

-# Create a CSV file from your complete inventory using an inline template
-- hosts: all
-  tasks:
-    - copy:
-        content: |
-          HOSTNAME;IPADDRESS;FQDN;OSNAME;OSVERSION;PROCESSOR;ARCHITECTURE;MEMORY;
-          {% for host in hostvars %}
-          {% set vars = hostvars[host|string] %}
-          {{ vars.ansible_hostname }};{{ vars.remote_host }};{{ vars.ansible_fqdn }};{{ vars.ansible_distribution }};{{ vars.ansible_distribution_version }};{{ vars.ansible_processor[1] }};{{ vars.ansible_architecture }};{{ (vars.ansible_memtotal_mb/1024)|round|int }}; # NOQA
-          {% endfor %}
-        dest: /some/path/systems.csv
-        backup: yes
-      run_once: yes
-      delegate_to: localhost
+- name: Copy using the 'content' for inline data
+  copy:
+    content: '# This file was moved to /etc/other.conf'
+    dest: /etc/mine.conf'
 '''

 RETURN = r'''

View file

@@ -28,8 +28,7 @@ options:
    required: true
  mode:
    description:
-      - if C(status), obtain the status; if C(cleanup), clean up the async job cache
-        located in C(~/.ansible_async/) for the specified job I(jid).
+      - if C(status), obtain the status; if C(cleanup), clean up the async job cache (by default in C(~/.ansible_async/)) for the specified job I(jid).
    choices: [ "status", "cleanup" ]
    default: "status"
 notes:
@@ -57,8 +56,10 @@ def main():
    mode = module.params['mode']
    jid = module.params['jid']

+    async_dir = os.environ.get('ANSIBLE_ASYNC_DIR', '~/.ansible_async')
+
    # setup logging directory
-    logdir = os.path.expanduser("~/.ansible_async")
+    logdir = os.path.expanduser(async_dir)
    log_path = os.path.join(logdir, jid)

    if not os.path.exists(log_path):

View file

@@ -216,8 +216,10 @@ if __name__ == '__main__':
    cmd = wrapped_module
    step = 5

+    async_dir = os.environ.get('ANSIBLE_ASYNC_DIR', '~/.ansible_async')
+
    # setup job output directory
-    jobdir = os.path.expanduser("~/.ansible_async")
+    jobdir = os.path.expanduser(async_dir)
    job_path = os.path.join(jobdir, jid)

    if not os.path.exists(jobdir):

View file

@@ -49,7 +49,6 @@ except ImportError:
 __all__ = ['PlayContext']

-# TODO: needs to be configurable
 b_SU_PROMPT_LOCALIZATIONS = [
    to_bytes('Password'),
@@ -136,7 +135,6 @@ class PlayContext(Base):
    # connection fields, some are inherited from Base:
    # (connection, port, remote_user, environment, no_log)
    _remote_addr = FieldAttribute(isa='string')
-    _remote_tmp_dir = FieldAttribute(isa='string', default=C.DEFAULT_REMOTE_TMP)
    _password = FieldAttribute(isa='string')
    _timeout = FieldAttribute(isa='int', default=C.DEFAULT_TIMEOUT)
    _connection_user = FieldAttribute(isa='string')

View file

@@ -1,19 +1,6 @@
-# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
-#
-# This file is part of Ansible
-#
-# Ansible is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# Ansible is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+# Copyright: (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+# Copyright: (c) 2018, Ansible Project
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

 # Make coding more python3-ish
 from __future__ import (absolute_import, division, print_function)
@@ -65,13 +52,14 @@ class ActionBase(with_metaclass(ABCMeta, object)):
        self._loader = loader
        self._templar = templar
        self._shared_loader_obj = shared_loader_obj
-        # Backwards compat: self._display isn't really needed, just import the global display and use that.
-        self._display = display
        self._cleanup_remote_tmp = False

        self._supports_check_mode = True
        self._supports_async = False

+        # Backwards compat: self._display isn't really needed, just import the global display and use that.
+        self._display = display
+
    @abstractmethod
    def run(self, tmp=None, task_vars=None):
        """ Action Plugins should implement this method to perform their
@@ -99,6 +87,11 @@ class ActionBase(with_metaclass(ABCMeta, object)):
        elif self._task.async_val and self._play_context.check_mode:
            raise AnsibleActionFail('check mode and async cannot be used on same task.')

+        if not tmp and self._early_needs_tmp_path():
+            self._make_tmp_path()
+        else:
+            self._connection._shell.tempdir = tmp
+
        return result

    def _remote_file_exists(self, path):
@@ -236,16 +229,20 @@ class ActionBase(with_metaclass(ABCMeta, object)):
        if remote_user is None:
            remote_user = self._play_context.remote_user

+        try:
+            admin_users = self._connection._shell.get_option('admin_users') + [remote_user]
+        except KeyError:
+            admin_users = ['root', remote_user]  # plugin does not support admin_users
+
+        try:
+            remote_tmp = self._connection._shell.get_option('remote_temp')
+        except KeyError:
+            remote_tmp = '~/ansible'
+
+        # deal with tmpdir creation
        basefile = 'ansible-tmp-%s-%s' % (time.time(), random.randint(0, 2**48))
-        use_system_tmp = False
-        if self._play_context.become and self._play_context.become_user not in ('root', remote_user):
-            use_system_tmp = True
-        tmp_mode = 0o700
-        tmpdir = self._remote_expand_user(self._play_context.remote_tmp_dir, sudoable=False)
-        cmd = self._connection._shell.mkdtemp(basefile, use_system_tmp, tmp_mode, tmpdir)
+        use_system_tmp = bool(self._play_context.become and self._play_context.become_user not in admin_users)
+        tmpdir = self._remote_expand_user(remote_tmp, sudoable=False)
+        cmd = self._connection._shell.mkdtemp(basefile=basefile, system=use_system_tmp, tmpdir=tmpdir)
        result = self._low_level_execute_command(cmd, sudoable=False)

        # error handling on this seems a little aggressive?
@@ -287,11 +284,14 @@ class ActionBase(with_metaclass(ABCMeta, object)):
        if rc == '/':
            raise AnsibleError('failed to resolve remote temporary directory from %s: `%s` returned empty string' % (basefile, cmd))

+        self._connection._shell.tempdir = rc
+
+        if not use_system_tmp:
+            self._connection._shell.env.update({'ANSIBLE_REMOTE_TEMP': self._connection._shell.tempdir})
        return rc

    def _should_remove_tmp_path(self, tmp_path):
        '''Determine if temporary path should be deleted or kept by user request/config'''
        return tmp_path and self._cleanup_remote_tmp and not C.DEFAULT_KEEP_REMOTE_FILES and "-tmp-" in tmp_path

    def _remove_tmp_path(self, tmp_path):
@@ -320,7 +320,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
        if isinstance(data, dict):
            data = jsonify(data)

-        afd, afile = tempfile.mkstemp()
+        afd, afile = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
        afo = os.fdopen(afd, 'wb')
        try:
            data = to_bytes(data, errors='surrogate_or_strict')
@@ -393,7 +393,12 @@ class ActionBase(with_metaclass(ABCMeta, object)):
            # we have a need for it, at which point we'll have to do something different.
            return remote_paths

-        if self._play_context.become and self._play_context.become_user and self._play_context.become_user not in ('root', remote_user):
+        try:
+            admin_users = self._connection._shell.get_option('admin_users')
+        except KeyError:
+            admin_users = ['root']  # plugin does not support admin users
+
+        if self._play_context.become and self._play_context.become_user and self._play_context.become_user not in admin_users + [remote_user]:
            # Unprivileged user that's different than the ssh user.  Let's get
            # to work!
@@ -420,12 +425,12 @@ class ActionBase(with_metaclass(ABCMeta, object)):
                    raise AnsibleError('Failed to set file mode on remote temporary files (rc: {0}, err: {1})'.format(res['rc'], to_native(res['stderr'])))

            res = self._remote_chown(remote_paths, self._play_context.become_user)
-            if res['rc'] != 0 and remote_user == 'root':
+            if res['rc'] != 0 and remote_user in admin_users:
                # chown failed even if remove_user is root
-                raise AnsibleError('Failed to change ownership of the temporary files Ansible needs to create despite connecting as root. '
+                raise AnsibleError('Failed to change ownership of the temporary files Ansible needs to create despite connecting as a privileged user. '
                                   'Unprivileged become user would be unable to read the file.')
            elif res['rc'] != 0:
-                if C.ALLOW_WORLD_READABLE_TMPFILES:
+                if self._connection._shell('allow_world_readable_temp'):
                    # chown and fs acls failed -- do things this insecure
                    # way only if the user opted in in the config file
                    display.warning('Using world-readable permissions for temporary files Ansible needs to create when becoming an unprivileged user. '
@@ -534,33 +539,46 @@ class ActionBase(with_metaclass(ABCMeta, object)):
        finally:
            return x  # pylint: disable=lost-exception

-    def _remote_expand_user(self, path, sudoable=True):
-        ''' takes a remote path and performs tilde expansion on the remote host '''
-        if not path.startswith('~'):
+    def _remote_expand_user(self, path, sudoable=True, pathsep=None):
+        ''' takes a remote path and performs tilde/$HOME expansion on the remote host '''
+
+        # FIXME: Windows paths may start with "~ instead of just ~
+        # We only expand ~/path and ~username/path
+        if not path.startswith('~'):
            return path

-        # FIXME: Can't use os.path.sep for Windows paths.
+        # Per Jborean, we don't have to worry about Windows as we don't have a notion of user's home
+        # dir there.
        split_path = path.split(os.path.sep, 1)
        expand_path = split_path[0]

        if sudoable and expand_path == '~' and self._play_context.become and self._play_context.become_user:
            expand_path = '~%s' % self._play_context.become_user

+        # use shell to construct appropriate command and execute
        cmd = self._connection._shell.expand_user(expand_path)
        data = self._low_level_execute_command(cmd, sudoable=False)

        try:
            initial_fragment = data['stdout'].strip().splitlines()[-1]
        except IndexError:
            initial_fragment = None

        if not initial_fragment:
-            # Something went wrong trying to expand the path remotely.  Return
+            # Something went wrong trying to expand the path remotely. Try using pwd, if not, return
            # the original string
-            return path
+            cmd = self._connection._shell.pwd()
+            pwd = self._low_level_execute_command(cmd, sudoable=False).get('stdout', '').strip()
+            if pwd:
+                expanded = pwd
+            else:
+                expanded = path

-        if len(split_path) > 1:
-            return self._connection._shell.join_path(initial_fragment, *split_path[1:])
+        elif len(split_path) > 1:
+            expanded = self._connection._shell.join_path(initial_fragment, *split_path[1:])
        else:
-            return initial_fragment
+            expanded = initial_fragment
+
+        return expanded

    def _strip_success_message(self, data):
        '''
@@ -655,8 +673,11 @@ class ActionBase(with_metaclass(ABCMeta, object)):
        if not self._is_pipelining_enabled(module_style, wrap_async):

            # we might need remote tmp dir
-            if not tmp or 'tmp' not in tmp:
-                tmp = self._make_tmp_path()
+            if not tmp:
+                if not self._connection._shell.tempdir or tmp is None or 'tmp' not in tmp:
+                    tmp = self._make_tmp_path()
+                else:
+                    tmp = self._connection._shell.tempdir

            remote_module_filename = self._connection._shell.get_remote_filename(module_path)
            remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename)
@@ -733,14 +754,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
            else:
                cmd = remote_module_path

-            rm_tmp = None
-            if self._should_remove_tmp_path(tmp) and not persist_files and delete_remote_tmp:
-                if not self._play_context.become or self._play_context.become_user == 'root':
-                    # not sudoing or sudoing to root, so can cleanup files in the same step
-                    rm_tmp = tmp
-            cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path, rm_tmp=rm_tmp).strip()
+            cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path).strip()

            # Fix permissions of the tmp path and tmp files. This should be called after all files have been transferred.
            if remote_files:
@@ -756,15 +770,12 @@ class ActionBase(with_metaclass(ABCMeta, object)):
        # NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
        # get internal info before cleaning
-        tmpdir_delete = (not data.pop("_ansible_suppress_tmpdir_delete", False) and wrap_async)
+        if data.pop("_ansible_suppress_tmpdir_delete", False):
+            self._cleanup_remote_tmp = False

        # remove internal keys
        remove_internal_keys(data)

-        # cleanup tmp?
-        if (self._play_context.become and self._play_context.become_user != 'root') and not persist_files and delete_remote_tmp or tmpdir_delete:
-            self._remove_tmp_path(tmp)
-
        # FIXME: for backwards compat, figure out if still makes sense
        if wrap_async:
            data['changed'] = True

View file

@@ -25,7 +25,8 @@ import os.path
 import re
 import tempfile

-from ansible.errors import AnsibleError
+from ansible import constants as C
+from ansible.errors import AnsibleError, AnsibleAction, AnsibleActionDone, AnsibleActionFail
 from ansible.module_utils._text import to_native, to_text
 from ansible.module_utils.parsing.convert_bool import boolean
 from ansible.plugins.action import ActionBase
@@ -39,7 +40,7 @@ class ActionModule(ActionBase):
    def _assemble_from_fragments(self, src_path, delimiter=None, compiled_regexp=None, ignore_hidden=False, decrypt=True):
        ''' assemble a file from a directory of fragments '''

-        tmpfd, temp_path = tempfile.mkstemp()
+        tmpfd, temp_path = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
        tmp = os.fdopen(tmpfd, 'wb')
        delimit_me = False
        add_newline = False
@@ -96,78 +97,73 @@ class ActionModule(ActionBase):
        ignore_hidden = self._task.args.get('ignore_hidden', False)
        decrypt = self._task.args.get('decrypt', True)

+        try:
            if src is None or dest is None:
-                result['failed'] = True
-                result['msg'] = "src and dest are required"
-                return result
+                raise AnsibleActionFail("src and dest are required")

            if boolean(remote_src, strict=False):
                result.update(self._execute_module(tmp=tmp, task_vars=task_vars))
-                return result
+                raise AnsibleActionDone()
            else:
                try:
                    src = self._find_needle('files', src)
                except AnsibleError as e:
-                    result['failed'] = True
-                    result['msg'] = to_native(e)
-                    return result
+                    raise AnsibleActionFail(to_native(e))

-            if not tmp:
-                tmp = self._make_tmp_path()
-
            if not os.path.isdir(src):
-                result['failed'] = True
-                result['msg'] = u"Source (%s) is not a directory" % src
-                return result
+                raise AnsibleActionFail(u"Source (%s) is not a directory" % src)

            _re = None
            if regexp is not None:
                _re = re.compile(regexp)

            # Does all work assembling the file
            path = self._assemble_from_fragments(src, delimiter, _re, ignore_hidden, decrypt)

            path_checksum = checksum_s(path)
            dest = self._remote_expand_user(dest)
            dest_stat = self._execute_remote_stat(dest, all_vars=task_vars, follow=follow, tmp=tmp)

            diff = {}

            # setup args for running modules
            new_module_args = self._task.args.copy()

            # clean assemble specific options
            for opt in ['remote_src', 'regexp', 'delimiter', 'ignore_hidden', 'decrypt']:
                if opt in new_module_args:
                    del new_module_args[opt]

            new_module_args.update(
                dict(
                    dest=dest,
                    original_basename=os.path.basename(src),
                )
            )

            if path_checksum != dest_stat['checksum']:
                if self._play_context.diff:
                    diff = self._get_diff_data(dest, path, task_vars)

-                remote_path = self._connection._shell.join_path(tmp, 'src')
+                remote_path = self._connection._shell.join_path(self._connection._shell.tempdir, 'src')
                xfered = self._transfer_file(path, remote_path)

                # fix file permissions when the copy is done as a different user
-                self._fixup_perms2((tmp, remote_path))
+                self._fixup_perms2((self._connection._shell.tempdir, remote_path))

                new_module_args.update(dict(src=xfered,))

-                res = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=False)
+                res = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp)
                if diff:
                    res['diff'] = diff
                result.update(res)
            else:
-                result.update(self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp, delete_remote_tmp=False))
+                result.update(self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp))

-        self._remove_tmp_path(tmp)
+        except AnsibleAction as e:
+            result.update(e.result)
+        finally:
+            self._remove_tmp_path(self._connection._shell.tempdir)

        return result

View file

@@ -22,4 +22,8 @@ class ActionModule(ActionBase):
        wrap_async = self._task.async_val and not self._connection.has_native_async
        results = merge_hash(results, self._execute_module(tmp=tmp, task_vars=task_vars, wrap_async=wrap_async))

+        if not wrap_async:
+            # remove a temporary path we created
+            self._remove_tmp_path(self._connection._shell.tempdir)
+
        return results

View file

@ -26,8 +26,8 @@ import os.path
import stat import stat
import tempfile import tempfile
import traceback import traceback
from itertools import chain
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFileNotFound from ansible.errors import AnsibleError, AnsibleFileNotFound
from ansible.module_utils._text import to_bytes, to_native, to_text from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.parsing.convert_bool import boolean from ansible.module_utils.parsing.convert_bool import boolean
@ -186,12 +186,13 @@ def _walk_dirs(topdir, base_path=None, local_follow=False, trailing_slash_detect
class ActionModule(ActionBase): class ActionModule(ActionBase):
TRANSFERS_FILES = True
def _create_remote_file_args(self, module_args): def _create_remote_file_args(self, module_args):
# remove action plugin only keys # remove action plugin only keys
return dict((k, v) for k, v in module_args.items() if k not in ('content', 'decrypt')) return dict((k, v) for k, v in module_args.items() if k not in ('content', 'decrypt'))
def _copy_file(self, source_full, source_rel, content, content_tempfile, def _copy_file(self, source_full, source_rel, content, content_tempfile, dest, task_vars, tmp):
dest, task_vars, tmp, delete_remote_tmp):
decrypt = boolean(self._task.args.get('decrypt', True), strict=False) decrypt = boolean(self._task.args.get('decrypt', True), strict=False)
follow = boolean(self._task.args.get('follow', False), strict=False) follow = boolean(self._task.args.get('follow', False), strict=False)
force = boolean(self._task.args.get('force', 'yes'), strict=False) force = boolean(self._task.args.get('force', 'yes'), strict=False)
@ -206,7 +207,6 @@ class ActionModule(ActionBase):
except AnsibleFileNotFound as e: except AnsibleFileNotFound as e:
result['failed'] = True result['failed'] = True
result['msg'] = "could not find src=%s, %s" % (source_full, to_text(e)) result['msg'] = "could not find src=%s, %s" % (source_full, to_text(e))
self._remove_tmp_path(tmp)
return result return result
# Get the local mode and set if user wanted it preserved # Get the local mode and set if user wanted it preserved
@ -221,13 +221,7 @@ class ActionModule(ActionBase):
if self._connection._shell.path_has_trailing_slash(dest): if self._connection._shell.path_has_trailing_slash(dest):
dest_file = self._connection._shell.join_path(dest, source_rel) dest_file = self._connection._shell.join_path(dest, source_rel)
else: else:
dest_file = self._connection._shell.join_path(dest) dest_file = dest
# Create a tmp path if missing only if this is not recursive.
# If this is recursive we already have a tmp path.
if delete_remote_tmp:
if tmp is None or "-tmp-" not in tmp:
tmp = self._make_tmp_path()
# Attempt to get remote file info # Attempt to get remote file info
dest_status = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=follow, tmp=tmp, checksum=force) dest_status = self._execute_remote_stat(dest_file, all_vars=task_vars, follow=follow, tmp=tmp, checksum=force)
@ -237,7 +231,6 @@ class ActionModule(ActionBase):
if content is not None: if content is not None:
# If source was defined as content remove the temporary file and fail out. # If source was defined as content remove the temporary file and fail out.
self._remove_tempfile_if_content_defined(content, content_tempfile) self._remove_tempfile_if_content_defined(content, content_tempfile)
self._remove_tmp_path(tmp)
result['failed'] = True result['failed'] = True
result['msg'] = "can not use content with a dir as dest" result['msg'] = "can not use content with a dir as dest"
return result return result
@ -265,7 +258,7 @@ class ActionModule(ActionBase):
return result return result
# Define a remote directory that we will copy the file to. # Define a remote directory that we will copy the file to.
tmp_src = self._connection._shell.join_path(tmp, 'source') tmp_src = self._connection._shell.join_path(self._connection._shell.tempdir, 'source')
remote_path = None remote_path = None
@ -280,7 +273,7 @@ class ActionModule(ActionBase):
# fix file permissions when the copy is done as a different user # fix file permissions when the copy is done as a different user
if remote_path: if remote_path:
self._fixup_perms2((tmp, remote_path)) self._fixup_perms2((self._connection._shell.tempdir, remote_path))
if raw: if raw:
# Continue to next iteration if raw is defined. # Continue to next iteration if raw is defined.
@ -301,9 +294,7 @@ class ActionModule(ActionBase):
if lmode: if lmode:
new_module_args['mode'] = lmode new_module_args['mode'] = lmode
module_return = self._execute_module(module_name='copy', module_return = self._execute_module(module_name='copy', module_args=new_module_args, task_vars=task_vars, tmp=tmp)
module_args=new_module_args, task_vars=task_vars,
tmp=tmp, delete_remote_tmp=delete_remote_tmp)
else: else:
# no need to transfer the file, already correct hash, but still need to call # no need to transfer the file, already correct hash, but still need to call
@ -312,8 +303,6 @@ class ActionModule(ActionBase):
self._loader.cleanup_tmp_file(source_full) self._loader.cleanup_tmp_file(source_full)
if raw: if raw:
# Continue to next iteration if raw is defined.
self._remove_tmp_path(tmp)
return None return None
# Fix for https://github.com/ansible/ansible-modules-core/issues/1568. # Fix for https://github.com/ansible/ansible-modules-core/issues/1568.
@ -339,9 +328,7 @@ class ActionModule(ActionBase):
new_module_args['mode'] = lmode new_module_args['mode'] = lmode
# Execute the file module. # Execute the file module.
module_return = self._execute_module(module_name='file', module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp)
module_args=new_module_args, task_vars=task_vars,
tmp=tmp, delete_remote_tmp=delete_remote_tmp)
if not module_return.get('checksum'): if not module_return.get('checksum'):
module_return['checksum'] = local_checksum module_return['checksum'] = local_checksum
@ -379,7 +366,7 @@ class ActionModule(ActionBase):
def _create_content_tempfile(self, content): def _create_content_tempfile(self, content):
''' Create a tempfile containing defined content ''' ''' Create a tempfile containing defined content '''
fd, content_tempfile = tempfile.mkstemp() fd, content_tempfile = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
f = os.fdopen(fd, 'wb') f = os.fdopen(fd, 'wb')
content = to_bytes(content) content = to_bytes(content)
try: try:
@ -402,6 +389,9 @@ class ActionModule(ActionBase):
result = super(ActionModule, self).run(tmp, task_vars) result = super(ActionModule, self).run(tmp, task_vars)
if tmp is None:
tmp = self._connection._shell.tempdir
source = self._task.args.get('src', None) source = self._task.args.get('src', None)
content = self._task.args.get('content', None) content = self._task.args.get('content', None)
dest = self._task.args.get('dest', None) dest = self._task.args.get('dest', None)
@ -493,19 +483,6 @@ class ActionModule(ActionBase):
# Used to cut down on command calls when not recursive. # Used to cut down on command calls when not recursive.
module_executed = False module_executed = False
# Optimization: Can delete remote_tmp on the first call if we're only
# copying a single file. Otherwise we keep the remote_tmp until it
# is no longer needed.
delete_remote_tmp = False
if sum(len(f) for f in chain(source_files.values())) == 1:
# Tell _execute_module to delete the file if there is one file.
delete_remote_tmp = True
# If this is a recursive action create a tmp path that we can share as the _exec_module create is too late.
if not delete_remote_tmp:
if tmp is None or "-tmp-" not in tmp:
tmp = self._make_tmp_path()
# expand any user home dir specifier # expand any user home dir specifier
dest = self._remote_expand_user(dest) dest = self._remote_expand_user(dest)
@ -513,7 +490,7 @@ class ActionModule(ActionBase):
for source_full, source_rel in source_files['files']: for source_full, source_rel in source_files['files']:
# copy files over. This happens first as directories that have # copy files over. This happens first as directories that have
# a file do not need to be created later # a file do not need to be created later
module_return = self._copy_file(source_full, source_rel, content, content_tempfile, dest, task_vars, tmp, delete_remote_tmp) module_return = self._copy_file(source_full, source_rel, content, content_tempfile, dest, task_vars, tmp)
if module_return is None: if module_return is None:
continue continue
@ -539,9 +516,7 @@ class ActionModule(ActionBase):
new_module_args['state'] = 'directory' new_module_args['state'] = 'directory'
new_module_args['mode'] = self._task.args.get('directory_mode', None) new_module_args['mode'] = self._task.args.get('directory_mode', None)
module_return = self._execute_module(module_name='file', module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp)
module_args=new_module_args, task_vars=task_vars,
tmp=tmp, delete_remote_tmp=delete_remote_tmp)
module_executed = True module_executed = True
changed = changed or module_return.get('changed', False) changed = changed or module_return.get('changed', False)
@ -553,15 +528,11 @@ class ActionModule(ActionBase):
new_module_args['state'] = 'link' new_module_args['state'] = 'link'
new_module_args['force'] = True new_module_args['force'] = True
module_return = self._execute_module(module_name='file', module_return = self._execute_module(module_name='file', module_args=new_module_args, task_vars=task_vars, tmp=tmp)
module_args=new_module_args, task_vars=task_vars,
tmp=tmp, delete_remote_tmp=delete_remote_tmp)
module_executed = True module_executed = True
if module_return.get('failed'): if module_return.get('failed'):
result.update(module_return) result.update(module_return)
if not delete_remote_tmp:
self._remove_tmp_path(tmp)
return result return result
changed = changed or module_return.get('changed', False) changed = changed or module_return.get('changed', False)
@ -571,13 +542,12 @@ class ActionModule(ActionBase):
if 'path' in module_return and 'dest' not in module_return: if 'path' in module_return and 'dest' not in module_return:
module_return['dest'] = module_return['path'] module_return['dest'] = module_return['path']
# Delete tmp path if we were recursive or if we did not execute a module.
if not delete_remote_tmp or (delete_remote_tmp and not module_executed):
self._remove_tmp_path(tmp)
if module_executed and len(source_files['files']) == 1: if module_executed and len(source_files['files']) == 1:
result.update(module_return) result.update(module_return)
else: else:
result.update(dict(dest=dest, src=source, changed=changed)) result.update(dict(dest=dest, src=source, changed=changed))
# Delete tmp path
self._remove_tmp_path(self._connection._shell.tempdir)
return result return result
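The upshot of this hunk: the copy action stops tracking a per-call delete_remote_tmp flag and instead removes the shell plugin's tempdir once, after every file and directory has been handled. A minimal standalone sketch of that shape (plain Python, a local tempdir standing in for the remote one; none of these helper names are the real ActionBase API):

```python
import shutil
import tempfile


def copy_like_action(sources, copy_one):
    """Run copy_one() per source against a single shared tempdir, clean up once."""
    tempdir = tempfile.mkdtemp(prefix='ansible-tmp-')  # stand-in for shell.tempdir
    result = {'changed': False}
    try:
        for src in sources:
            module_return = copy_one(src, tempdir)  # may stage files in tempdir
            result['changed'] = result['changed'] or module_return.get('changed', False)
    finally:
        # one cleanup point replaces the old delete_remote_tmp bookkeeping
        shutil.rmtree(tempdir, ignore_errors=True)
    return result
```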

View file

@ -44,170 +44,174 @@ class ActionModule(ActionBase):
result = super(ActionModule, self).run(tmp, task_vars) result = super(ActionModule, self).run(tmp, task_vars)
if self._play_context.check_mode: try:
result['skipped'] = True if self._play_context.check_mode:
result['msg'] = 'check mode not (yet) supported for this module' result['skipped'] = True
return result result['msg'] = 'check mode not (yet) supported for this module'
return result
source = self._task.args.get('src', None) source = self._task.args.get('src', None)
dest = self._task.args.get('dest', None) dest = self._task.args.get('dest', None)
flat = boolean(self._task.args.get('flat'), strict=False) flat = boolean(self._task.args.get('flat'), strict=False)
fail_on_missing = boolean(self._task.args.get('fail_on_missing'), strict=False) fail_on_missing = boolean(self._task.args.get('fail_on_missing'), strict=False)
validate_checksum = boolean(self._task.args.get('validate_checksum', validate_checksum = boolean(self._task.args.get('validate_checksum',
self._task.args.get('validate_md5', True)), self._task.args.get('validate_md5', True)),
strict=False) strict=False)
# validate source and dest are strings FIXME: use basic.py and module specs # validate source and dest are strings FIXME: use basic.py and module specs
if not isinstance(source, string_types): if not isinstance(source, string_types):
result['msg'] = "Invalid type supplied for source option, it must be a string" result['msg'] = "Invalid type supplied for source option, it must be a string"
if not isinstance(dest, string_types): if not isinstance(dest, string_types):
result['msg'] = "Invalid type supplied for dest option, it must be a string" result['msg'] = "Invalid type supplied for dest option, it must be a string"
# validate_md5 is the deprecated way to specify validate_checksum # validate_md5 is the deprecated way to specify validate_checksum
if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args: if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args:
result['msg'] = "validate_checksum and validate_md5 cannot both be specified" result['msg'] = "validate_checksum and validate_md5 cannot both be specified"
if 'validate_md5' in self._task.args: if 'validate_md5' in self._task.args:
display.deprecated('Use validate_checksum instead of validate_md5', version='2.8') display.deprecated('Use validate_checksum instead of validate_md5', version='2.8')
if source is None or dest is None: if source is None or dest is None:
result['msg'] = "src and dest are required" result['msg'] = "src and dest are required"
if result.get('msg'): if result.get('msg'):
result['failed'] = True result['failed'] = True
return result return result
source = self._connection._shell.join_path(source) source = self._connection._shell.join_path(source)
source = self._remote_expand_user(source) source = self._remote_expand_user(source)
remote_checksum = None remote_checksum = None
if not self._play_context.become: if not self._play_context.become:
# calculate checksum for the remote file, don't bother if using become as slurp will be used # calculate checksum for the remote file, don't bother if using become as slurp will be used
# Force remote_checksum to follow symlinks because fetch always follows symlinks # Force remote_checksum to follow symlinks because fetch always follows symlinks
remote_checksum = self._remote_checksum(source, all_vars=task_vars, follow=True) remote_checksum = self._remote_checksum(source, all_vars=task_vars, follow=True)
# use slurp if permissions are lacking or privilege escalation is needed # use slurp if permissions are lacking or privilege escalation is needed
remote_data = None remote_data = None
if remote_checksum in ('1', '2', None): if remote_checksum in ('1', '2', None):
slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), task_vars=task_vars, tmp=tmp) slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), task_vars=task_vars, tmp=tmp)
if slurpres.get('failed'): if slurpres.get('failed'):
if not fail_on_missing and (slurpres.get('msg').startswith('file not found') or remote_checksum == '1'): if not fail_on_missing and (slurpres.get('msg').startswith('file not found') or remote_checksum == '1'):
result['msg'] = "the remote file does not exist, not transferring, ignored" result['msg'] = "the remote file does not exist, not transferring, ignored"
result['file'] = source result['file'] = source
result['changed'] = False result['changed'] = False
else:
result.update(slurpres)
return result
else: else:
result.update(slurpres) if slurpres['encoding'] == 'base64':
remote_data = base64.b64decode(slurpres['content'])
if remote_data is not None:
remote_checksum = checksum_s(remote_data)
# the source path may have been expanded on the
# target system, so we compare it here and use the
# expanded version if it's different
remote_source = slurpres.get('source')
if remote_source and remote_source != source:
source = remote_source
# calculate the destination name
if os.path.sep not in self._connection._shell.join_path('a', ''):
source = self._connection._shell._unquote(source)
source_local = source.replace('\\', '/')
else:
source_local = source
dest = os.path.expanduser(dest)
if flat:
if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep):
result['msg'] = "dest is an existing directory, use a trailing slash if you want to fetch src into that directory"
result['file'] = dest
result['failed'] = True
return result
if dest.endswith(os.sep):
# if the path ends with "/", we'll use the source filename as the
# destination filename
base = os.path.basename(source_local)
dest = os.path.join(dest, base)
if not dest.startswith("/"):
# if dest does not start with "/", we'll assume a relative path
dest = self._loader.path_dwim(dest)
else:
# files are saved in dest dir, with a subdir for each host, then the filename
if 'inventory_hostname' in task_vars:
target_name = task_vars['inventory_hostname']
else:
target_name = self._play_context.remote_addr
dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)
dest = dest.replace("//", "/")
if remote_checksum in ('0', '1', '2', '3', '4', '5'):
result['changed'] = False
result['file'] = source
if remote_checksum == '0':
result['msg'] = "unable to calculate the checksum of the remote file"
elif remote_checksum == '1':
result['msg'] = "the remote file does not exist"
elif remote_checksum == '2':
result['msg'] = "no read permission on remote file"
elif remote_checksum == '3':
result['msg'] = "remote file is a directory, fetch cannot work on directories"
elif remote_checksum == '4':
result['msg'] = "python isn't present on the system. Unable to compute checksum"
elif remote_checksum == '5':
result['msg'] = "stdlib json or simplejson was not found on the remote machine. Only the raw module can work without those installed"
# Historically, these don't fail because you may want to transfer
# a log file that possibly MAY exist but keep going to fetch other
# log files. Today, this is better achieved by adding
# ignore_errors or failed_when to the task. Control the behaviour
# via fail_on_missing

if fail_on_missing:
result['failed'] = True
del result['changed']
else:
result['msg'] += ", not transferring, ignored"
return result return result
else:
if slurpres['encoding'] == 'base64':
remote_data = base64.b64decode(slurpres['content'])
if remote_data is not None:
remote_checksum = checksum_s(remote_data)
# the source path may have been expanded on the
# target system, so we compare it here and use the
# expanded version if it's different
remote_source = slurpres.get('source')
if remote_source and remote_source != source:
source = remote_source
# calculate the destination name # calculate checksum for the local file
if os.path.sep not in self._connection._shell.join_path('a', ''): local_checksum = checksum(dest)
source = self._connection._shell._unquote(source)
source_local = source.replace('\\', '/')
else:
source_local = source
dest = os.path.expanduser(dest) if remote_checksum != local_checksum:
if flat: # create the containing directories, if needed
if os.path.isdir(to_bytes(dest, errors='surrogate_or_strict')) and not dest.endswith(os.sep): makedirs_safe(os.path.dirname(dest))
result['msg'] = "dest is an existing directory, use a trailing slash if you want to fetch src into that directory"
result['file'] = dest
result['failed'] = True
return result
if dest.endswith(os.sep):
# if the path ends with "/", we'll use the source filename as the
# destination filename
base = os.path.basename(source_local)
dest = os.path.join(dest, base)
if not dest.startswith("/"):
# if dest does not start with "/", we'll assume a relative path
dest = self._loader.path_dwim(dest)
else:
# files are saved in dest dir, with a subdir for each host, then the filename
if 'inventory_hostname' in task_vars:
target_name = task_vars['inventory_hostname']
else:
target_name = self._play_context.remote_addr
dest = "%s/%s/%s" % (self._loader.path_dwim(dest), target_name, source_local)
dest = dest.replace("//", "/") # fetch the file and check for changes
if remote_data is None:
if remote_checksum in ('0', '1', '2', '3', '4', '5'): self._connection.fetch_file(source, dest)
result['changed'] = False else:
result['file'] = source try:
if remote_checksum == '0': f = open(to_bytes(dest, errors='surrogate_or_strict'), 'wb')
result['msg'] = "unable to calculate the checksum of the remote file" f.write(remote_data)
elif remote_checksum == '1': f.close()
result['msg'] = "the remote file does not exist" except (IOError, OSError) as e:
elif remote_checksum == '2': raise AnsibleError("Failed to fetch the file: %s" % e)
result['msg'] = "no read permission on remote file" new_checksum = secure_hash(dest)
elif remote_checksum == '3': # For backwards compatibility. We'll return None on FIPS enabled systems
result['msg'] = "remote file is a directory, fetch cannot work on directories"
elif remote_checksum == '4':
result['msg'] = "python isn't present on the system. Unable to compute checksum"
elif remote_checksum == '5':
result['msg'] = "stdlib json or simplejson was not found on the remote machine. Only the raw module can work without those installed"
# Historically, these don't fail because you may want to transfer
# a log file that possibly MAY exist but keep going to fetch other
# log files. Today, this is better achieved by adding
# ignore_errors or failed_when to the task. Control the behaviour
# via fail_on_missing
if fail_on_missing:
result['failed'] = True
del result['changed']
else:
result['msg'] += ", not transferring, ignored"
return result
# calculate checksum for the local file
local_checksum = checksum(dest)
if remote_checksum != local_checksum:
# create the containing directories, if needed
makedirs_safe(os.path.dirname(dest))
# fetch the file and check for changes
if remote_data is None:
self._connection.fetch_file(source, dest)
else:
try: try:
f = open(to_bytes(dest, errors='surrogate_or_strict'), 'wb') new_md5 = md5(dest)
f.write(remote_data) except ValueError:
f.close() new_md5 = None
except (IOError, OSError) as e:
raise AnsibleError("Failed to fetch the file: %s" % e)
new_checksum = secure_hash(dest)
# For backwards compatibility. We'll return None on FIPS enabled systems
try:
new_md5 = md5(dest)
except ValueError:
new_md5 = None
if validate_checksum and new_checksum != remote_checksum: if validate_checksum and new_checksum != remote_checksum:
result.update(dict(failed=True, md5sum=new_md5, result.update(dict(failed=True, md5sum=new_md5,
msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None, msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None,
checksum=new_checksum, remote_checksum=remote_checksum)) checksum=new_checksum, remote_checksum=remote_checksum))
else:
result.update({'changed': True, 'md5sum': new_md5, 'dest': dest,
'remote_md5sum': None, 'checksum': new_checksum,
'remote_checksum': remote_checksum})
else: else:
result.update({'changed': True, 'md5sum': new_md5, 'dest': dest, # For backwards compatibility. We'll return None on FIPS enabled systems
'remote_md5sum': None, 'checksum': new_checksum, try:
'remote_checksum': remote_checksum}) local_md5 = md5(dest)
else: except ValueError:
# For backwards compatibility. We'll return None on FIPS enabled systems local_md5 = None
try: result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum))
local_md5 = md5(dest)
except ValueError: finally:
local_md5 = None self._remove_tmp_path(self._connection._shell.tempdir)
result.update(dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum))
return result return result
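Stripped of the slurp/become handling, the fetch logic above boils down to: compare checksums, transfer only on mismatch, then re-checksum the local copy and optionally fail on a mismatch, cleaning the shell tempdir in finally. A rough local-only sketch (SHA-1 mirrors Ansible's default file checksum; the transfer callable is a placeholder):

```python
import hashlib
import os


def sha1sum(path):
    """Return the SHA-1 of a local file, or None if it does not exist."""
    if not os.path.exists(path):
        return None
    digest = hashlib.sha1()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            digest.update(chunk)
    return digest.hexdigest()


def fetch_if_changed(remote_checksum, dest, transfer, validate_checksum=True):
    """Transfer only when checksums differ, then verify the result."""
    local_checksum = sha1sum(dest)
    if remote_checksum == local_checksum:
        return {'changed': False, 'dest': dest, 'checksum': local_checksum}
    os.makedirs(os.path.dirname(dest) or '.', exist_ok=True)
    transfer(dest)  # connection.fetch_file() or writing slurped data
    new_checksum = sha1sum(dest)
    if validate_checksum and new_checksum != remote_checksum:
        return {'failed': True, 'msg': 'checksum mismatch', 'dest': dest}
    return {'changed': True, 'dest': dest, 'checksum': new_checksum}
```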

View file

@ -29,24 +29,28 @@ class ActionModule(ActionBase):
self._supports_check_mode = True self._supports_check_mode = True
self._supports_async = True self._supports_async = True
results = super(ActionModule, self).run(tmp, task_vars) result = super(ActionModule, self).run(tmp, task_vars)
if not results.get('skipped'): if not result.get('skipped'):
if results.get('invocation', {}).get('module_args'): if result.get('invocation', {}).get('module_args'):
# avoid passing to modules in case of no_log # avoid passing to modules in case of no_log
# should not be set anymore but here for backwards compatibility # should not be set anymore but here for backwards compatibility
del results['invocation']['module_args'] del result['invocation']['module_args']
# FUTURE: better to let _execute_module calculate this internally? # FUTURE: better to let _execute_module calculate this internally?
wrap_async = self._task.async_val and not self._connection.has_native_async wrap_async = self._task.async_val and not self._connection.has_native_async
# do work! # do work!
results = merge_hash(results, self._execute_module(tmp=tmp, task_vars=task_vars, wrap_async=wrap_async)) result = merge_hash(result, self._execute_module(tmp=tmp, task_vars=task_vars, wrap_async=wrap_async))
# hack to keep --verbose from showing all the setup module results # hack to keep --verbose from showing all the setup module result
# moved from setup module as now we filter out all _ansible_ from results # moved from setup module as now we filter out all _ansible_ from result
if self._task.action == 'setup': if self._task.action == 'setup':
results['_ansible_verbose_override'] = True result['_ansible_verbose_override'] = True
return results if not wrap_async:
# remove a temporary path we created
self._remove_tmp_path(self._connection._shell.tempdir)
return result
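The only behavioural addition here is the wrap_async guard around cleanup: a module wrapped for async execution still needs its temporary path after the action returns, so the tempdir is only removed on the synchronous path. Schematically (invented names, local tempdir as a stand-in):

```python
import shutil
import tempfile


def run_and_maybe_clean(execute_module, wrap_async=False):
    """Synchronous runs clean their tempdir; async runs leave it for the job."""
    tempdir = tempfile.mkdtemp(prefix='ansible-tmp-')
    result = execute_module(tempdir, wrap_async)
    if not wrap_async:
        # remove a temporary path we created
        shutil.rmtree(tempdir, ignore_errors=True)
    return result
```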

View file

@ -17,6 +17,7 @@
from __future__ import (absolute_import, division, print_function) from __future__ import (absolute_import, division, print_function)
__metaclass__ = type __metaclass__ = type
from ansible.errors import AnsibleAction, AnsibleActionFail
from ansible.plugins.action import ActionBase from ansible.plugins.action import ActionBase
try: try:
@ -46,29 +47,35 @@ class ActionModule(ActionBase):
module = self._templar.template("{{hostvars['%s']['ansible_facts']['pkg_mgr']}}" % self._task.delegate_to) module = self._templar.template("{{hostvars['%s']['ansible_facts']['pkg_mgr']}}" % self._task.delegate_to)
else: else:
module = self._templar.template('{{ansible_facts.pkg_mgr}}') module = self._templar.template('{{ansible_facts.pkg_mgr}}')
except: except Exception:
pass # could not get it from template! pass # could not get it from template!
if module == 'auto': try:
facts = self._execute_module(module_name='setup', module_args=dict(filter='ansible_pkg_mgr', gather_subset='!all'), task_vars=task_vars) if module == 'auto':
display.debug("Facts %s" % facts) facts = self._execute_module(module_name='setup', module_args=dict(filter='ansible_pkg_mgr', gather_subset='!all'), task_vars=task_vars)
module = facts.get('ansible_facts', {}).get('ansible_pkg_mgr', 'auto') display.debug("Facts %s" % facts)
module = facts.get('ansible_facts', {}).get('ansible_pkg_mgr', 'auto')
if module != 'auto': if module != 'auto':
if module not in self._shared_loader_obj.module_loader: if module not in self._shared_loader_obj.module_loader:
result['failed'] = True raise AnsibleActionFail('Could not find a module for %s.' % module)
result['msg'] = 'Could not find a module for %s.' % module else:
# run the 'package' module
new_module_args = self._task.args.copy()
if 'use' in new_module_args:
del new_module_args['use']
display.vvvv("Running %s" % module)
result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async_val))
else: else:
# run the 'package' module raise AnsibleActionFail('Could not detect which package manager to use. Try gathering facts or setting the "use" option.')
new_module_args = self._task.args.copy()
if 'use' in new_module_args:
del new_module_args['use']
display.vvvv("Running %s" % module) except AnsibleAction as e:
result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async_val)) result.update(e.result)
else: finally:
result['failed'] = True if not self._task.async_val:
result['msg'] = 'Could not detect which package manager to use. Try gathering facts or setting the "use" option.' # remove a temporary path we created
self._remove_tmp_path(self._connection._shell.tempdir)
return result return result
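The package action is reshaped around the AnsibleAction exception family: error paths raise, a single except AnsibleAction clause merges the exception's result dict, and finally guarantees tempdir removal (the commit itself adds a comment discouraging wider use of exceptions for flow control). A self-contained sketch of that control flow, with stand-in exception classes rather than the real ansible.errors ones:

```python
class ActionException(Exception):
    """Carries a result dict that the action merges into its return value."""
    def __init__(self, msg='', result=None):
        super(ActionException, self).__init__(msg)
        self.result = dict(result or {})
        self.result.setdefault('msg', msg)


class ActionFail(ActionException):
    def __init__(self, msg='', result=None):
        super(ActionFail, self).__init__(msg, result)
        self.result['failed'] = True


def package_action(detected_mgr, run_module, cleanup_tempdir):
    result = {}
    try:
        if detected_mgr == 'auto':
            raise ActionFail('Could not detect which package manager to use. '
                             'Try gathering facts or setting the "use" option.')
        result.update(run_module(detected_mgr))
    except ActionException as e:
        # one place to fold failure/skip details into the result
        result.update(e.result)
    finally:
        cleanup_tempdir()  # always remove the shell tempdir
    return result
```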

View file

@ -20,7 +20,7 @@ __metaclass__ = type
import os import os
from ansible.errors import AnsibleError from ansible.errors import AnsibleError, AnsibleAction, AnsibleActionDone, AnsibleActionFail
from ansible.module_utils._text import to_native from ansible.module_utils._text import to_native
from ansible.module_utils.parsing.convert_bool import boolean from ansible.module_utils.parsing.convert_bool import boolean
from ansible.plugins.action import ActionBase from ansible.plugins.action import ActionBase
@ -28,6 +28,8 @@ from ansible.plugins.action import ActionBase
class ActionModule(ActionBase): class ActionModule(ActionBase):
TRANSFERS_FILES = True
def run(self, tmp=None, task_vars=None): def run(self, tmp=None, task_vars=None):
if task_vars is None: if task_vars is None:
task_vars = dict() task_vars = dict()
@ -37,39 +39,33 @@ class ActionModule(ActionBase):
src = self._task.args.get('src', None) src = self._task.args.get('src', None)
remote_src = boolean(self._task.args.get('remote_src', 'no'), strict=False) remote_src = boolean(self._task.args.get('remote_src', 'no'), strict=False)
if src is None:
result['failed'] = True
result['msg'] = "src is required"
return result
elif remote_src:
# everything is remote, so we just execute the module
# without changing any of the module arguments
result.update(self._execute_module(task_vars=task_vars))
return result
try: try:
src = self._find_needle('files', src) if src is None:
except AnsibleError as e: raise AnsibleActionFail("src is required")
result['failed'] = True elif remote_src:
result['msg'] = to_native(e) # everything is remote, so we just execute the module
return result # without changing any of the module arguments
raise AnsibleActionDone(result=self._execute_module(task_vars=task_vars))
# create the remote tmp dir if needed, and put the source file there try:
if tmp is None or "-tmp-" not in tmp: src = self._find_needle('files', src)
tmp = self._make_tmp_path() except AnsibleError as e:
raise AnsibleActionFail(to_native(e))
tmp_src = self._connection._shell.join_path(tmp, os.path.basename(src)) tmp_src = self._connection._shell.join_path(self._connection._shell.tempdir, os.path.basename(src))
self._transfer_file(src, tmp_src) self._transfer_file(src, tmp_src)
self._fixup_perms2((tmp_src,))
self._fixup_perms2((tmp, tmp_src)) new_module_args = self._task.args.copy()
new_module_args.update(
new_module_args = self._task.args.copy() dict(
new_module_args.update( src=tmp_src,
dict( )
src=tmp_src,
) )
)
result.update(self._execute_module('patch', module_args=new_module_args, task_vars=task_vars)) result.update(self._execute_module('patch', module_args=new_module_args, task_vars=task_vars))
self._remove_tmp_path(tmp) except AnsibleAction as e:
result.update(e.result)
finally:
self._remove_tmp_path(self._connection._shell.tempdir)
return result return result

View file

@ -21,12 +21,13 @@ import os
import re import re
import shlex import shlex
from ansible.errors import AnsibleError from ansible.errors import AnsibleError, AnsibleAction, AnsibleActionDone, AnsibleActionFail, AnsibleActionSkip
from ansible.module_utils._text import to_native, to_text from ansible.module_utils._text import to_native, to_text
from ansible.plugins.action import ActionBase from ansible.plugins.action import ActionBase
class ActionModule(ActionBase): class ActionModule(ActionBase):
TRANSFERS_FILES = True TRANSFERS_FILES = True
# On Windows platform, absolute paths begin with a (back)slash # On Windows platform, absolute paths begin with a (back)slash
@ -40,95 +41,91 @@ class ActionModule(ActionBase):
result = super(ActionModule, self).run(tmp, task_vars) result = super(ActionModule, self).run(tmp, task_vars)
if not tmp:
tmp = self._make_tmp_path()
creates = self._task.args.get('creates')
if creates:
# do not run the command if the line contains creates=filename
# and the filename already exists. This allows idempotence
# of command executions.
if self._remote_file_exists(creates):
self._remove_tmp_path(tmp)
return dict(skipped=True, msg=("skipped, since %s exists" % creates))
removes = self._task.args.get('removes')
if removes:
# do not run the command if the line contains removes=filename
# and the filename does not exist. This allows idempotence
# of command executions.
if not self._remote_file_exists(removes):
self._remove_tmp_path(tmp)
return dict(skipped=True, msg=("skipped, since %s does not exist" % removes))
# The chdir must be absolute, because a relative path would rely on
# remote node behaviour & user config.
chdir = self._task.args.get('chdir')
if chdir:
# Powershell is the only Windows-path aware shell
if self._connection._shell.SHELL_FAMILY == 'powershell' and \
not self.windows_absolute_path_detection.matches(chdir):
return dict(failed=True, msg='chdir %s must be an absolute path for a Windows remote node' % chdir)
# Every other shell is unix-path-aware.
if self._connection._shell.SHELL_FAMILY != 'powershell' and not chdir.startswith('/'):
return dict(failed=True, msg='chdir %s must be an absolute path for a Unix-aware remote node' % chdir)
# Split out the script as the first item in raw_params using
# shlex.split() in order to support paths and files with spaces in the name.
# Any arguments passed to the script will be added back later.
raw_params = to_native(self._task.args.get('_raw_params', ''), errors='surrogate_or_strict')
parts = [to_text(s, errors='surrogate_or_strict') for s in shlex.split(raw_params.strip())]
source = parts[0]
try: try:
source = self._loader.get_real_file(self._find_needle('files', source), decrypt=self._task.args.get('decrypt', True)) creates = self._task.args.get('creates')
except AnsibleError as e: if creates:
return dict(failed=True, msg=to_native(e)) # do not run the command if the line contains creates=filename
# and the filename already exists. This allows idempotence
# of command executions.
if self._remote_file_exists(creates):
raise AnsibleActionSkip("%s exists, matching creates option" % creates)
if not self._play_context.check_mode: removes = self._task.args.get('removes')
# transfer the file to a remote tmp location if removes:
tmp_src = self._connection._shell.join_path(tmp, os.path.basename(source)) # do not run the command if the line contains removes=filename
# and the filename does not exist. This allows idempotence
# of command executions.
if not self._remote_file_exists(removes):
raise AnsibleActionSkip("%s does not exist, matching removes option" % removes)
# Convert raw_params to text for the purpose of replacing the script since # The chdir must be absolute, because a relative path would rely on
# parts and tmp_src are both unicode strings and raw_params will be different # remote node behaviour & user config.
# depending on Python version. chdir = self._task.args.get('chdir')
# if chdir:
# Once everything is encoded consistently, replace the script path on the remote # Powershell is the only Windows-path aware shell
# system with the remainder of the raw_params. This preserves quoting in parameters if self._connection._shell.SHELL_FAMILY == 'powershell' and \
# that would have been removed by shlex.split(). not self.windows_absolute_path_detection.matches(chdir):
target_command = to_text(raw_params).strip().replace(parts[0], tmp_src) raise AnsibleActionFail('chdir %s must be an absolute path for a Windows remote node' % chdir)
# Every other shell is unix-path-aware.
if self._connection._shell.SHELL_FAMILY != 'powershell' and not chdir.startswith('/'):
raise AnsibleActionFail('chdir %s must be an absolute path for a Unix-aware remote node' % chdir)
self._transfer_file(source, tmp_src) # Split out the script as the first item in raw_params using
# shlex.split() in order to support paths and files with spaces in the name.
# Any arguments passed to the script will be added back later.
raw_params = to_native(self._task.args.get('_raw_params', ''), errors='surrogate_or_strict')
parts = [to_text(s, errors='surrogate_or_strict') for s in shlex.split(raw_params.strip())]
source = parts[0]
# set file permissions, more permissive when the copy is done as a different user try:
self._fixup_perms2((tmp, tmp_src), execute=True) source = self._loader.get_real_file(self._find_needle('files', source), decrypt=self._task.args.get('decrypt', True))
except AnsibleError as e:
raise AnsibleActionFail(to_native(e))
# add preparation steps to one ssh roundtrip executing the script # now we execute script, always assume changed.
env_dict = dict()
env_string = self._compute_environment_string(env_dict)
script_cmd = ' '.join([env_string, target_command])
if self._play_context.check_mode:
result['changed'] = True result['changed'] = True
self._remove_tmp_path(tmp)
return result
script_cmd = self._connection._shell.wrap_for_exec(script_cmd) if not self._play_context.check_mode:
# transfer the file to a remote tmp location
tmp_src = self._connection._shell.join_path(self._connection._shell.tempdir, os.path.basename(source))
exec_data = None # Convert raw_params to text for the purpose of replacing the script since
# HACK: come up with a sane way to pass around env outside the command # parts and tmp_src are both unicode strings and raw_params will be different
if self._connection.transport == "winrm": # depending on Python version.
exec_data = self._connection._create_raw_wrapper_payload(script_cmd, env_dict) #
# Once everything is encoded consistently, replace the script path on the remote
# system with the remainder of the raw_params. This preserves quoting in parameters
# that would have been removed by shlex.split().
target_command = to_text(raw_params).strip().replace(parts[0], tmp_src)
result.update(self._low_level_execute_command(cmd=script_cmd, in_data=exec_data, sudoable=True, chdir=chdir)) self._transfer_file(source, tmp_src)
# clean up after # set file permissions, more permissive when the copy is done as a different user
self._remove_tmp_path(tmp) self._fixup_perms2((tmp_src,), execute=True)
result['changed'] = True # add preparation steps to one ssh roundtrip executing the script
env_dict = dict()
env_string = self._compute_environment_string(env_dict)
script_cmd = ' '.join([env_string, target_command])
if 'rc' in result and result['rc'] != 0: if self._play_context.check_mode:
result['failed'] = True raise AnsibleActionDone()
result['msg'] = 'non-zero return code'
script_cmd = self._connection._shell.wrap_for_exec(script_cmd)
exec_data = None
# HACK: come up with a sane way to pass around env outside the command
if self._connection.transport == "winrm":
exec_data = self._connection._create_raw_wrapper_payload(script_cmd, env_dict)
result.update(self._low_level_execute_command(cmd=script_cmd, in_data=exec_data, sudoable=True, chdir=chdir))
if 'rc' in result and result['rc'] != 0:
raise AnsibleActionFail('non-zero return code')
except AnsibleAction as e:
result.update(e.result)
finally:
self._remove_tmp_path(self._connection._shell.tempdir)
return result return result
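The creates/removes idempotence checks survive the rewrite, but they now raise a skip from inside the same try block so the finally cleanup still runs. The predicate itself is simple; here it is as a local sketch (os.path.exists stands in for the remote _remote_file_exists check):

```python
import os


def script_should_run(creates=None, removes=None):
    """Return (run, reason) for command/script style idempotence guards."""
    if creates and os.path.exists(creates):
        return False, '%s exists, matching creates option' % creates
    if removes and not os.path.exists(removes):
        return False, '%s does not exist, matching removes option' % removes
    return True, ''
```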

View file

@ -18,6 +18,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type __metaclass__ = type
from ansible.errors import AnsibleAction, AnsibleActionFail
from ansible.plugins.action import ActionBase from ansible.plugins.action import ActionBase
@ -48,35 +49,41 @@ class ActionModule(ActionBase):
except: except:
pass # could not get it from template! pass # could not get it from template!
if module == 'auto': try:
facts = self._execute_module(module_name='setup', module_args=dict(gather_subset='!all', filter='ansible_service_mgr'), task_vars=task_vars) if module == 'auto':
self._display.debug("Facts %s" % facts) facts = self._execute_module(module_name='setup', module_args=dict(gather_subset='!all', filter='ansible_service_mgr'), task_vars=task_vars)
module = facts.get('ansible_facts', {}).get('ansible_service_mgr', 'auto') self._display.debug("Facts %s" % facts)
module = facts.get('ansible_facts', {}).get('ansible_service_mgr', 'auto')
if not module or module == 'auto' or module not in self._shared_loader_obj.module_loader: if not module or module == 'auto' or module not in self._shared_loader_obj.module_loader:
module = 'service' module = 'service'
if module != 'auto': if module != 'auto':
# run the 'service' module # run the 'service' module
new_module_args = self._task.args.copy() new_module_args = self._task.args.copy()
if 'use' in new_module_args: if 'use' in new_module_args:
del new_module_args['use'] del new_module_args['use']
# for backwards compatibility # for backwards compatibility
if 'state' in new_module_args and new_module_args['state'] == 'running': if 'state' in new_module_args and new_module_args['state'] == 'running':
self._display.deprecated(msg="state=running is deprecated. Please use state=started", version="2.7") self._display.deprecated(msg="state=running is deprecated. Please use state=started", version="2.7")
new_module_args['state'] = 'started' new_module_args['state'] = 'started'
if module in self.UNUSED_PARAMS: if module in self.UNUSED_PARAMS:
for unused in self.UNUSED_PARAMS[module]: for unused in self.UNUSED_PARAMS[module]:
if unused in new_module_args: if unused in new_module_args:
del new_module_args[unused] del new_module_args[unused]
self._display.warning('Ignoring "%s" as it is not used in "%s"' % (unused, module)) self._display.warning('Ignoring "%s" as it is not used in "%s"' % (unused, module))
self._display.vvvv("Running %s" % module) self._display.vvvv("Running %s" % module)
result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async_val)) result.update(self._execute_module(module_name=module, module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async_val))
else: else:
result['failed'] = True raise AnsibleActionFail('Could not detect which service manager to use. Try gathering facts or setting the "use" option.')
result['msg'] = 'Could not detect which service manager to use. Try gathering facts or setting the "use" option.'
except AnsibleAction as e:
result.update(e.result)
finally:
if not self._task.async_val:
self._remove_tmp_path(self._connection._shell.tempdir)
return result return result

View file

@ -22,4 +22,9 @@ class ActionModule(ActionBase):
loader=self._loader, loader=self._loader,
templar=self._templar, templar=self._templar,
shared_loader_obj=self._shared_loader_obj) shared_loader_obj=self._shared_loader_obj)
return command_action.run(task_vars=task_vars) result = command_action.run(task_vars=task_vars)
# remove a temporary path we created
self._remove_tmp_path(self._connection._shell.tempdir)
return result

View file

@ -21,13 +21,11 @@ import os
import shutil import shutil
import tempfile import tempfile
from ansible import constants as C from ansible.errors import AnsibleError, AnsibleFileNotFound, AnsibleAction, AnsibleActionFail
from ansible.errors import AnsibleError, AnsibleFileNotFound
from ansible.module_utils._text import to_bytes, to_text from ansible.module_utils._text import to_bytes, to_text
from ansible.module_utils.parsing.convert_bool import boolean from ansible.module_utils.parsing.convert_bool import boolean
from ansible.plugins.action import ActionBase from ansible.plugins.action import ActionBase
from ansible.template import generate_ansible_template_vars from ansible.template import generate_ansible_template_vars
from ansible.utils.hashing import checksum_s
class ActionModule(ActionBase): class ActionModule(ActionBase):
@ -35,20 +33,6 @@ class ActionModule(ActionBase):
TRANSFERS_FILES = True TRANSFERS_FILES = True
DEFAULT_NEWLINE_SEQUENCE = "\n" DEFAULT_NEWLINE_SEQUENCE = "\n"
def get_checksum(self, dest, all_vars, try_directory=False, source=None, tmp=None):
try:
dest_stat = self._execute_remote_stat(dest, all_vars=all_vars, follow=False, tmp=tmp)
if dest_stat['exists'] and dest_stat['isdir'] and try_directory and source:
base = os.path.basename(source)
dest = os.path.join(dest, base)
dest_stat = self._execute_remote_stat(dest, all_vars=all_vars, follow=False, tmp=tmp)
except AnsibleError as e:
return dict(failed=True, msg=to_text(e))
return dest_stat['checksum']
def run(self, tmp=None, task_vars=None): def run(self, tmp=None, task_vars=None):
''' handler for template operations ''' ''' handler for template operations '''
@ -76,108 +60,103 @@ class ActionModule(ActionBase):
if newline_sequence in wrong_sequences: if newline_sequence in wrong_sequences:
newline_sequence = allowed_sequences[wrong_sequences.index(newline_sequence)] newline_sequence = allowed_sequences[wrong_sequences.index(newline_sequence)]
if state is not None: try:
result['failed'] = True if state is not None:
result['msg'] = "'state' cannot be specified on a template" raise AnsibleActionFail("'state' cannot be specified on a template")
elif source is None or dest is None: elif source is None or dest is None:
result['failed'] = True raise AnsibleActionFail("src and dest are required")
result['msg'] = "src and dest are required" elif newline_sequence not in allowed_sequences:
elif newline_sequence not in allowed_sequences: raise AnsibleActionFail("newline_sequence needs to be one of: \n, \r or \r\n")
result['failed'] = True else:
result['msg'] = "newline_sequence needs to be one of: \n, \r or \r\n" try:
else: source = self._find_needle('templates', source)
except AnsibleError as e:
raise AnsibleActionFail(to_text(e))
# Get vault decrypted tmp file
try: try:
source = self._find_needle('templates', source) tmp_source = self._loader.get_real_file(source)
except AnsibleError as e: except AnsibleFileNotFound as e:
result['failed'] = True raise AnsibleActionFail("could not find src=%s, %s" % (source, to_text(e)))
result['msg'] = to_text(e)
if 'failed' in result: # template the source data locally & get ready to transfer
return result try:
with open(tmp_source, 'r') as f:
template_data = to_text(f.read())
# Get vault decrypted tmp file # set jinja2 internal search path for includes
try: searchpath = task_vars.get('ansible_search_path', [])
tmp_source = self._loader.get_real_file(source) searchpath.extend([self._loader._basedir, os.path.dirname(source)])
except AnsibleFileNotFound as e:
result['failed'] = True
result['msg'] = "could not find src=%s, %s" % (source, e)
self._remove_tmp_path(tmp)
return result
# template the source data locally & get ready to transfer # We want to search into the 'templates' subdir of each search path in
try: # addition to our original search paths.
with open(tmp_source, 'r') as f: newsearchpath = []
template_data = to_text(f.read()) for p in searchpath:
newsearchpath.append(os.path.join(p, 'templates'))
newsearchpath.append(p)
searchpath = newsearchpath
# set jinja2 internal search path for includes self._templar.environment.loader.searchpath = searchpath
searchpath = task_vars.get('ansible_search_path', []) self._templar.environment.newline_sequence = newline_sequence
searchpath.extend([self._loader._basedir, os.path.dirname(source)]) if block_start_string is not None:
self._templar.environment.block_start_string = block_start_string
if block_end_string is not None:
self._templar.environment.block_end_string = block_end_string
if variable_start_string is not None:
self._templar.environment.variable_start_string = variable_start_string
if variable_end_string is not None:
self._templar.environment.variable_end_string = variable_end_string
if trim_blocks is not None:
self._templar.environment.trim_blocks = bool(trim_blocks)
# We want to search into the 'templates' subdir of each search path in # add ansible 'template' vars
# addition to our original search paths. temp_vars = task_vars.copy()
newsearchpath = [] temp_vars.update(generate_ansible_template_vars(source))
for p in searchpath:
newsearchpath.append(os.path.join(p, 'templates'))
newsearchpath.append(p)
searchpath = newsearchpath
self._templar.environment.loader.searchpath = searchpath old_vars = self._templar._available_variables
self._templar.environment.newline_sequence = newline_sequence self._templar.set_available_variables(temp_vars)
if block_start_string is not None: resultant = self._templar.do_template(template_data, preserve_trailing_newlines=True, escape_backslashes=False)
self._templar.environment.block_start_string = block_start_string self._templar.set_available_variables(old_vars)
if block_end_string is not None: except AnsibleAction:
self._templar.environment.block_end_string = block_end_string raise
if variable_start_string is not None: except Exception as e:
self._templar.environment.variable_start_string = variable_start_string raise AnsibleActionFail("%s: %s" % (type(e).__name__, to_text(e)))
if variable_end_string is not None: finally:
self._templar.environment.variable_end_string = variable_end_string self._loader.cleanup_tmp_file(tmp_source)
if trim_blocks is not None:
self._templar.environment.trim_blocks = bool(trim_blocks)
# add ansible 'template' vars new_task = self._task.copy()
temp_vars = task_vars.copy() new_task.args.pop('newline_sequence', None)
temp_vars.update(generate_ansible_template_vars(source)) new_task.args.pop('block_start_string', None)
new_task.args.pop('block_end_string', None)
new_task.args.pop('variable_start_string', None)
new_task.args.pop('variable_end_string', None)
new_task.args.pop('trim_blocks', None)
try:
tempdir = tempfile.mkdtemp()
result_file = os.path.join(tempdir, os.path.basename(source))
with open(result_file, 'wb') as f:
f.write(to_bytes(resultant, errors='surrogate_or_strict'))
old_vars = self._templar._available_variables new_task.args.update(
self._templar.set_available_variables(temp_vars) dict(
resultant = self._templar.do_template(template_data, preserve_trailing_newlines=True, escape_backslashes=False) src=result_file,
self._templar.set_available_variables(old_vars) dest=dest,
except Exception as e: follow=follow,
result['failed'] = True ),
result['msg'] = "%s: %s" % (type(e).__name__, to_text(e)) )
return result copy_action = self._shared_loader_obj.action_loader.get('copy',
task=new_task,
connection=self._connection,
play_context=self._play_context,
loader=self._loader,
templar=self._templar,
shared_loader_obj=self._shared_loader_obj)
result.update(copy_action.run(task_vars=task_vars))
finally:
shutil.rmtree(tempdir)
except AnsibleAction as e:
result.update(e.result)
finally: finally:
self._loader.cleanup_tmp_file(tmp_source) self._remove_tmp_path(self._connection._shell.tempdir)
new_task = self._task.copy()
new_task.args.pop('newline_sequence', None)
new_task.args.pop('block_start_string', None)
new_task.args.pop('block_end_string', None)
new_task.args.pop('variable_start_string', None)
new_task.args.pop('variable_end_string', None)
new_task.args.pop('trim_blocks', None)
try:
tempdir = tempfile.mkdtemp()
result_file = os.path.join(tempdir, os.path.basename(source))
with open(result_file, 'wb') as f:
f.write(to_bytes(resultant, errors='surrogate_or_strict'))
new_task.args.update(
dict(
src=result_file,
dest=dest,
follow=follow,
),
)
copy_action = self._shared_loader_obj.action_loader.get('copy',
task=new_task,
connection=self._connection,
play_context=self._play_context,
loader=self._loader,
templar=self._templar,
shared_loader_obj=self._shared_loader_obj)
result.update(copy_action.run(task_vars=task_vars))
finally:
shutil.rmtree(tempdir)
return result return result
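After templating, the plugin no longer copies bytes itself; it renders into a short-lived local directory and delegates the transfer to the copy action, removing both that directory and the shell tempdir when done. The core of that hand-off, as a standalone sketch (render and copy are placeholder callables):

```python
import os
import shutil
import tempfile


def template_then_copy(render, copy, src_name, dest):
    """Render locally into a throwaway dir, delegate the transfer, always clean up."""
    tempdir = tempfile.mkdtemp()
    try:
        result_file = os.path.join(tempdir, os.path.basename(src_name))
        with open(result_file, 'wb') as f:
            f.write(render().encode('utf-8'))
        return copy(src=result_file, dest=dest)
    finally:
        shutil.rmtree(tempdir)
```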

View file

@ -20,7 +20,7 @@ __metaclass__ = type
import os import os
from ansible.errors import AnsibleError from ansible.errors import AnsibleError, AnsibleAction, AnsibleActionFail, AnsibleActionSkip
from ansible.module_utils._text import to_text from ansible.module_utils._text import to_text
from ansible.module_utils.parsing.convert_bool import boolean from ansible.module_utils.parsing.convert_bool import boolean
from ansible.plugins.action import ActionBase from ansible.plugins.action import ActionBase
@ -43,96 +43,81 @@ class ActionModule(ActionBase):
creates = self._task.args.get('creates', None) creates = self._task.args.get('creates', None)
decrypt = self._task.args.get('decrypt', True) decrypt = self._task.args.get('decrypt', True)
# "copy" is deprecated in favor of "remote_src".
if 'copy' in self._task.args:
# They are mutually exclusive.
if 'remote_src' in self._task.args:
result['failed'] = True
result['msg'] = "parameters are mutually exclusive: ('copy', 'remote_src')"
return result
# We will take the information from copy and store it in
# the remote_src var to use later in this file.
self._task.args['remote_src'] = remote_src = not boolean(self._task.args.pop('copy'), strict=False)
if source is None or dest is None:
result['failed'] = True
result['msg'] = "src (or content) and dest are required"
return result
if not tmp:
tmp = self._make_tmp_path()
if creates:
# do not run the command if the line contains creates=filename
# and the filename already exists. This allows idempotence
# of command executions.
creates = self._remote_expand_user(creates)
if self._remote_file_exists(creates):
result['skipped'] = True
result['msg'] = "skipped, since %s exists" % creates
self._remove_tmp_path(tmp)
return result
dest = self._remote_expand_user(dest) # CCTODO: Fix path for Windows hosts.
source = os.path.expanduser(source)
if not remote_src:
try:
source = self._loader.get_real_file(self._find_needle('files', source), decrypt=decrypt)
except AnsibleError as e:
result['failed'] = True
result['msg'] = to_text(e)
self._remove_tmp_path(tmp)
return result
try: try:
remote_stat = self._execute_remote_stat(dest, all_vars=task_vars, follow=True) # "copy" is deprecated in favor of "remote_src".
except AnsibleError as e: if 'copy' in self._task.args:
result['failed'] = True # They are mutually exclusive.
result['msg'] = to_text(e) if 'remote_src' in self._task.args:
self._remove_tmp_path(tmp) raise AnsibleActionFail("parameters are mutually exclusive: ('copy', 'remote_src')")
return result # We will take the information from copy and store it in
# the remote_src var to use later in this file.
self._task.args['remote_src'] = remote_src = not boolean(self._task.args.pop('copy'), strict=False)
if not remote_stat['exists'] or not remote_stat['isdir']: if source is None or dest is None:
result['failed'] = True raise AnsibleActionFail("src (or content) and dest are required")
result['msg'] = "dest '%s' must be an existing dir" % dest
self._remove_tmp_path(tmp)
return result
if not remote_src: if creates:
# transfer the file to a remote tmp location # do not run the command if the line contains creates=filename
tmp_src = self._connection._shell.join_path(tmp, 'source') # and the filename already exists. This allows idempotence
self._transfer_file(source, tmp_src) # of command executions.
creates = self._remote_expand_user(creates)
if self._remote_file_exists(creates):
raise AnsibleActionSkip("skipped, since %s exists" % creates)
# handle diff mode client side dest = self._remote_expand_user(dest) # CCTODO: Fix path for Windows hosts.
# handle check mode client side source = os.path.expanduser(source)
if not remote_src: if not remote_src:
# fix file permissions when the copy is done as a different user try:
self._fixup_perms2((tmp, tmp_src)) source = self._loader.get_real_file(self._find_needle('files', source), decrypt=decrypt)
# Build temporary module_args. except AnsibleError as e:
new_module_args = self._task.args.copy() raise AnsibleActionFail(to_text(e))
new_module_args.update(
dict(
src=tmp_src,
original_basename=os.path.basename(source),
),
)
else: try:
new_module_args = self._task.args.copy() remote_stat = self._execute_remote_stat(dest, all_vars=task_vars, follow=True)
new_module_args.update( except AnsibleError as e:
dict( raise AnsibleActionFail(to_text(e))
original_basename=os.path.basename(source),
),
)
# remove action plugin only key if not remote_stat['exists'] or not remote_stat['isdir']:
for key in ('decrypt',): raise AnsibleActionFail("dest '%s' must be an existing dir" % dest)
if key in new_module_args:
del new_module_args[key]
# execute the unarchive module now, with the updated args if not remote_src:
result.update(self._execute_module(module_args=new_module_args, task_vars=task_vars)) # transfer the file to a remote tmp location
self._remove_tmp_path(tmp) tmp_src = self._connection._shell.join_path(self._connection._shell.tempdir, 'source')
self._transfer_file(source, tmp_src)
# handle diff mode client side
# handle check mode client side
if not remote_src:
# fix file permissions when the copy is done as a different user
self._fixup_perms2((self._connection._shell.tempdir, tmp_src))
# Build temporary module_args.
new_module_args = self._task.args.copy()
new_module_args.update(
dict(
src=tmp_src,
original_basename=os.path.basename(source),
),
)
else:
new_module_args = self._task.args.copy()
new_module_args.update(
dict(
original_basename=os.path.basename(source),
),
)
# remove action plugin only key
for key in ('decrypt',):
if key in new_module_args:
del new_module_args[key]
# execute the unarchive module now, with the updated args
result.update(self._execute_module(module_args=new_module_args, task_vars=task_vars))
except AnsibleAction as e:
result.update(e.result)
finally:
self._remove_tmp_path(self._connection._shell.tempdir)
return result return result

View file

@ -15,6 +15,7 @@ import tempfile
import traceback import traceback
import zipfile import zipfile
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleFileNotFound from ansible.errors import AnsibleError, AnsibleFileNotFound
from ansible.module_utils._text import to_bytes, to_native, to_text from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.module_utils.parsing.convert_bool import boolean from ansible.module_utils.parsing.convert_bool import boolean
@ -218,7 +219,7 @@ class ActionModule(ActionBase):
def _create_content_tempfile(self, content): def _create_content_tempfile(self, content):
''' Create a tempfile containing defined content ''' ''' Create a tempfile containing defined content '''
fd, content_tempfile = tempfile.mkstemp() fd, content_tempfile = tempfile.mkstemp(dir=C.DEFAULT_LOCAL_TMP)
f = os.fdopen(fd, 'wb') f = os.fdopen(fd, 'wb')
content = to_bytes(content) content = to_bytes(content)
try: try:
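As in the earlier copy hunk, the content tempfile is now created inside Ansible's configured local temp directory rather than the platform default. In plain Python 3 terms, with a hypothetical path standing in for C.DEFAULT_LOCAL_TMP:

```python
import os
import tempfile

LOCAL_TMP = os.path.expanduser('~/.ansible/tmp')  # stand-in for C.DEFAULT_LOCAL_TMP


def create_content_tempfile(content):
    """Write content to a tempfile rooted in the configured local tmp."""
    os.makedirs(LOCAL_TMP, exist_ok=True)
    fd, path = tempfile.mkstemp(dir=LOCAL_TMP)
    try:
        with os.fdopen(fd, 'wb') as f:
            f.write(content if isinstance(content, bytes) else content.encode('utf-8'))
    except Exception:
        os.remove(path)
        raise
    return path
```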

View file

@ -60,7 +60,7 @@ class ConnectionBase(AnsiblePlugin):
supports_persistence = False supports_persistence = False
force_persistence = False force_persistence = False
def __init__(self, play_context, new_stdin, *args, **kwargs): def __init__(self, play_context, new_stdin, shell=None, *args, **kwargs):
super(ConnectionBase, self).__init__() super(ConnectionBase, self).__init__()
@ -78,9 +78,11 @@ class ConnectionBase(AnsiblePlugin):
self.success_key = None self.success_key = None
self.prompt = None self.prompt = None
self._connected = False self._connected = False
self._socket_path = None self._socket_path = None
if shell is not None:
self._shell = shell
# load the shell plugin for this action/connection # load the shell plugin for this action/connection
if play_context.shell: if play_context.shell:
shell_type = play_context.shell shell_type = play_context.shell
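The constructor change lets callers hand a ready-made shell plugin (for example one with per-host remote_tmp options already applied) to the connection, falling back to the play_context-driven lookup otherwise. Roughly, with load_shell_plugin as a placeholder for the plugin loader lookup:

```python
class ConnectionSketch(object):
    """Sketch of a connection that accepts an injected shell plugin."""

    def __init__(self, play_context, new_stdin, shell=None):
        self._connected = False
        if shell is not None:
            # caller supplied a configured shell plugin
            self._shell = shell
        else:
            # fall back to loading one from the play context, as before
            self._shell = load_shell_plugin(getattr(play_context, 'shell', None) or 'sh')


def load_shell_plugin(name):
    # placeholder for a shell_loader.get(name) style lookup
    return {'name': name}
```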

View file

@ -19,7 +19,7 @@ from collections import defaultdict
from ansible import constants as C from ansible import constants as C
from ansible.plugins import get_plugin_class, MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE from ansible.plugins import get_plugin_class, MODULE_CACHE, PATH_CACHE, PLUGIN_PATH_CACHE
from ansible.module_utils._text import to_text from ansible.module_utils._text import to_text
from ansible.parsing.plugin_docs import read_docstring from ansible.utils.plugin_docs import get_docstring
try: try:
from __main__ import display from __main__ import display
@ -209,14 +209,14 @@ class PluginLoader:
if self.class_name: if self.class_name:
type_name = get_plugin_class(self.class_name) type_name = get_plugin_class(self.class_name)
# FIXME: expand from just connection and callback # FIXME: expand to other plugins, but never doc fragments
if type_name in ('callback', 'connection', 'inventory', 'lookup'): # if type name != 'module_doc_fragment':
dstring = read_docstring(path, verbose=False, ignore_errors=False) if type_name in ('callback', 'connection', 'inventory', 'lookup', 'shell'):
dstring = get_docstring(path, fragment_loader, verbose=False, ignore_errors=True)[0]
if dstring.get('doc', False): if 'options' in dstring and isinstance(dstring['options'], dict):
if 'options' in dstring['doc'] and isinstance(dstring['doc']['options'], dict): C.config.initialize_plugin_configuration_definitions(type_name, name, dstring['options'])
C.config.initialize_plugin_configuration_definitions(type_name, name, dstring['doc']['options']) display.debug('Loaded config def from plugin (%s/%s)' % (type_name, name))
display.debug('Loaded config def from plugin (%s/%s)' % (type_name, name))
def add_directory(self, directory, with_subdir=False): def add_directory(self, directory, with_subdir=False):
''' Adds an additional directory to the search path ''' ''' Adds an additional directory to the search path '''
@ -462,6 +462,14 @@ class PluginLoader:
self._update_object(obj, name, path) self._update_object(obj, name, path)
yield obj yield obj
# doc fragments first
fragment_loader = PluginLoader(
'ModuleDocFragment',
'ansible.utils.module_docs_fragments',
os.path.join(os.path.dirname(__file__), 'module_docs_fragments'),
'',
)
action_loader = PluginLoader( action_loader = PluginLoader(
'ActionModule', 'ActionModule',
'ansible.plugins.action', 'ansible.plugins.action',
@ -545,13 +553,6 @@ test_loader = PluginLoader(
'test_plugins' 'test_plugins'
) )
fragment_loader = PluginLoader(
'ModuleDocFragment',
'ansible.utils.module_docs_fragments',
os.path.join(os.path.dirname(__file__), 'module_docs_fragments'),
'',
)
strategy_loader = PluginLoader( strategy_loader = PluginLoader(
'StrategyModule', 'StrategyModule',
'ansible.plugins.strategy', 'ansible.plugins.strategy',

View file

@ -18,10 +18,10 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type __metaclass__ = type
import os import os
import re import os.path
import ansible.constants as C
import time
import random import random
import re
import time
from ansible.module_utils.six import text_type from ansible.module_utils.six import text_type
from ansible.module_utils.six.moves import shlex_quote from ansible.module_utils.six.moves import shlex_quote
@ -31,26 +31,32 @@ _USER_HOME_PATH_RE = re.compile(r'^~[_.A-Za-z0-9][-_.A-Za-z0-9]*$')
class ShellBase(AnsiblePlugin): class ShellBase(AnsiblePlugin):
def __init__(self): def __init__(self):
super(ShellBase, self).__init__() super(ShellBase, self).__init__()
self.env = dict() self.env = {}
if C.DEFAULT_MODULE_SET_LOCALE: self.tempdir = None
module_locale = C.DEFAULT_MODULE_LANG or os.getenv('LANG', 'en_US.UTF-8')
def set_options(self, task_keys=None, var_options=None, direct=None):
super(ShellBase, self).set_options(task_keys=task_keys, var_options=var_options, direct=direct)
# not all shell modules have this option
if self.get_option('set_module_language'):
self.env.update( self.env.update(
dict( dict(
LANG=module_locale, LANG=self.get_option('module_language'),
LC_ALL=module_locale, LC_ALL=self.get_option('module_language'),
LC_MESSAGES=module_locale, LC_MESSAGES=self.get_option('module_language'),
) )
) )
# set env
self.env.update(self.get_option('environment'))
def env_prefix(self, **kwargs): def env_prefix(self, **kwargs):
env = self.env.copy() return ' '.join(['%s=%s' % (k, shlex_quote(text_type(v))) for k, v in kwargs.items()])
env.update(kwargs)
return ' '.join(['%s=%s' % (k, shlex_quote(text_type(v))) for k, v in env.items()])
def join_path(self, *args): def join_path(self, *args):
return os.path.join(*args) return os.path.join(*args)
@ -96,32 +102,27 @@ class ShellBase(AnsiblePlugin):
cmd = ['test', '-e', shlex_quote(path)] cmd = ['test', '-e', shlex_quote(path)]
return ' '.join(cmd) return ' '.join(cmd)
def mkdtemp(self, basefile=None, system=False, mode=None, tmpdir=None): def mkdtemp(self, basefile=None, system=False, mode=0o700, tmpdir=None):
if not basefile: if not basefile:
basefile = 'ansible-tmp-%s-%s' % (time.time(), random.randint(0, 2**48)) basefile = 'ansible-tmp-%s-%s' % (time.time(), random.randint(0, 2**48))
# When system is specified we have to create this in a directory where # When system is specified we have to create this in a directory where
# other users can read and access the temp directory. This is because # other users can read and access the temp directory.
# we use system to create tmp dirs for unprivileged users who are # This is because we use system to create tmp dirs for unprivileged users who are
# sudo'ing to a second unprivileged user. The only directories where # sudo'ing to a second unprivileged user.
# that is standard are the tmp dirs, /tmp and /var/tmp. So we only # The 'system_temps' setting defines directories we can use for this purpose
# allow one of those two locations if system=True. However, users # the defaults are /tmp and /var/tmp.
# might want to have some say over which of /tmp or /var/tmp is used # So we only allow one of those locations if system=True, using the
# (because /tmp may be a tmpfs and want to conserve RAM or persist the # passed in tmpdir if it is valid or the first one from the setting if not.
# tmp files beyond a reboot. So we check if the user set REMOTE_TMP
# to somewhere in or below /var/tmp and if so use /var/tmp. If
# anything else we use /tmp (because /tmp is specified by POSIX and
# /var/tmp is not).
if system: if system:
# FIXME: create 'system tmp dirs' config/var and check tmpdir is in those values to allow for /opt/tmp, etc if tmpdir.startswith(tuple(self.get_option('system_temps'))):
if tmpdir.startswith('/var/tmp'): basetmpdir = tmpdir
basetmpdir = '/var/tmp'
else: else:
basetmpdir = '/tmp' basetmpdir = self.get_option('system_temps')[0]
else: else:
if tmpdir is None: if tmpdir is None:
basetmpdir = C.DEFAULT_REMOTE_TMP basetmpdir = self.get_option('remote_temp')
else: else:
basetmpdir = tmpdir basetmpdir = tmpdir
@ -138,13 +139,15 @@ class ShellBase(AnsiblePlugin):
return cmd return cmd
def expand_user(self, user_home_path): def expand_user(self, user_home_path, username=''):
''' Return a command to expand tildes in a path ''' Return a command to expand tildes in a path
It can be either "~" or "~username". We use the POSIX definition of It can be either "~" or "~username". We just ignore $HOME
a username: We use the POSIX definition of a username:
http://pubs.opengroup.org/onlinepubs/000095399/basedefs/xbd_chap03.html#tag_03_426 http://pubs.opengroup.org/onlinepubs/000095399/basedefs/xbd_chap03.html#tag_03_426
http://pubs.opengroup.org/onlinepubs/000095399/basedefs/xbd_chap03.html#tag_03_276 http://pubs.opengroup.org/onlinepubs/000095399/basedefs/xbd_chap03.html#tag_03_276
Falls back to 'current working directory' as we assume 'home is where the remote user ends up'
''' '''
# Check that the user_path to expand is safe # Check that the user_path to expand is safe
@ -152,9 +155,17 @@ class ShellBase(AnsiblePlugin):
if not _USER_HOME_PATH_RE.match(user_home_path): if not _USER_HOME_PATH_RE.match(user_home_path):
# shlex_quote will make the shell return the string verbatim # shlex_quote will make the shell return the string verbatim
user_home_path = shlex_quote(user_home_path) user_home_path = shlex_quote(user_home_path)
elif username:
# if present the user name is appended to resolve "that user's home"
user_home_path += username
return 'echo %s' % user_home_path return 'echo %s' % user_home_path
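For reference, the reworked expand_user() only ever emits an echo command for the remote shell to evaluate; roughly (return values shown as comments, the username is illustrative):

    # '~' resolves to the connecting user's home, '~<username>' to that user's
    # home; anything that fails the safety regex is shlex-quoted so the shell
    # echoes it back unchanged instead of expanding it
    shell.expand_user('~')                      # -> "echo ~"
    shell.expand_user('~', username='deploy')   # -> "echo ~deploy"
    shell.expand_user('$HOME; rm -rf /')        # -> "echo '$HOME; rm -rf /'"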
def build_module_command(self, env_string, shebang, cmd, arg_path=None, rm_tmp=None): def pwd(self):
"""Return the working directory after connecting"""
return 'echo %spwd%s' % (self._SHELL_SUB_LEFT, self._SHELL_SUB_RIGHT)
def build_module_command(self, env_string, shebang, cmd, arg_path=None):
# don't quote the cmd if it's an empty string, because this will break pipelining mode # don't quote the cmd if it's an empty string, because this will break pipelining mode
if cmd.strip() != '': if cmd.strip() != '':
cmd = shlex_quote(cmd) cmd = shlex_quote(cmd)
@ -168,8 +179,6 @@ class ShellBase(AnsiblePlugin):
if arg_path is not None: if arg_path is not None:
cmd_parts.append(arg_path) cmd_parts.append(arg_path)
new_cmd = " ".join(cmd_parts) new_cmd = " ".join(cmd_parts)
if rm_tmp:
new_cmd = '%s; rm -rf "%s" %s' % (new_cmd, rm_tmp, self._SHELL_REDIRECT_ALLNULL)
return new_cmd return new_cmd
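With rm_tmp gone, build_module_command() only assembles the command itself and leaves temp-dir cleanup to the action plugin. A rough, self-contained approximation of the remaining logic (the interpreter argument stands in for the real method's shebang handling, which is outside this hunk):

    from shlex import quote as shlex_quote

    def build_module_command_sketch(env_string, interpreter, cmd, arg_path=None):
        # quote the command unless it is empty (empty means pipelining mode)
        cmd_parts = [env_string.strip(), interpreter,
                     shlex_quote(cmd) if cmd.strip() else cmd]
        if arg_path is not None:
            cmd_parts.append(arg_path)
        return " ".join(part for part in cmd_parts if part)

    # -> "LANG=C /usr/bin/python /home/u/.ansible/tmp/ansible-tmp-1/AnsiballZ_ping.py"
    build_module_command_sketch('LANG=C', '/usr/bin/python',
                                '/home/u/.ansible/tmp/ansible-tmp-1/AnsiballZ_ping.py')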
def append_command(self, cmd, cmd_to_append): def append_command(self, cmd, cmd_to_append):

View file

@ -1,24 +1,22 @@
# (c) 2014, Chris Church <chris@ninemoreminutes.com> # Copyright (c) 2014, Chris Church <chris@ninemoreminutes.com>
# # Copyright (c) 2017 Ansible Project
# This file is part of Ansible. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function) from __future__ import (absolute_import, division, print_function)
__metaclass__ = type __metaclass__ = type
from ansible.plugins.shell import ShellBase from ansible.plugins.shell import ShellBase
DOCUMENTATION = '''
name: csh
plugin_type: shell
version_added: ""
short_description: C shell (/bin/csh)
description:
- When you have no other option than to use csh
extends_documentation_fragment:
- shell_common
'''
class ShellModule(ShellBase): class ShellModule(ShellBase):

View file

@ -1,19 +1,6 @@
# (c) 2014, Chris Church <chris@ninemoreminutes.com> # Copyright (c) 2014, Chris Church <chris@ninemoreminutes.com>
# # Copyright (c) 2017 Ansible Project
# This file is part of Ansible. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function) from __future__ import (absolute_import, division, print_function)
__metaclass__ = type __metaclass__ = type
@ -21,6 +8,17 @@ from ansible.module_utils.six import text_type
from ansible.module_utils.six.moves import shlex_quote from ansible.module_utils.six.moves import shlex_quote
from ansible.plugins.shell.sh import ShellModule as ShModule from ansible.plugins.shell.sh import ShellModule as ShModule
DOCUMENTATION = '''
name: fish
plugin_type: shell
version_added: ""
short_description: fish shell (/bin/fish)
description:
- This is here because some people are restricted to fish.
extends_documentation_fragment:
- shell_common
'''
class ShellModule(ShModule): class ShellModule(ShModule):
@ -43,7 +41,7 @@ class ShellModule(ShModule):
env.update(kwargs) env.update(kwargs)
return ' '.join(['set -lx %s %s;' % (k, shlex_quote(text_type(v))) for k, v in env.items()]) return ' '.join(['set -lx %s %s;' % (k, shlex_quote(text_type(v))) for k, v in env.items()])
def build_module_command(self, env_string, shebang, cmd, arg_path=None, rm_tmp=None): def build_module_command(self, env_string, shebang, cmd, arg_path=None):
# don't quote the cmd if it's an empty string, because this will break pipelining mode # don't quote the cmd if it's an empty string, because this will break pipelining mode
if cmd.strip() != '': if cmd.strip() != '':
cmd = shlex_quote(cmd) cmd = shlex_quote(cmd)
@ -51,8 +49,6 @@ class ShellModule(ShModule):
if arg_path is not None: if arg_path is not None:
cmd_parts.append(arg_path) cmd_parts.append(arg_path)
new_cmd = " ".join(cmd_parts) new_cmd = " ".join(cmd_parts)
if rm_tmp:
new_cmd = 'begin ; %s; rm -rf "%s" %s ; end' % (new_cmd, rm_tmp, self._SHELL_REDIRECT_ALLNULL)
return new_cmd return new_cmd
def checksum(self, path, python_interp): def checksum(self, path, python_interp):

View file

@ -1,22 +1,18 @@
# (c) 2014, Chris Church <chris@ninemoreminutes.com> # Copyright (c) 2014, Chris Church <chris@ninemoreminutes.com>
# # Copyright (c) 2017 Ansible Project
# This file is part of Ansible. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function) from __future__ import (absolute_import, division, print_function)
__metaclass__ = type __metaclass__ = type
DOCUMENTATION = '''
name: powershell
plugin_type: shell
version_added: ""
short_description: Windows Powershell
description:
- The only option when using 'winrm' as a connection plugin
'''
import base64 import base64
import os import os
import re import re
@ -1693,8 +1689,10 @@ Function Run($payload) {
''' # end async_watchdog ''' # end async_watchdog
from ansible.plugins import AnsiblePlugin
class ShellModule(object):
class ShellModule(AnsiblePlugin):
# Common shell filenames that this plugin handles # Common shell filenames that this plugin handles
# Powershell is handled differently. It's selected when winrm is the # Powershell is handled differently. It's selected when winrm is the
@ -1773,7 +1771,7 @@ class ShellModule(object):
# FIXME: Support system temp path and passed in tmpdir! # FIXME: Support system temp path and passed in tmpdir!
return self._encode_script('''(New-Item -Type Directory -Path $env:temp -Name "%s").FullName | Write-Host -Separator '';''' % basefile) return self._encode_script('''(New-Item -Type Directory -Path $env:temp -Name "%s").FullName | Write-Host -Separator '';''' % basefile)
def expand_user(self, user_home_path): def expand_user(self, user_home_path, username=''):
# PowerShell only supports "~" (not "~username"). Resolve-Path ~ does # PowerShell only supports "~" (not "~username"). Resolve-Path ~ does
# not seem to work remotely, though by default we are always starting # not seem to work remotely, though by default we are always starting
# in the user's home directory. # in the user's home directory.
@ -1823,7 +1821,7 @@ class ShellModule(object):
''' % dict(path=path) ''' % dict(path=path)
return self._encode_script(script) return self._encode_script(script)
def build_module_command(self, env_string, shebang, cmd, arg_path=None, rm_tmp=None): def build_module_command(self, env_string, shebang, cmd, arg_path=None):
# pipelining bypass # pipelining bypass
if cmd == '': if cmd == '':
return '-' return '-'
@ -1878,10 +1876,6 @@ class ShellModule(object):
Exit 1 Exit 1
} }
''' % (env_string, ' '.join(cmd_parts)) ''' % (env_string, ' '.join(cmd_parts))
if rm_tmp:
rm_tmp = self._escape(self._unquote(rm_tmp))
rm_cmd = 'Remove-Item "%s" -Force -Recurse -ErrorAction SilentlyContinue' % rm_tmp
script = '%s\nFinally { %s }' % (script, rm_cmd)
return self._encode_script(script, preserve_rc=False) return self._encode_script(script, preserve_rc=False)
def wrap_for_exec(self, cmd): def wrap_for_exec(self, cmd):

View file

@ -1,22 +1,19 @@
# (c) 2014, Chris Church <chris@ninemoreminutes.com> # Copyright (c) 2014, Chris Church <chris@ninemoreminutes.com>
# # Copyright (c) 2017 Ansible Project
# This file is part of Ansible. # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function) from __future__ import (absolute_import, division, print_function)
__metaclass__ = type __metaclass__ = type
DOCUMENTATION = '''
name: sh
plugin_type: shell
short_description: "POSIX shell (/bin/sh)"
version_added: historical
description:
- This shell plugin is the one you want to use on most Unix systems; it is the most compatible and most widely installed shell.
extends_documentation_fragment:
- shell_common
'''
from ansible.module_utils.six.moves import shlex_quote from ansible.module_utils.six.moves import shlex_quote
from ansible.plugins.shell import ShellBase from ansible.plugins.shell import ShellBase
@ -26,6 +23,8 @@ class ShellModule(ShellBase):
# Common shell filenames that this plugin handles. # Common shell filenames that this plugin handles.
# Note: sh is the default shell plugin so this plugin may also be selected # Note: sh is the default shell plugin so this plugin may also be selected
# This code needs to be SH-compliant. BASH-isms will not work if /bin/sh points to a non-BASH shell.
# if the filename is not listed in any Shell plugin. # if the filename is not listed in any Shell plugin.
COMPATIBLE_SHELLS = frozenset(('sh', 'zsh', 'bash', 'dash', 'ksh')) COMPATIBLE_SHELLS = frozenset(('sh', 'zsh', 'bash', 'dash', 'ksh'))
# Family of shells this has. Must match the filename without extension # Family of shells this has. Must match the filename without extension
@ -42,22 +41,16 @@ class ShellModule(ShellBase):
_SHELL_GROUP_RIGHT = ')' _SHELL_GROUP_RIGHT = ')'
def checksum(self, path, python_interp): def checksum(self, path, python_interp):
# The following test needs to be SH-compliant. BASH-isms will
# not work if /bin/sh points to a non-BASH shell.
#
# In the following test, each condition is a check and logical # In the following test, each condition is a check and logical
# comparison (|| or &&) that sets the rc value. Every check is run so # comparison (|| or &&) that sets the rc value. Every check is run so
# the last check in the series to fail will be the rc that is # the last check in the series to fail will be the rc that is returned.
# returned.
# #
# If a check fails we error before invoking the hash functions because # If a check fails we error before invoking the hash functions because
# hash functions may successfully take the hash of a directory on BSDs # hash functions may successfully take the hash of a directory on BSDs
# (UFS filesystem?) which is not what the rest of the ansible code # (UFS filesystem?) which is not what the rest of the ansible code expects
# expects
# #
# If all of the available hashing methods fail we fail with an rc of # If all of the available hashing methods fail we fail with an rc of 0.
# 0. This logic is added to the end of the cmd at the bottom of this # This logic is added to the end of the cmd at the bottom of this function.
# function.
# Return codes: # Return codes:
# checksum: success! # checksum: success!

View file

@ -1,20 +1,5 @@
# # Copyright (c) 2017 Ansible Project
# (c) 2016, Sumit Kumar <sumit4@netapp.com> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object): class ModuleDocFragment(object):

View file

@ -0,0 +1,92 @@
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
# common shell documentation fragment
DOCUMENTATION = """
options:
remote_temp:
description:
- Temporary directory to use on targets when executing tasks.
default: '~/.ansible/tmp'
env: [{name: ANSIBLE_REMOTE_TEMP}]
ini:
- section: defaults
key: remote_tmp
vars:
- name: ansible_remote_tmp
system_temps:
description:
- List of valid system temporary directories for Ansible to choose from when it cannot use ``remote_temp``, normally due to permission issues.
default: [ /var/tmp, /tmp ]
type: list
env: [{name: ANSIBLE_SYSTEM_TMPS}]
ini:
- section: defaults
key: system_tmps
vars:
- name: ansible_system_tmps
async_dir:
description:
- Directory in which ansible will keep async job information
default: '~/.ansible_async'
env: [{name: ANSIBLE_ASYNC_DIR}]
ini:
- section: defaults
key: async_dir
vars:
- name: ansible_async_dir
set_module_language:
default: False
description: Controls whether we set the locale for modules when executing on the target.
env:
- name: ANSIBLE_MODULE_SET_LOCALE
ini:
- section: defaults
key: module_set_locale
type: boolean
vars:
- name: ansible_module_set_locale
module_language:
description:
- "If 'set_module_language' is true, this is the language language/locale setting to use for modules when they execute on the target."
- "Defaults to match the controller's settings."
default: "{{CONTROLLER_LANG}}"
env:
- name: ANSIBLE_MODULE_LANG
ini:
- section: defaults
key: module_lang
vars:
- name: ansible_module_lang
environment:
type: dict
default: {}
description:
- dictionary of environment variables and their values to use when executing commands.
admin_users:
type: list
default: ['root', 'toor', 'admin']
description:
- list of users expected to have admin privileges; for BSD you might want to add 'toor', for Windows 'Administrator'.
env:
- name: ANSIBLE_ADMIN_USERS
ini:
- section: defaults
key: admin_users
vars:
- name: ansible_admin_users
allow_world_readable_temp:
type: boolean
description:
- This makes the temporary files created on the machine world readable, and will issue a warning instead of failing the task.
- It is useful when becoming an unprivileged user.
ini:
- section: defaults
key: allow_world_readable_tmpfiles
vars:
- name: ansible_world_readable_tmpfiles
version_added: "2.1"
"""

View file

@ -27,7 +27,6 @@ from ansible.module_utils.six import string_types
from ansible.module_utils._text import to_native from ansible.module_utils._text import to_native
from ansible.parsing.plugin_docs import read_docstring from ansible.parsing.plugin_docs import read_docstring
from ansible.parsing.yaml.loader import AnsibleLoader from ansible.parsing.yaml.loader import AnsibleLoader
from ansible.plugins.loader import fragment_loader
try: try:
from __main__ import display from __main__ import display
@ -59,7 +58,7 @@ def merge_fragment(target, source):
target[key] = value target[key] = value
def add_fragments(doc, filename): def add_fragments(doc, filename, fragment_loader):
fragments = doc.pop('extends_documentation_fragment', []) fragments = doc.pop('extends_documentation_fragment', [])
@ -99,6 +98,8 @@ def add_fragments(doc, filename):
merge_fragment(doc['options'], fragment.pop('options')) merge_fragment(doc['options'], fragment.pop('options'))
except Exception as e: except Exception as e:
raise AnsibleError("%s options (%s) of unknown type: %s" % (to_native(e), fragment_name, filename)) raise AnsibleError("%s options (%s) of unknown type: %s" % (to_native(e), fragment_name, filename))
else:
doc['options'] = fragment.pop('options')
# merge rest of the sections # merge rest of the sections
try: try:
@ -107,15 +108,15 @@ def add_fragments(doc, filename):
raise AnsibleError("%s (%s) of unknown type: %s" % (to_native(e), fragment_name, filename)) raise AnsibleError("%s (%s) of unknown type: %s" % (to_native(e), fragment_name, filename))
def get_docstring(filename, verbose=False): def get_docstring(filename, fragment_loader, verbose=False, ignore_errors=False):
""" """
DOCUMENTATION can be extended using documentation fragments loaded by the PluginLoader from the module_docs_fragments directory. DOCUMENTATION can be extended using documentation fragments loaded by the PluginLoader from the module_docs_fragments directory.
""" """
data = read_docstring(filename, verbose=verbose) data = read_docstring(filename, verbose=verbose, ignore_errors=ignore_errors)
# add fragments to documentation # add fragments to documentation
if data.get('doc', False): if data.get('doc', False):
add_fragments(data['doc'], filename) add_fragments(data['doc'], filename, fragment_loader=fragment_loader)
return data['doc'], data['plainexamples'], data['returndocs'], data['metadata'] return data['doc'], data['plainexamples'], data['returndocs'], data['metadata']
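Callers now have to hand in the fragment loader themselves, as the validate-modules changes further down do; a minimal sketch (the module path is illustrative):

    from ansible.plugins.loader import fragment_loader
    from ansible.utils.plugin_docs import get_docstring

    doc, examples, returndocs, metadata = get_docstring(
        '/path/to/library/my_module.py', fragment_loader, verbose=True)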

View file

@ -1,7 +1,7 @@
- block: - block:
- name: Create a local temporary directory - name: Create a local temporary directory
shell: mktemp -d "${TMPDIR:-/tmp}/ansible_test.XXXXXXXXX" shell: mktemp -d /tmp/ansible_test.XXXXXXXXX
register: tempfile_result register: tempfile_result
connection: local connection: local
@ -10,6 +10,9 @@
# output_dir is hardcoded in test/runner/lib/executor.py and created there # output_dir is hardcoded in test/runner/lib/executor.py and created there
remote_dir: '{{ output_dir }}' remote_dir: '{{ output_dir }}'
- file: path={{local_temp_dir}} state=directory
name: ensure temp dir exists
- name: Create remote unprivileged remote user - name: Create remote unprivileged remote user
user: user:
name: '{{ remote_unprivileged_user }}' name: '{{ remote_unprivileged_user }}'

View file

@ -198,7 +198,7 @@
assert: assert:
that: that:
- _check_mode_test2 is skipped - _check_mode_test2 is skipped
- '_check_mode_test2.msg == "skipped, since {{ output_dir_test | expanduser }}/afile2.txt exists"' - '_check_mode_test2.msg == "{{ output_dir_test | expanduser }}/afile2.txt exists, matching creates option"'
- name: Remove afile2.txt - name: Remove afile2.txt
file: file:
@ -220,4 +220,4 @@
assert: assert:
that: that:
- _check_mode_test3 is skipped - _check_mode_test3 is skipped
- '_check_mode_test3.msg == "skipped, since {{ output_dir_test | expanduser }}/afile2.txt does not exist"' - '_check_mode_test3.msg == "{{ output_dir_test | expanduser }}/afile2.txt does not exist, matching removes option"'

View file

@ -38,6 +38,7 @@ from fnmatch import fnmatch
from ansible import __version__ as ansible_version from ansible import __version__ as ansible_version
from ansible.executor.module_common import REPLACER_WINDOWS from ansible.executor.module_common import REPLACER_WINDOWS
from ansible.plugins.loader import fragment_loader
from ansible.utils.plugin_docs import BLACKLIST, get_docstring from ansible.utils.plugin_docs import BLACKLIST, get_docstring
from module_args import AnsibleModuleImportError, get_argument_spec from module_args import AnsibleModuleImportError, get_argument_spec
@ -829,7 +830,7 @@ class ModuleValidator(Validator):
if not errors and not traces: if not errors and not traces:
with CaptureStd(): with CaptureStd():
try: try:
get_docstring(self.path, verbose=True) get_docstring(self.path, fragment_loader, verbose=True)
except AssertionError: except AssertionError:
fragment = doc['extends_documentation_fragment'] fragment = doc['extends_documentation_fragment']
self.reporter.error( self.reporter.error(
@ -1026,7 +1027,7 @@ class ModuleValidator(Validator):
with CaptureStd(): with CaptureStd():
try: try:
existing_doc, _, _, _ = get_docstring(self.base_module, verbose=True) existing_doc = get_docstring(self.base_module, fragment_loader, verbose=True)[0]
existing_options = existing_doc.get('options', {}) or {} existing_options = existing_doc.get('options', {}) or {}
except AssertionError: except AssertionError:
fragment = doc['extends_documentation_fragment'] fragment = doc['extends_documentation_fragment']

View file

@ -21,6 +21,7 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type __metaclass__ = type
import os import os
import re
from ansible import constants as C from ansible import constants as C
from ansible.compat.tests import unittest from ansible.compat.tests import unittest
@ -229,11 +230,23 @@ class TestActionBase(unittest.TestCase):
# create our fake task # create our fake task
mock_task = MagicMock() mock_task = MagicMock()
def get_shell_opt(opt):
ret = None
if opt == 'admin_users':
ret = ['root', 'toor', 'Administrator']
elif opt == 'remote_temp':
ret = '~/.ansible/tmp'
return ret
# create a mock connection, so we don't actually try and connect to things # create a mock connection, so we don't actually try and connect to things
mock_connection = MagicMock() mock_connection = MagicMock()
mock_connection.transport = 'ssh' mock_connection.transport = 'ssh'
mock_connection._shell.mkdtemp.return_value = 'mkdir command' mock_connection._shell.mkdtemp.return_value = 'mkdir command'
mock_connection._shell.join_path.side_effect = os.path.join mock_connection._shell.join_path.side_effect = os.path.join
mock_connection._shell.get_option = get_shell_opt
mock_connection._shell.HOMES_RE = re.compile(r'(\'|\")?(~|\$HOME)(.*)')
# we're using a real play context here # we're using a real play context here
play_context = PlayContext() play_context = PlayContext()
@ -395,12 +408,10 @@ class TestActionBase(unittest.TestCase):
mock_task.args = dict(a=1, b=2, c=3) mock_task.args = dict(a=1, b=2, c=3)
# create a mock connection, so we don't actually try and connect to things # create a mock connection, so we don't actually try and connect to things
def build_module_command(env_string, shebang, cmd, arg_path=None, rm_tmp=None): def build_module_command(env_string, shebang, cmd, arg_path=None):
to_run = [env_string, cmd] to_run = [env_string, cmd]
if arg_path: if arg_path:
to_run.append(arg_path) to_run.append(arg_path)
if rm_tmp:
to_run.append(rm_tmp)
return " ".join(to_run) return " ".join(to_run)
mock_connection = MagicMock() mock_connection = MagicMock()

View file

@ -63,6 +63,11 @@ class ConnectionMock(object):
transport = None transport = None
_new_stdin = StdinMock() _new_stdin = StdinMock()
# my shell
_shell = MagicMock()
_shell.mkdtemp.return_value = 'mkdir command'
_shell.join_path.side_effect = os.path.join
class PlayContextMock(object): class PlayContextMock(object):
shell = None shell = None