Merge pull request #1 from ansible/devel

sync to head
This commit is contained in:
Greg DeKoenigsberg 2016-01-21 12:14:53 -05:00
commit 134873bada
17 changed files with 132 additions and 77 deletions

View file

@ -55,3 +55,12 @@
# will be ignored, and 4 will be used. Accepts a comma separated list, # will be ignored, and 4 will be used. Accepts a comma separated list,
# the first found wins. # the first found wins.
# access_ip_version = 4 # access_ip_version = 4
# Environment Variable: RAX_CACHE_MAX_AGE
# Default: 600
#
# A configuration that changes the behavior of the inventory cache.
# Inventory listing performed before this value will be returned from
# the cache instead of making a full request for all inventory. Setting
# this value to 0 will force a full request.
# cache_max_age = 600

View file

@ -355,9 +355,12 @@ def get_cache_file_path(regions):
def _list(regions, refresh_cache=True): def _list(regions, refresh_cache=True):
cache_max_age = int(get_config(p, 'rax', 'cache_max_age',
'RAX_CACHE_MAX_AGE', 600))
if (not os.path.exists(get_cache_file_path(regions)) or if (not os.path.exists(get_cache_file_path(regions)) or
refresh_cache or refresh_cache or
(time() - os.stat(get_cache_file_path(regions))[-1]) > 600): (time() - os.stat(get_cache_file_path(regions))[-1]) > cache_max_age):
# Cache file doesn't exist or older than 10m or refresh cache requested # Cache file doesn't exist or older than 10m or refresh cache requested
_list_into_cache(regions) _list_into_cache(regions)

View file

@ -78,6 +78,10 @@ class Inventory(object):
self._restriction = None self._restriction = None
self._subset = None self._subset = None
# clear the cache here, which is only useful if more than
# one Inventory objects are created when using the API directly
self.clear_pattern_cache()
self.parse_inventory(host_list) self.parse_inventory(host_list)
def serialize(self): def serialize(self):

View file

@ -31,7 +31,7 @@ from ansible.errors import AnsibleError
from ansible.inventory.host import Host from ansible.inventory.host import Host
from ansible.inventory.group import Group from ansible.inventory.group import Group
from ansible.module_utils.basic import json_dict_bytes_to_unicode from ansible.module_utils.basic import json_dict_bytes_to_unicode
from ansible.utils.unicode import to_str from ansible.utils.unicode import to_str, to_unicode
class InventoryScript: class InventoryScript:
@ -58,7 +58,13 @@ class InventoryScript:
if sp.returncode != 0: if sp.returncode != 0:
raise AnsibleError("Inventory script (%s) had an execution error: %s " % (filename,stderr)) raise AnsibleError("Inventory script (%s) had an execution error: %s " % (filename,stderr))
self.data = stdout # make sure script output is unicode so that json loader will output
# unicode strings itself
try:
self.data = to_unicode(stdout, errors="strict")
except Exception as e:
raise AnsibleError("inventory data from {0} contained characters that cannot be interpreted as UTF-8: {1}".format(to_str(self.filename), to_str(e)))
# see comment about _meta below # see comment about _meta below
self.host_vars_from_top = None self.host_vars_from_top = None
self._parse(stderr) self._parse(stderr)
@ -78,8 +84,6 @@ class InventoryScript:
sys.stderr.write(err + "\n") sys.stderr.write(err + "\n")
raise AnsibleError("failed to parse executable inventory script results from {0}: data needs to be formatted as a json dict".format(to_str(self.filename))) raise AnsibleError("failed to parse executable inventory script results from {0}: data needs to be formatted as a json dict".format(to_str(self.filename)))
self.raw = json_dict_bytes_to_unicode(self.raw)
group = None group = None
for (group_name, data) in self.raw.items(): for (group_name, data) in self.raw.items():

View file

@ -546,11 +546,10 @@ class AnsibleModule(object):
if no_log_object: if no_log_object:
self.no_log_values.update(return_values(no_log_object)) self.no_log_values.update(return_values(no_log_object))
# check the locale as set by the current environment, and # check the locale as set by the current environment, and reset to
# reset to LANG=C if it's an invalid/unavailable locale # a known valid (LANG=C) if it's an invalid/unavailable locale
self._check_locale() self._check_locale()
self._check_arguments(check_invalid_arguments) self._check_arguments(check_invalid_arguments)
# check exclusive early # check exclusive early
@ -1094,7 +1093,6 @@ class AnsibleModule(object):
# as it would be returned by locale.getdefaultlocale() # as it would be returned by locale.getdefaultlocale()
locale.setlocale(locale.LC_ALL, '') locale.setlocale(locale.LC_ALL, '')
except locale.Error: except locale.Error:
e = get_exception()
# fallback to the 'C' locale, which may cause unicode # fallback to the 'C' locale, which may cause unicode
# issues but is preferable to simply failing because # issues but is preferable to simply failing because
# of an unknown locale # of an unknown locale
@ -1757,25 +1755,29 @@ class AnsibleModule(object):
# rename might not preserve context # rename might not preserve context
self.set_context_if_different(dest, context, False) self.set_context_if_different(dest, context, False)
def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, use_unsafe_shell=False, prompt_regex=None): def run_command(self, args, check_rc=False, close_fds=True, executable=None, data=None, binary_data=False, path_prefix=None, cwd=None, use_unsafe_shell=False, prompt_regex=None, environ_update=None):
''' '''
Execute a command, returns rc, stdout, and stderr. Execute a command, returns rc, stdout, and stderr.
args is the command to run
If args is a list, the command will be run with shell=False. :arg args: is the command to run
If args is a string and use_unsafe_shell=False it will split args to a list and run with shell=False * If args is a list, the command will be run with shell=False.
If args is a string and use_unsafe_shell=True it run with shell=True. * If args is a string and use_unsafe_shell=False it will split args to a list and run with shell=False
Other arguments: * If args is a string and use_unsafe_shell=True it runs with shell=True.
- check_rc (boolean) Whether to call fail_json in case of :kw check_rc: Whether to call fail_json in case of non zero RC.
non zero RC. Default is False. Default False
- close_fds (boolean) See documentation for subprocess.Popen(). :kw close_fds: See documentation for subprocess.Popen(). Default True
Default is True. :kw executable: See documentation for subprocess.Popen(). Default None
- executable (string) See documentation for subprocess.Popen(). :kw data: If given, information to write to the stdin of the command
Default is None. :kw binary_data: If False, append a newline to the data. Default False
- prompt_regex (string) A regex string (not a compiled regex) which :kw path_prefix: If given, additional path to find the command in.
can be used to detect prompts in the stdout This adds to the PATH environment variable so helper commands in
which would otherwise cause the execution the same directory can also be found
to hang (especially if no input data is :kw cwd: iIf given, working directory to run the command inside
specified) :kw use_unsafe_shell: See `args` parameter. Default False
:kw prompt_regex: Regex string (not a compiled regex) which can be
used to detect prompts in the stdout which would otherwise cause
the execution to hang (especially if no input data is specified)
:kwarg environ_update: dictionary to *update* os.environ with
''' '''
shell = False shell = False
@ -1806,10 +1808,15 @@ class AnsibleModule(object):
msg = None msg = None
st_in = None st_in = None
# Set a temporary env path if a prefix is passed # Manipulate the environ we'll send to the new process
env=os.environ old_env_vals = {}
if environ_update:
for key, val in environ_update.items():
old_env_vals[key] = os.environ.get(key, None)
os.environ[key] = val
if path_prefix: if path_prefix:
env['PATH']="%s:%s" % (path_prefix, env['PATH']) old_env_vals['PATH'] = os.environ['PATH']
os.environ['PATH'] = "%s:%s" % (path_prefix, os.environ['PATH'])
# create a printable version of the command for use # create a printable version of the command for use
# in reporting later, which strips out things like # in reporting later, which strips out things like
@ -1851,11 +1858,10 @@ class AnsibleModule(object):
close_fds=close_fds, close_fds=close_fds,
stdin=st_in, stdin=st_in,
stdout=subprocess.PIPE, stdout=subprocess.PIPE,
stderr=subprocess.PIPE stderr=subprocess.PIPE,
env=os.environ,
) )
if path_prefix:
kwargs['env'] = env
if cwd and os.path.isdir(cwd): if cwd and os.path.isdir(cwd):
kwargs['cwd'] = cwd kwargs['cwd'] = cwd
@ -1934,6 +1940,13 @@ class AnsibleModule(object):
except: except:
self.fail_json(rc=257, msg=traceback.format_exc(), cmd=clean_args) self.fail_json(rc=257, msg=traceback.format_exc(), cmd=clean_args)
# Restore env settings
for key, val in old_env_vals.items():
if val is None:
del os.environ[key]
else:
os.environ[key] = val
if rc != 0 and check_rc: if rc != 0 and check_rc:
msg = heuristic_log_sanitize(stderr.rstrip(), self.no_log_values) msg = heuristic_log_sanitize(stderr.rstrip(), self.no_log_values)
self.fail_json(cmd=clean_args, rc=rc, stdout=stdout, stderr=stderr, msg=msg) self.fail_json(cmd=clean_args, rc=rc, stdout=stdout, stderr=stderr, msg=msg)

View file

@ -559,9 +559,11 @@ class Facts(object):
# also other OSs other than linux might need to check across several possible candidates # also other OSs other than linux might need to check across several possible candidates
# try various forms of querying pid 1 # try various forms of querying pid 1
proc_1 = os.path.basename(get_file_content('/proc/1/comm')) proc_1 = get_file_content('/proc/1/comm')
if proc_1 is None: if proc_1 is None:
rc, proc_1, err = module.run_command("ps -p 1 -o comm|tail -n 1", use_unsafe_shell=True) rc, proc_1, err = module.run_command("ps -p 1 -o comm|tail -n 1", use_unsafe_shell=True)
else:
proc_1 = os.path.basename(proc_1)
if proc_1 == 'init' or proc_1.endswith('sh'): if proc_1 == 'init' or proc_1.endswith('sh'):
# many systems return init, so this cannot be trusted, if it ends in 'sh' it probably is a shell in a container # many systems return init, so this cannot be trusted, if it ends in 'sh' it probably is a shell in a container
@ -569,7 +571,7 @@ class Facts(object):
# if not init/None it should be an identifiable or custom init, so we are done! # if not init/None it should be an identifiable or custom init, so we are done!
if proc_1 is not None: if proc_1 is not None:
self.facts['service_mgr'] = proc_1 self.facts['service_mgr'] = proc_1.strip()
# start with the easy ones # start with the easy ones
elif self.facts['distribution'] == 'MacOSX': elif self.facts['distribution'] == 'MacOSX':

@ -1 +1 @@
Subproject commit ffea58ee86dbee20dc272c74cd5f8e02f6f317e6 Subproject commit d7fac82f97c153af08dbea2b2ae9718b19abeb8a

@ -1 +1 @@
Subproject commit e9450df878632531fae574b5eaf28bf0f7916948 Subproject commit f798240f436a16a828f48759bbd176b6bccdfe75

View file

@ -71,7 +71,7 @@ try:
except ImportError: except ImportError:
pass pass
from ansible.compat.six import PY3, byte2int from ansible.compat.six import PY3
from ansible.utils.unicode import to_unicode, to_bytes from ansible.utils.unicode import to_unicode, to_bytes
HAS_ANY_PBKDF2HMAC = HAS_PBKDF2 or HAS_PBKDF2HMAC HAS_ANY_PBKDF2HMAC = HAS_PBKDF2 or HAS_PBKDF2HMAC
@ -236,22 +236,24 @@ class VaultEditor:
""" """
file_len = os.path.getsize(tmp_path) file_len = os.path.getsize(tmp_path)
max_chunk_len = min(1024*1024*2, file_len)
passes = 3 if file_len > 0: # avoid work when file was empty
with open(tmp_path, "wb") as fh: max_chunk_len = min(1024*1024*2, file_len)
for _ in range(passes):
fh.seek(0, 0)
# get a random chunk of data, each pass with other length
chunk_len = random.randint(max_chunk_len//2, max_chunk_len)
data = os.urandom(chunk_len)
for _ in range(0, file_len // chunk_len): passes = 3
fh.write(data) with open(tmp_path, "wb") as fh:
fh.write(data[:file_len % chunk_len]) for _ in range(passes):
fh.seek(0, 0)
# get a random chunk of data, each pass with other length
chunk_len = random.randint(max_chunk_len//2, max_chunk_len)
data = os.urandom(chunk_len)
assert(fh.tell() == file_len) # FIXME remove this assert once we have unittests to check its accuracy for _ in range(0, file_len // chunk_len):
os.fsync(fh) fh.write(data)
fh.write(data[:file_len % chunk_len])
assert(fh.tell() == file_len) # FIXME remove this assert once we have unittests to check its accuracy
os.fsync(fh)
def _shred_file(self, tmp_path): def _shred_file(self, tmp_path):
@ -273,7 +275,7 @@ class VaultEditor:
try: try:
r = call(['shred', tmp_path]) r = call(['shred', tmp_path])
except OSError as e: except OSError:
# shred is not available on this system, or some other error occurred. # shred is not available on this system, or some other error occurred.
r = 1 r = 1

View file

@ -24,6 +24,7 @@ import json
import os import os
import pipes import pipes
import random import random
import re
import stat import stat
import tempfile import tempfile
import time import time
@ -356,6 +357,14 @@ class ActionBase(with_metaclass(ABCMeta, object)):
return data[idx:] return data[idx:]
def _strip_success_message(self, data):
'''
Removes the BECOME-SUCCESS message from the data.
'''
if data.strip().startswith('BECOME-SUCCESS-'):
data = re.sub(r'^((\r)?\n)?BECOME-SUCCESS.*(\r)?\n', '', data)
return data
def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True): def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True):
''' '''
Transfer and run a module along with its arguments. Transfer and run a module along with its arguments.
@ -481,8 +490,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
display.debug("done with _execute_module (%s, %s)" % (module_name, module_args)) display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
return data return data
def _low_level_execute_command(self, cmd, sudoable=True, in_data=None, def _low_level_execute_command(self, cmd, sudoable=True, in_data=None, executable=C.DEFAULT_EXECUTABLE, encoding_errors='replace'):
executable=None, encoding_errors='replace'):
''' '''
This is the function which executes the low level shell command, which This is the function which executes the low level shell command, which
may be commands to create/remove directories for temporary files, or to may be commands to create/remove directories for temporary files, or to
@ -498,7 +506,7 @@ class ActionBase(with_metaclass(ABCMeta, object)):
''' '''
if executable is not None: if executable is not None:
cmd = executable + ' -c ' + cmd cmd = executable + ' -c ' + pipes.quote(cmd)
display.debug("_low_level_execute_command(): starting") display.debug("_low_level_execute_command(): starting")
if not cmd: if not cmd:

View file

@ -75,4 +75,8 @@ class ActionModule(ActionBase):
result['changed'] = True result['changed'] = True
# be sure to strip out the BECOME-SUCCESS message, which may
# be there depending on the output of the module
result['stdout'] = self._strip_success_message(result.get('stdout', ''))
return result return result

View file

@ -19,8 +19,6 @@ __metaclass__ = type
from ansible.plugins.action import ActionBase from ansible.plugins.action import ActionBase
import re
class ActionModule(ActionBase): class ActionModule(ActionBase):
TRANSFERS_FILES = False TRANSFERS_FILES = False
@ -42,7 +40,6 @@ class ActionModule(ActionBase):
# for some modules (script, raw), the sudo success key # for some modules (script, raw), the sudo success key
# may leak into the stdout due to the way the sudo/su # may leak into the stdout due to the way the sudo/su
# command is constructed, so we filter that out here # command is constructed, so we filter that out here
if result.get('stdout','').strip().startswith('BECOME-SUCCESS-'): result['stdout'] = self._strip_success_message(result.get('stdout', ''))
result['stdout'] = re.sub(r'^((\r)?\n)?BECOME-SUCCESS.*(\r)?\n', '', result['stdout'])
return result return result

View file

@ -26,10 +26,15 @@ class LookupModule(LookupBase):
def get_hosts(self, variables, pattern): def get_hosts(self, variables, pattern):
hosts = [] hosts = []
if pattern in variables['groups']: if pattern[0] in ('!','&'):
hosts = variables['groups'][pattern] obj = pattern[1:]
elif pattern in variables['groups']['all']: else:
hosts = [pattern] obj = pattern
if obj in variables['groups']:
hosts = variables['groups'][obj]
elif obj in variables['groups']['all']:
hosts = [obj]
return hosts return hosts
def run(self, terms, variables=None, **kwargs): def run(self, terms, variables=None, **kwargs):

View file

@ -492,7 +492,7 @@ class StrategyBase:
tags = [ tags ] tags = [ tags ]
if len(tags) > 0: if len(tags) > 0:
if len(b._task_include.tags) > 0: if len(b._task_include.tags) > 0:
raise AnsibleParserError("Include tasks should not specify tags in more than one way (both via args and directly on the task)", raise AnsibleParserError("Include tasks should not specify tags in more than one way (both via args and directly on the task). Mixing tag specify styles is prohibited for whole import hierarchy, not only for single import statement",
obj=included_file._task._ds) obj=included_file._task._ds)
display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option") display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option")
b._task_include.tags = tags b._task_include.tags = tags

View file

@ -194,8 +194,6 @@ class StrategyModule(StrategyBase):
try: try:
action = action_loader.get(task.action, class_only=True) action = action_loader.get(task.action, class_only=True)
if task.run_once or getattr(action, 'BYPASS_HOST_LOOP', False):
run_once = True
except KeyError: except KeyError:
# we don't care here, because the action may simply not have a # we don't care here, because the action may simply not have a
# corresponding action plugin # corresponding action plugin
@ -227,6 +225,8 @@ class StrategyModule(StrategyBase):
templar = Templar(loader=self._loader, variables=task_vars) templar = Templar(loader=self._loader, variables=task_vars)
display.debug("done getting variables") display.debug("done getting variables")
run_once = templar.template(task.run_once)
if not callback_sent: if not callback_sent:
display.debug("sending task start callback, copying the task so we can template it temporarily") display.debug("sending task start callback, copying the task so we can template it temporarily")
saved_name = task.name saved_name = task.name
@ -249,7 +249,7 @@ class StrategyModule(StrategyBase):
self._queue_task(host, task, task_vars, play_context) self._queue_task(host, task, task_vars, play_context)
# if we're bypassing the host loop, break out now # if we're bypassing the host loop, break out now
if run_once: if run_once or getattr(action, 'BYPASS_HOST_LOOP', False):
break break
results += self._process_pending_results(iterator, one_pass=True) results += self._process_pending_results(iterator, one_pass=True)
@ -342,13 +342,20 @@ class StrategyModule(StrategyBase):
display.debug("results queue empty") display.debug("results queue empty")
display.debug("checking for any_errors_fatal") display.debug("checking for any_errors_fatal")
had_failure = include_failure failed_hosts = []
for res in results: for res in results:
if res.is_failed() or res.is_unreachable(): if res.is_failed() or res.is_unreachable():
had_failure = True failed_hosts.append(res._host.name)
break
if task and task.any_errors_fatal and had_failure: # if any_errors_fatal and we had an error, mark all hosts as failed
return False if task and task.any_errors_fatal and len(failed_hosts) > 0:
for host in hosts_left:
# don't double-mark hosts, or the iterator will potentially
# fail them out of the rescue/always states
if host.name not in failed_hosts:
self._tqm._failed_hosts[host.name] = True
iterator.mark_host_failed(host)
display.debug("done checking for any_errors_fatal")
except (IOError, EOFError) as e: except (IOError, EOFError) as e:
display.debug("got IOError/EOFError in task loop: %s" % e) display.debug("got IOError/EOFError in task loop: %s" % e)

View file

@ -39,6 +39,7 @@ class OpenStringIO(StringIO):
def close(self): def close(self):
pass pass
@unittest.skipIf(sys.version_info[0] >= 3, "Python 3 is not supported on targets (yet)") @unittest.skipIf(sys.version_info[0] >= 3, "Python 3 is not supported on targets (yet)")
class TestAnsibleModuleRunCommand(unittest.TestCase): class TestAnsibleModuleRunCommand(unittest.TestCase):
@ -111,10 +112,6 @@ class TestAnsibleModuleRunCommand(unittest.TestCase):
self.assertEqual(args, ('ls a " b" "c "', )) self.assertEqual(args, ('ls a " b" "c "', ))
self.assertEqual(kwargs['shell'], True) self.assertEqual(kwargs['shell'], True)
def test_path_prefix(self):
self.module.run_command('foo', path_prefix='/opt/bin')
self.assertEqual('/opt/bin', self.os.environ['PATH'].split(':')[0])
def test_cwd(self): def test_cwd(self):
self.os.getcwd.return_value = '/old' self.os.getcwd.return_value = '/old'
self.module.run_command('/bin/ls', cwd='/new') self.module.run_command('/bin/ls', cwd='/new')

View file

@ -49,7 +49,7 @@ class TestActionBase(unittest.TestCase):
play_context.remote_user = 'apo' play_context.remote_user = 'apo'
action_base._low_level_execute_command('ECHO', sudoable=True) action_base._low_level_execute_command('ECHO', sudoable=True)
play_context.make_become_cmd.assert_called_once_with('ECHO', executable=None) play_context.make_become_cmd.assert_called_once_with("/bin/sh -c ECHO", executable='/bin/sh')
play_context.make_become_cmd.reset_mock() play_context.make_become_cmd.reset_mock()
@ -58,6 +58,6 @@ class TestActionBase(unittest.TestCase):
try: try:
play_context.remote_user = 'root' play_context.remote_user = 'root'
action_base._low_level_execute_command('ECHO SAME', sudoable=True) action_base._low_level_execute_command('ECHO SAME', sudoable=True)
play_context.make_become_cmd.assert_called_once_with('ECHO SAME', executable=None) play_context.make_become_cmd.assert_called_once_with("/bin/sh -c 'ECHO SAME'", executable='/bin/sh')
finally: finally:
C.BECOME_ALLOW_SAME_USER = become_allow_same_user C.BECOME_ALLOW_SAME_USER = become_allow_same_user