Cleaning up FIXMEs

Commit 86de1429e5 (parent 1e50d31cdc)
Author: James Cammarata
Date:   2015-10-22 16:03:37 -04:00

32 changed files with 49 additions and 105 deletions

@@ -38,7 +38,7 @@ from ansible.utils.display import Display

 class SortedOptParser(optparse.OptionParser):
     '''Optparser which sorts the options by opt before outputting --help'''

-    #FIXME: epilog parsing: OptionParser.format_epilog = lambda self, formatter: self.epilog
+    # TODO: epilog parsing: OptionParser.format_epilog = lambda self, formatter: self.epilog

     def format_help(self, formatter=None, epilog=None):
         self.option_list.sort(key=operator.methodcaller('get_opt_string'))
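
The sort key above uses operator.methodcaller, which builds a callable that invokes the named method on whatever it is passed:

import operator

# getter(opt) is equivalent to opt.get_opt_string(), so as a sort key it
# orders options by their canonical --long-option string.
getter = operator.methodcaller('get_opt_string')
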
@@ -223,8 +223,8 @@ class CLI(object):
                      async_opts=False, connect_opts=False, subset_opts=False, check_opts=False, inventory_opts=False, epilog=None, fork_opts=False):
         ''' create an options parser for most ansible scripts '''

-        #FIXME: implemente epilog parsing
-        #OptionParser.format_epilog = lambda self, formatter: self.epilog
+        # TODO: implement epilog parsing
+        # OptionParser.format_epilog = lambda self, formatter: self.epilog

         # base opts
         parser = SortedOptParser(usage, version=CLI.version("%prog"))
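
The epilog TODO exists because optparse re-wraps epilog text by default, collapsing the line breaks of hand-formatted examples. A minimal sketch of the override the comment proposes (EpilogOptParser is an illustrative name, not upstream code):

import optparse

class EpilogOptParser(optparse.OptionParser):
    # Return the epilog verbatim instead of letting optparse re-wrap it,
    # preserving hand-formatted example blocks.
    def format_epilog(self, formatter):
        return self.epilog or ""

parser = EpilogOptParser(epilog="Examples:\n  ansible all -m ping\n")
parser.print_help()  # the epilog prints with its line breaks intact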

@@ -181,7 +181,7 @@ class PlaybookExecutor:
                     self.display.display("No issues encountered")
             return result

-        # FIXME: this stat summary stuff should be cleaned up and moved
+        # TODO: this stat summary stuff should be cleaned up and moved
         #       to a new method, if it even belongs here...
         self._display.banner("PLAY RECAP")

@@ -108,11 +108,11 @@ class ResultProcess(multiprocessing.Process):
                     self._send_result(('register_host_var', result._host, result._task.register, result._result))

                 # send callbacks, execute other options based on the result status
-                # FIXME: this should all be cleaned up and probably moved to a sub-function.
+                # TODO: this should all be cleaned up and probably moved to a sub-function.
                 #       the fact that this sometimes sends a TaskResult and other times
                 #       sends a raw dictionary back may be confusing, but the result vs.
                 #       results implementation for tasks with loops should be cleaned up
                 #       better than this
                 if result.is_unreachable():
                     self._send_result(('host_unreachable', result))
                 elif result.is_failed():
@@ -163,8 +163,8 @@ class ResultProcess(multiprocessing.Process):
             except (KeyboardInterrupt, IOError, EOFError):
                 break
             except:
-                # FIXME: we should probably send a proper callback here instead of
+                # TODO: we should probably send a proper callback here instead of
                 #       simply dumping a stack trace on the screen
                 traceback.print_exc()
                 break

@@ -131,7 +131,6 @@ class WorkerProcess(multiprocessing.Process):
                         task_result = TaskResult(host, task, dict(unreachable=True))
                         self._rslt_q.put(task_result, block=False)
                     except:
-                        # FIXME: most likely an abort, catch those kinds of errors specifically
                         break
             except Exception as e:
                 if isinstance(e, (IOError, EOFError, KeyboardInterrupt)) and not isinstance(e, TemplateNotFound):
@@ -144,7 +143,6 @@ class WorkerProcess(multiprocessing.Process):
                 except:
                     debug("WORKER EXCEPTION: %s" % e)
                     debug("WORKER EXCEPTION: %s" % traceback.format_exc())
-                # FIXME: most likely an abort, catch those kinds of errors specifically
                 break

         debug("WORKER PROCESS EXITING")

@@ -341,7 +341,6 @@ class TaskExecutor:
             result = None
             for attempt in range(retries):
                 if attempt > 0:
-                    # FIXME: this should use the self._display.callback/message passing mechanism
                     self._display.display("FAILED - RETRYING: %s (%d retries left). Result was: %s" % (self._task, retries-attempt, result), color="red")

                 result['attempts'] = attempt + 1
@@ -480,8 +479,6 @@ class TaskExecutor:
            correct connection object from the list of connection plugins
         '''

-        # FIXME: calculation of connection params/auth stuff should be done here
-
         if not self._play_context.remote_addr:
             self._play_context.remote_addr = self._host.address

@@ -117,8 +117,6 @@ class Inventory(object):
         self._vars_plugins = [ x for x in vars_loader.all(self) ]

-        # FIXME: shouldn't be required, since the group/host vars file
-        #        management will be done in VariableManager
         # get group vars from group_vars/ files and vars plugins
         for group in self.groups.values():
             group.vars = combine_vars(group.vars, self.get_group_variables(group.name))
@@ -648,11 +646,11 @@ class Inventory(object):
         if dir_name != self._playbook_basedir:
             self._playbook_basedir = dir_name
             # get group vars from group_vars/ files
-            # FIXME: excluding the new_pb_basedir directory may result in group_vars
+            # TODO: excluding the new_pb_basedir directory may result in group_vars
             #       files loading more than they should, however with the file caching
             #       we do this shouldn't be too much of an issue. Still, this should
             #       be fixed at some point to allow a "first load" to touch all of the
             #       directories, then later runs only touch the new basedir specified
             for group in self.groups.values():
                 #group.vars = combine_vars(group.vars, self.get_group_vars(group, new_pb_basedir=True))
                 group.vars = combine_vars(group.vars, self.get_group_vars(group))
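
combine_vars, used throughout this hunk, layers one vars dict over another; with the default 'replace' hash behaviour it is essentially a dict update where the second argument wins. A simplified sketch (the real function also supports a deep-merge mode):

def combine_vars_sketch(a, b):
    # default 'replace' behaviour: b's keys win on conflict
    result = a.copy()
    result.update(b)
    return result
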
@@ -708,7 +706,6 @@ class Inventory(object):
             if _basedir == self._playbook_basedir and scan_pass != 1:
                 continue

-            # FIXME: these should go to VariableManager
             if group and host is None:
                 # load vars in dir/group_vars/name_of_group
                 base_path = os.path.realpath(os.path.join(basedir, "group_vars/%s" % group.name))

@@ -243,7 +243,6 @@ class ModuleArgsParser:
         # this is the 'extra gross' scenario detailed above, so we grab
         # the args and pass them in as additional arguments, which can/will
         # be overwritten via dict updates from the other arg sources below
-        # FIXME: add test cases for this
         additional_args = self._task_ds.get('args', dict())

         # We can have one of action, local_action, or module specified

@@ -360,7 +360,7 @@ class VaultFile(object):
         _, self.tmpfile = tempfile.mkstemp()

-    ### FIXME:
+    ### TODO:
     # __del__ can be problematic in python... For this use case, make
     # VaultFile a context manager instead (implement __enter__ and __exit__)
     def __del__(self):
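
As the comment says, __del__ runs only when (and if) the garbage collector reclaims the object, so temp-file cleanup is better done deterministically. A minimal sketch of the context-manager shape it proposes (VaultTempFile and its internals are illustrative, not the upstream class):

import os
import tempfile

class VaultTempFile:
    def __enter__(self):
        fd, self.tmpfile = tempfile.mkstemp()
        os.close(fd)
        return self.tmpfile

    def __exit__(self, exc_type, exc_value, tb):
        # cleanup happens here, deterministically, instead of in __del__
        if os.path.exists(self.tmpfile):
            os.remove(self.tmpfile)
        return False  # never swallow exceptions

# usage: the temp file is removed as soon as the block exits
with VaultTempFile() as path:
    with open(path, 'w') as f:
        f.write('decrypted contents')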

@@ -183,8 +183,6 @@ class Base:
         # Walk all attributes in the class. We sort them based on their priority
         # so that certain fields can be loaded before others, if they are dependent.
-        # FIXME: we currently don't do anything with private attributes but
-        #        may later decide to filter them out of 'ds' here.
         base_attributes = self._get_base_attributes()
         for name, attr in sorted(base_attributes.items(), key=operator.itemgetter(1)):
             # copy the value over unless a _load_field method is defined

@@ -233,7 +233,6 @@ class Play(Base, Taggable, Become):
                 vars_prompts.append(prompt_data)
         return vars_prompts

-    # FIXME: post_validation needs to ensure that become/su/sudo have only 1 set
     def _compile_roles(self):
         '''
         Handles the role compilation step, returning a flat list of tasks

@@ -472,7 +472,8 @@ class PlayContext(Base):
         In case users need to access from the play, this is a legacy from runner.
         '''

-        #FIXME: remove password? possibly add become/sudo settings
+        # TODO: should we be setting the more generic values here rather than
+        #       the more specific _ssh_ ones?
         for special_var in ['ansible_connection', 'ansible_ssh_host', 'ansible_ssh_pass', 'ansible_ssh_port', 'ansible_ssh_user', 'ansible_ssh_private_key_file', 'ansible_ssh_pipelining']:
             if special_var not in variables:
                 for prop, varnames in MAGIC_VARIABLE_MAPPING.items():

@@ -107,8 +107,6 @@ class PlaybookInclude(Base, Conditional, Taggable):
             else:
                 # some basic error checking, to make sure vars are properly
                 # formatted and do not conflict with k=v parameters
-                # FIXME: we could merge these instead, but controlling the order
-                #        in which they're encountered could be difficult
                 if k == 'vars':
                     if 'vars' in new_ds:
                         raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
@@ -129,8 +127,6 @@ class PlaybookInclude(Base, Conditional, Taggable):
         if len(items) == 0:
             raise AnsibleParserError("include statements must specify the file name to include", obj=ds)
         else:
-            # FIXME/TODO: validate that items[0] is a file, which also
-            #             exists and is readable
             new_ds['include'] = items[0]
             if len(items) > 1:
                 # rejoin the parameter portion of the arguments and
@@ -139,7 +135,6 @@ class PlaybookInclude(Base, Conditional, Taggable):
                 if 'tags' in params:
                     new_ds['tags'] = params.pop('tags')
                 if 'vars' in new_ds:
-                    # FIXME: see fixme above regarding merging vars
                     raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
                 new_ds['vars'] = params

@@ -37,10 +37,10 @@ from ansible.utils.vars import combine_vars

 __all__ = ['Role', 'hash_params']

-# FIXME: this should be a utility function, but can't be a member of
+# TODO: this should be a utility function, but can't be a member of
 #       the role due to the fact that it would require the use of self
 #       in a static method. This is also used in the base class for
 #       strategies (ansible/plugins/strategy/__init__.py)
 def hash_params(params):
     if not isinstance(params, dict):
         return params
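
For context, hash_params reduces a (possibly nested) parameter dict to a hashable value so identical role invocations can be deduplicated. A rough sketch of the idea, not the exact upstream implementation:

def hash_params_sketch(params):
    # dicts and lists are unhashable, so fold them into frozensets/tuples
    if isinstance(params, dict):
        return frozenset((k, hash_params_sketch(v)) for k, v in params.items())
    if isinstance(params, (list, tuple)):
        return tuple(hash_params_sketch(v) for v in params)
    return params

# equal params produce equal (and hashable) results
assert hash_params_sketch({'a': [1, 2]}) == hash_params_sketch({'a': [1, 2]})
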
@@ -128,7 +128,6 @@ class Role(Base, Become, Conditional, Taggable):
             return r
         except RuntimeError:
-            # FIXME: needs a better way to access the ds in the role include
             raise AnsibleError("A recursion loop was detected with the roles specified. Make sure child roles do not have dependencies on parent roles", obj=role_include._ds)

     def _load_role_data(self, role_include, parent_role=None):
@@ -244,7 +243,6 @@ class Role(Base, Become, Conditional, Taggable):
         return self._parents

     def get_default_vars(self):
-        # FIXME: get these from dependent roles too
         default_vars = dict()
         for dep in self.get_all_dependencies():
             default_vars = combine_vars(default_vars, dep.get_default_vars())

@@ -70,6 +70,9 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
         if isinstance(ds, dict):
             ds = super(RoleDefinition, self).preprocess_data(ds)

+        # save the original ds for use later
+        self._ds = ds
+
         # we create a new data structure here, using the same
         # object used internally by the YAML parsing code so we
         # can preserve file:line:column information if it exists
@@ -97,9 +100,6 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
         # we store the role path internally
         self._role_path = role_path

-        # save the original ds for use later
-        self._ds = ds
-
         # and return the cleaned-up data structure
         return new_ds
@@ -176,10 +176,7 @@ class RoleDefinition(Base, Become, Conditional, Taggable):
             if self._loader.path_exists(role_path):
                 return (role_name, role_path)

-        # FIXME: make the parser smart about list/string entries in
-        #        the yaml so the error line/file can be reported here
-        raise AnsibleError("the role '%s' was not found in %s" % (role_name, ":".join(role_search_paths)))
+        raise AnsibleError("the role '%s' was not found in %s" % (role_name, ":".join(role_search_paths)), obj=self._ds)

     def _split_role_params(self, ds):
         '''

@@ -36,7 +36,8 @@ __all__ = ['RoleInclude']

 class RoleInclude(RoleDefinition):

     """
-    FIXME: docstring
+    A derivative of RoleDefinition, used by playbook code when a role
+    is included for execution in a play.
     """

     _delegate_to = FieldAttribute(isa='string')

@@ -50,7 +50,8 @@ except ImportError:

 class RoleRequirement(RoleDefinition):

     """
-    FIXME: document various ways role specs can be specified
+    Helper class for Galaxy, which is used to parse both dependencies
+    specified in meta/main.yml and requirements.yml files.
     """

     def __init__(self):

@@ -232,13 +232,12 @@ class PluginLoader:
             # they can have any suffix
             suffix = ''

-        ### FIXME:
-        # Instead of using the self._paths cache (PATH_CACHE) and
-        # self._searched_paths we could use an iterator. Before enabling that
-        # we need to make sure we don't want to add additional directories
-        # (add_directory()) once we start using the iterator. Currently, it
-        # looks like _get_paths() never forces a cache refresh so if we expect
-        # additional directories to be added later, it is buggy.
+        # TODO: Instead of using the self._paths cache (PATH_CACHE) and
+        #       self._searched_paths we could use an iterator. Before enabling that
+        #       we need to make sure we don't want to add additional directories
+        #       (add_directory()) once we start using the iterator. Currently, it
+        #       looks like _get_paths() never forces a cache refresh so if we expect
+        #       additional directories to be added later, it is buggy.
         for path in (p for p in self._get_paths() if p not in self._searched_paths and os.path.isdir(p)):
             try:
                 full_paths = (os.path.join(path, f) for f in os.listdir(path))

@@ -88,12 +88,10 @@ class ActionBase:
             module_path = self._shared_loader_obj.module_loader.find_plugin(module_name, mod_type)
             if module_path:
                 break
-        else: # This is a for-else: http://bit.ly/1ElPkyg
-            # FIXME: Why is it necessary to look for the windows version?
-            #        Shouldn't all modules be installed?
-            #
+        else:
             # Use Windows version of ping module to check module paths when
-            # using a connection that supports .ps1 suffixes.
+            # using a connection that supports .ps1 suffixes. We check specifically
+            # for win_ping here, otherwise the code would look for ping.ps1
             if '.ps1' in self._connection.module_implementation_preferences:
                 ping_module = 'win_ping'
             else:
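
The else: kept above is attached to the for loop, not to an if (which is what the dropped link explained): its body runs only when the loop finishes without hitting break, i.e. when no module path was found on any search path. A standalone illustration:

# Minimal for-else: the else block runs only if the loop never breaks.
for candidate in ['ping.py', 'ping.ps1']:
    if candidate.endswith('.ps1'):
        print("found", candidate)
        break
else:
    print("no match found")  # skipped here, since the loop broke
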
@@ -138,8 +136,6 @@ class ActionBase:
         Determines if a temp path should be created before the action is executed.
         '''

-        # FIXME: modified from original, needs testing? Since this is now inside
-        #        the action plugin, it should make it just this simple
         return getattr(self, 'TRANSFERS_FILES', False)

     def _late_needs_tmp_path(self, tmp, module_style):
@@ -158,8 +154,6 @@ class ActionBase:
             return True
         return False

-    # FIXME: return a datastructure in this function instead of raising errors -
-    #        the new executor pipeline handles it much better that way
     def _make_tmp_path(self):
         '''
         Create and return a temporary path on a remote box.
@@ -199,8 +193,6 @@ class ActionBase:
                 output = output + u": %s" % result['stdout']
             raise AnsibleConnectionFailure(output)

-        # FIXME: do we still need to do this?
-        #rc = self._connection._shell.join_path(utils.last_non_blank_line(result['stdout']).strip(), '')
         rc = self._connection._shell.join_path(result['stdout'].strip(), u'').splitlines()[-1]

         # Catch failure conditions, files should never be

@@ -57,14 +57,10 @@ class ActionModule(ActionBase):
         source = os.path.expanduser(source)

         if copy:
-            # FIXME: the original file stuff needs to be reworked
-            if '_original_file' in task_vars:
-                source = self._loader.path_dwim_relative(task_vars['_original_file'], 'files', source)
-            else:
-                if self._task._role is not None:
-                    source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
-                else:
-                    source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source)
+            if self._task._role is not None:
+                source = self._loader.path_dwim_relative(self._task._role._role_path, 'files', source)
+            else:
+                source = self._loader.path_dwim_relative(self._loader.get_basedir(), 'files', source)

         remote_checksum = self._remote_checksum(dest, all_vars=task_vars)
         if remote_checksum != '3':
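
The restructured code resolves the source against the role's files/ directory when the task belongs to a role, otherwise against the playbook basedir. Roughly, path_dwim_relative prefers the subdirectory and falls back to the base (a simplified sketch, not the full DataLoader logic):

import os

def path_dwim_relative_sketch(basedir, dirname, source):
    # try <basedir>/<dirname>/<source> first, e.g. roles/x/files/foo.conf
    candidate = os.path.join(basedir, dirname, source)
    if os.path.exists(candidate):
        return candidate
    return os.path.join(basedir, source)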

@@ -17,7 +17,6 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type

-# FIXME: can we store these as something else before we ship it?
 import sys
 import time
 import json
@@ -95,7 +94,7 @@ class CacheModule(BaseCacheModule):
             self.delete(key)

     def copy(self):
-        # FIXME: there is probably a better way to do this in redis
+        # TODO: there is probably a better way to do this in redis
         ret = dict()
         for key in self.keys():
             ret[key] = self.get(key)
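
One candidate for the "better way" in the TODO is redis's MGET, which fetches every key in a single round trip instead of one GET per key. A hedged sketch, assuming the plugin's redis client in self._cache, its key-prefixing helper _make_key(), and JSON-encoded values (all assumptions about the surrounding class):

import json

def copy(self):
    keys = self.keys()
    # one MGET round trip instead of len(keys) separate GETs
    raw = self._cache.mget([self._make_key(k) for k in keys])
    return dict((k, json.loads(v)) for k, v in zip(keys, raw) if v is not None)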

@@ -40,9 +40,6 @@ class CallbackBase:
     custom actions.
     '''

-    # FIXME: the list of functions here needs to be updated once we have
-    #        finalized the list of callback methods used in the default callback
     def __init__(self, display):
         self._display = display
         if self._display.verbosity >= 4:

@@ -166,10 +166,6 @@ class Connection(ConnectionBase):
         super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)

-        # FIXME:
-        #if sudoable and self..become and self.runner.become_method not in self.become_methods_supported:
-        #    raise AnsibleError("Internal Error: this module does not support running commands via %s" % self.runner.become_method)
-
         if in_data:
             raise AnsibleError("Internal Error: this module does not support optimized module pipelining")

@@ -137,9 +137,7 @@ class Connection(ConnectionBase):
         ## Next, additional arguments based on the configuration.

         # sftp batch mode allows us to correctly catch failed transfers, but can
-        # be disabled if the client side doesn't support the option. FIXME: is
-        # this still a real concern?
+        # be disabled if the client side doesn't support the option.
         if binary == 'sftp' and C.DEFAULT_SFTP_BATCH_MODE:
             self._command += ['-b', '-']
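
For context on the comment above: in batch mode (-b) sftp aborts on the first failed command and exits non-zero, which is what lets failed transfers be detected reliably; an interactive session may exit 0 regardless. A rough sketch of the pattern (host and paths illustrative):

import subprocess

cmd = ['sftp', '-b', '-', 'user@host']  # '-b -' reads the batch from stdin
proc = subprocess.Popen(cmd, stdin=subprocess.PIPE,
                        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(b'put /tmp/src /tmp/dest\n')
if proc.returncode != 0:
    raise RuntimeError('sftp transfer failed: %r' % stderr)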

@@ -63,7 +63,6 @@ class LookupModule(LookupBase):

     def run(self, terms, variables, **kwargs):

-        ### FIXME: Is this needed now that listify is run on all lookup plugin terms?
         if not isinstance(terms, list):
             raise AnsibleError("with_flattened expects a list")

@@ -27,7 +27,6 @@ class LookupModule(LookupBase):

     def run(self, terms, variables, **kwargs):

-        ### FIXME: Is this needed now that listify is run on all lookup plugin terms?
         if not isinstance(terms, list):
             raise AnsibleError("with_indexed_items expects a list")

@@ -47,9 +47,9 @@ except ImportError:

 __all__ = ['StrategyBase']

-# FIXME: this should probably be in the plugins/__init__.py, with
+# TODO: this should probably be in the plugins/__init__.py, with
 #       a smarter mechanism to set all of the attributes based on
 #       the loaders created there
 class SharedPluginLoaderObj:
     '''
     A simple object to make pass the various plugin loaders to
@@ -327,7 +327,6 @@ class StrategyBase:
                 allgroup.add_host(new_host)

             # Set/update the vars for this host
-            # FIXME: probably should have a set vars method for the host?
             new_vars = host_info.get('host_vars', dict())
             new_host.vars = self._inventory.get_host_vars(new_host)
             new_host.vars.update(new_vars)
@@ -351,7 +350,6 @@ class StrategyBase:
             # clear pattern caching completely since it's unpredictable what
             # patterns may have referenced the group
-            # FIXME: is this still required?
             self._inventory.clear_pattern_cache()

     def _add_group(self, host, result_item):

@@ -109,7 +109,6 @@ class StrategyModule(StrategyBase):
                     # since they do not use k=v pairs, so get that
                     meta_action = task.args.get('_raw_params')
                     if meta_action == 'noop':
-                        # FIXME: issue a callback for the noop here?
                         continue
                     elif meta_action == 'flush_handlers':
                         # FIXME: in the 'free' mode, flushing handlers should result in
@@ -170,8 +169,6 @@ class StrategyModule(StrategyBase):
                 results = self._wait_on_pending_results(iterator)
                 host_results.extend(results)
         except Exception as e:
-            # FIXME: ctrl+c can cause some failures here, so catch them
-            #        with the appropriate error type
             pass

         # run the base class run() method, which executes the cleanup function

@@ -15,7 +15,6 @@
 # You should have received a copy of the GNU General Public License
 # along with Ansible. If not, see <http://www.gnu.org/licenses/>.

-# FIXME: copied mostly from old code, needs py3 improvements
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type
@@ -227,7 +226,6 @@ class Display:
             except OSError:
                 self.warning("somebody cleverly deleted cowsay or something during the PB run. heh.")

-        #FIXME: make this dynamic on tty size (look and ansible-doc)
         msg = msg.strip()
         star_len = (79 - len(msg))
         if star_len < 0:
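
The removed FIXME wanted the banner width to track the terminal rather than the hard-coded 79 columns. A sketch of that idea on Python 3 (shutil.get_terminal_size is 3.3+, so it was unavailable to the 2.x-compatible code of the time; the floor of 3 stars is an assumption mirroring the code truncated above):

import shutil

msg = "PLAY RECAP"
columns = shutil.get_terminal_size(fallback=(80, 24)).columns - 1
star_len = max(columns - len(msg), 3)
print("%s %s" % (msg, '*' * star_len))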

@@ -28,12 +28,11 @@ from ansible.template.safe_eval import safe_eval

 __all__ = ['listify_lookup_plugin_terms']

-#FIXME: probably just move this into lookup plugin base class
 def listify_lookup_plugin_terms(terms, templar, loader, fail_on_undefined=False, convert_bare=True):

     if isinstance(terms, string_types):
         stripped = terms.strip()
-        #FIXME: warn/deprecation on bare vars in with_ so we can eventually remove fail on undefined override
+        # TODO: warn/deprecation on bare vars in with_ so we can eventually remove fail on undefined override
         terms = templar.template(terms, convert_bare=convert_bare, fail_on_undefined=fail_on_undefined)
     else:
         terms = templar.template(terms, fail_on_undefined=fail_on_undefined)

@@ -16,7 +16,6 @@
 # You should have received a copy of the GNU General Public License
 # along with Ansible. If not, see <http://www.gnu.org/licenses/>.

-# FIXME: tags should auto-apply to role dependencies
 - name: cleanup old files
   shell: rm -rf {{output_dir}}/*

@@ -32,7 +32,6 @@ from ansible.parsing.yaml.objects import AnsibleMapping
 class TestDataLoader(unittest.TestCase):

     def setUp(self):
-        # FIXME: need to add tests that utilize vault_password
         self._loader = DataLoader()

     def tearDown(self):

@@ -70,8 +70,6 @@ class TestTemplar(unittest.TestCase):
         self.assertEqual(templar.template("{{bad_dict}}"), "{a='b'")
         self.assertEqual(templar.template("{{var_list}}"), [1])
         self.assertEqual(templar.template(1, convert_bare=True), 1)
-        #FIXME: lookup ignores fake file and returns error
-        #self.assertEqual(templar.template("{{lookup('file', '/path/to/my_file.txt')}}"), "foo")

         # force errors
         self.assertRaises(AnsibleUndefinedVariable, templar.template, "{{bad_var}}")