Indentation cleanup (partial)

Michael DeHaan 2012-07-15 09:32:47 -04:00
parent 7b177e8ebb
commit 68a9adc1be
9 changed files with 45 additions and 52 deletions


@@ -241,7 +241,7 @@ class PlaybookCallbacks(object):
     def on_task_start(self, name, is_conditional):
         msg = "TASK: [%s]" % name
         if is_conditional:
-            msg = "NOTIFIED: [%s]" % name
+            msg = "NOTIFIED: [%s]" % name
         print banner(msg)

     def on_vars_prompt(self, varname, private=True):


@@ -52,7 +52,7 @@ class Inventory(object):

         if type(host_list) in [ str, unicode ]:
             if host_list.find(",") != -1:
-                host_list = host_list.split(",")
+                host_list = host_list.split(",")

         if type(host_list) == list:
             all = Group('all')
@@ -108,8 +108,8 @@ class Inventory(object):

     def get_host(self, hostname):
         for group in self.groups:
-            for host in group.get_hosts():
-                if hostname == host.name:
+            for host in group.get_hosts():
+                if hostname == host.name:
                     return host
         return None

@@ -161,10 +161,9 @@ class Inventory(object):

     def restrict_to(self, restriction, append_missing=False):
         """ Restrict list operations to the hosts given in restriction """
-        if type(restriction) != list:
-            restriction = [ restriction ]
-
-        self._restriction = restriction
+        if type(restriction) != list:
+            restriction = [ restriction ]
+        self._restriction = restriction

     def lift_restriction(self):
         """ Do not restrict list operations """


@@ -31,7 +31,7 @@ class Group(object):
         self.child_groups = []
         self.parent_groups = []
         if self.name is None:
-            raise Exception("group name is required")
+            raise Exception("group name is required")

     def add_child_group(self, group):
         if self == group:
@@ -57,7 +57,7 @@ class Group(object):

         vars = {}
         # FIXME: verify this variable override order is what we want
         for ancestor in self.get_ancestors():
-            vars.update(ancestor.get_variables())
+            vars.update(ancestor.get_variables())
         vars.update(self.vars)
         return vars
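The FIXME above is about merge order: get_variables() applies ancestor group variables first and the group's own vars last, so the group itself wins on conflicts. A quick illustration with made-up values:

# made-up values; only the update() order matters here
ancestor_vars = {"ntp_server": "ntp.example.com", "timezone": "UTC"}
group_vars    = {"timezone": "US/Eastern"}

merged = {}
merged.update(ancestor_vars)   # ancestors first
merged.update(group_vars)      # the group itself overrides
print(merged)                  # {'ntp_server': 'ntp.example.com', 'timezone': 'US/Eastern'}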


@@ -39,7 +39,7 @@ class Host(object):
         self.groups.append(group)

     def set_variable(self, key, value):
-        self.vars[key]=value;
+        self.vars[key]=value

     def get_groups(self):
         groups = {}


@@ -79,9 +79,9 @@ class InventoryParser(object):
                 hostname = tokens[0]
                 port = C.DEFAULT_REMOTE_PORT
                 if hostname.find(":") != -1:
-                    tokens2 = hostname.split(":")
-                    hostname = tokens2[0]
-                    port = tokens2[1]
+                    tokens2 = hostname.split(":")
+                    hostname = tokens2[0]
+                    port = tokens2[1]
                 host = None
                 if hostname in self.hosts:
                     host = self.hosts[hostname]
@@ -89,9 +89,9 @@ class InventoryParser(object):
                     host = Host(name=hostname, port=port)
                     self.hosts[hostname] = host
                 if len(tokens) > 1:
-                    for t in tokens[1:]:
-                        (k,v) = t.split("=")
-                        host.set_variable(k,v)
+                    for t in tokens[1:]:
+                        (k,v) = t.split("=")
+                        host.set_variable(k,v)
                 self.groups[active_group_name].add_host(host)

     # [southeast:children]
@@ -134,7 +134,7 @@ class InventoryParser(object):
                 line = line.replace("[","").replace(":vars]","")
                 group = self.groups.get(line, None)
                 if group is None:
-                    raise errors.AnsibleError("can't add vars to undefined group: %s" % line)
+                    raise errors.AnsibleError("can't add vars to undefined group: %s" % line)
             elif line.startswith("#"):
                 pass
             elif line.startswith("["):


@@ -73,8 +73,8 @@ class InventoryParserYaml(object):
                 vars = subresult.get('vars',{})
                 if type(vars) == list:
                     for subitem in vars:
-                        for (k,v) in subitem.items():
-                            host.set_variable(k,v)
+                        for (k,v) in subitem.items():
+                            host.set_variable(k,v)
                 elif type(vars) == dict:
                     for (k,v) in subresult.get('vars',{}).items():
                         host.set_variable(k,v)
@@ -113,7 +113,7 @@ class InventoryParserYaml(object):
             for subitem in varlist:
                 vars.update(subitem)
             for (k,v) in vars.items():
-                host.set_variable(k,v)
+                host.set_variable(k,v)

             groups = item.get('groups', {})
             if type(groups) in [ str, unicode ]:
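The YAML parser above accepts host vars in two shapes, a list of one-key dicts or a plain dict, and flattens both onto the host. A small illustration with made-up data (a plain dict stands in for host.set_variable):

host_vars = {}

vars_as_list = [ {"http_port": 80}, {"max_clients": 200} ]
for subitem in vars_as_list:
    for (k, v) in subitem.items():
        host_vars[k] = v

vars_as_dict = {"max_clients": 300}
for (k, v) in vars_as_dict.items():
    host_vars[k] = v

print(host_vars)    # {'http_port': 80, 'max_clients': 300}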


@@ -133,25 +133,19 @@ class PlayBook(object):

         accumulated_plays = []

         if type(playbook_data) != list:
-            raise errors.AnsibleError(
-                "parse error: playbooks must be formatted as a YAML list"
-            )
+            raise errors.AnsibleError("parse error: playbooks must be formatted as a YAML list")

         for play in playbook_data:
-            if type(play) != dict:
-                raise errors.AnsibleError(
-                    "parse error: each play in a playbook must a YAML dictionary (hash), recieved: %s" % play
-                )
-            if 'include' in play:
-                if len(play.keys()) == 1:
-                    included_path = utils.path_dwim(self.basedir, play['include'])
-                    accumulated_plays.extend(self._load_playbook_from_file(included_path))
-                else:
-                    raise errors.AnsibleError(
-                        "parse error: top level includes cannot be used with other directives: %s" % play
-                    )
-            else:
-                accumulated_plays.append(play)
+            if type(play) != dict:
+                raise errors.AnsibleError("parse error: each play in a playbook must a YAML dictionary (hash), recieved: %s" % play)
+            if 'include' in play:
+                if len(play.keys()) == 1:
+                    included_path = utils.path_dwim(self.basedir, play['include'])
+                    accumulated_plays.extend(self._load_playbook_from_file(included_path))
+                else:
+                    raise errors.AnsibleError("parse error: top level includes cannot be used with other directives: %s" % play)
+            else:
+                accumulated_plays.append(play)

         return accumulated_plays
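The loader above enforces the shape of playbook data: the document must be a YAML list, each entry must be a dict, and a top-level include must be the only key in its entry. A toy check of those rules on made-up data (simplified sketch; the real code resolves includes via utils.path_dwim and recurses into the included file):

playbook_data = [
    {"include": "webservers.yml"},          # include: must be the only key
    {"hosts": "dbservers", "tasks": []},    # ordinary play
]

def describe(play):
    if type(play) != dict:
        raise ValueError("each play must be a dictionary (hash): %s" % play)
    if "include" in play:
        if len(play.keys()) == 1:
            return "include of %s" % play["include"]
        raise ValueError("top level includes cannot be used with other directives: %s" % play)
    return "play targeting %s" % play.get("hosts", "all")

for play in playbook_data:
    print(describe(play))
# include of webservers.yml
# play targeting dbservers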


@@ -291,7 +291,7 @@ class Runner(object):
                 break
         msg = 'All items succeeded'
         if all_failed:
-            msg = "One or more items failed."
+            msg = "One or more items failed."
         rd_result = dict(
             failed = all_failed,
             changed = all_changed,
@@ -345,20 +345,20 @@ class Runner(object):

     # *****************************************************

     def _save_setup_result_to_disk(self, conn, result):
-        ''' cache results of calling setup '''
+        ''' cache results of calling setup '''

-        dest = os.path.expanduser("~/.ansible_setup_data")
-        user = getpass.getuser()
-        if user == 'root':
-            dest = "/var/lib/ansible/setup_data"
-        if not os.path.exists(dest):
-            os.makedirs(dest)
+        dest = os.path.expanduser("~/.ansible_setup_data")
+        user = getpass.getuser()
+        if user == 'root':
+            dest = "/var/lib/ansible/setup_data"
+        if not os.path.exists(dest):
+            os.makedirs(dest)

-        fh = open(os.path.join(dest, conn.host), "w")
-        fh.write(result)
-        fh.close()
+        fh = open(os.path.join(dest, conn.host), "w")
+        fh.write(result)
+        fh.close()

-        return result
+        return result

     # *****************************************************


@@ -177,11 +177,11 @@ def parse_json(data):
     # not JSON, but try "Baby JSON" which allows many of our modules to not
     # require JSON and makes writing modules in bash much simpler
     results = {}
-    try :
+    try:
         tokens = shlex.split(data)
     except:
         print "failed to parse json: "+ data
-        raise;
+        raise

     for t in tokens:
         if t.find("=") == -1: