# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

#############################################

import fnmatch
import os
import sys
import re
import subprocess

import ansible.constants as C
from ansible.inventory.ini import InventoryParser
from ansible.inventory.script import InventoryScript
from ansible.inventory.dir import InventoryDirectory
from ansible.inventory.group import Group
from ansible.inventory.host import Host
from ansible import errors
from ansible import utils

class Inventory(object):
    """
    Host inventory for ansible.
    """

    __slots__ = [ 'host_list', 'groups', '_restriction', '_also_restriction', '_subset',
                  'parser', '_vars_per_host', '_vars_per_group', '_hosts_cache', '_groups_list',
                  '_pattern_cache', '_vault_password', '_vars_plugins', '_playbook_basedir']

    def __init__(self, host_list=C.DEFAULT_HOST_LIST, vault_password=None):

        # the host file, or script path, or list of hosts
        # if a list, inventory data will NOT be loaded
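        # (illustrative forms: "/etc/ansible/hosts", an inventory directory,
        #  a comma separated string such as "web1.example.com,web2.example.com",
        #  or a Python list of host names)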
        self.host_list = host_list
        self._vault_password = vault_password

        # caching to avoid repeated calculations, particularly with
        # external inventory scripts.

        self._vars_per_host = {}
        self._vars_per_group = {}
        self._hosts_cache = {}
        self._groups_list = {}
        self._pattern_cache = {}

        # to be set by calling set_playbook_basedir by playbook code
        self._playbook_basedir = None

        # the inventory object holds a list of groups
        self.groups = []

        # a list of host(names) to constrain current inquiries to
        self._restriction = None
        self._also_restriction = None
        self._subset = None

        if isinstance(host_list, basestring):
            if "," in host_list:
                host_list = host_list.split(",")
                host_list = [ h for h in host_list if h and h.strip() ]

        if host_list is None:
            self.parser = None
        elif isinstance(host_list, list):
            self.parser = None
            all = Group('all')
            self.groups = [ all ]
            ipv6_re = re.compile('\[([a-f:A-F0-9]*[%[0-z]+]?)\](?::(\d+))?')
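            # e.g. an entry such as "[fe80::1]:2222" (illustrative) is split by this
            # expression into the address "fe80::1" and the port "2222"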
            for x in host_list:
                m = ipv6_re.match(x)
                if m:
                    all.add_host(Host(m.groups()[0], m.groups()[1]))
                else:
                    if ":" in x:
                        tokens = x.rsplit(":", 1)
                        # if there is a ':' left in the address, then this is an ipv6 address
                        if ':' in tokens[0]:
                            all.add_host(Host(x))
                        else:
                            all.add_host(Host(tokens[0], tokens[1]))
                    else:
                        all.add_host(Host(x))
        elif os.path.exists(host_list):
            if os.path.isdir(host_list):
                # Ensure basedir is inside the directory
                self.host_list = os.path.join(self.host_list, "")
                self.parser = InventoryDirectory(filename=host_list)
                self.groups = self.parser.groups.values()
            else:
                # check to see if the specified file starts with a
                # shebang (#!/), so if an error is raised by the parser
                # class we can show a more apropos error
                shebang_present = False
                try:
                    inv_file = open(host_list)
                    first_line = inv_file.readlines()[0]
                    inv_file.close()
                    if first_line.startswith('#!'):
                        shebang_present = True
                except:
                    pass

                if utils.is_executable(host_list):
                    try:
                        self.parser = InventoryScript(filename=host_list)
                        self.groups = self.parser.groups.values()
                    except:
                        if not shebang_present:
                            raise errors.AnsibleError("The file %s is marked as executable, but failed to execute correctly. " % host_list + \
                                "If this is not supposed to be an executable script, correct this with `chmod -x %s`." % host_list)
                        else:
                            raise
                else:
                    try:
                        self.parser = InventoryParser(filename=host_list)
                        self.groups = self.parser.groups.values()
                    except:
                        if shebang_present:
                            raise errors.AnsibleError("The file %s looks like it should be an executable inventory script, but is not marked executable. " % host_list + \
                                "Perhaps you want to correct this with `chmod +x %s`?" % host_list)
                        else:
                            raise

            utils.plugins.vars_loader.add_directory(self.basedir(), with_subdir=True)
        else:
            raise errors.AnsibleError("Unable to find an inventory file, specify one with -i ?")

        self._vars_plugins = [ x for x in utils.plugins.vars_loader.all(self) ]

        # get group vars from group_vars/ files and vars plugins
        for group in self.groups:
            group.vars = utils.combine_vars(group.vars, self.get_group_variables(group.name, vault_password=self._vault_password))

        # get host vars from host_vars/ files and vars plugins
        for host in self.get_hosts():
            host.vars = utils.combine_vars(host.vars, self.get_variables(host.name, vault_password=self._vault_password))

    def _match(self, str, pattern_str):
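        # glob-match by default; a leading '~' switches to a regular expression,
        # e.g. _match("web01", "web*") and _match("web01", "~web\d+") both match
        # (the host name "web01" is illustrative)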
        if pattern_str.startswith('~'):
            return re.search(pattern_str[1:], str)
        else:
            return fnmatch.fnmatch(str, pattern_str)

    def _match_list(self, items, item_attr, pattern_str):
        results = []
        if not pattern_str.startswith('~'):
            pattern = re.compile(fnmatch.translate(pattern_str))
        else:
            pattern = re.compile(pattern_str[1:])
        for item in items:
            if pattern.search(getattr(item, item_attr)):
                results.append(item)
        return results

    def get_hosts(self, pattern="all"):
        """
        find all host names matching a pattern string, taking into account any inventory restrictions or
        applied subsets.
        """

        # process patterns
        if isinstance(pattern, list):
            pattern = ';'.join(pattern)
        patterns = pattern.replace(";",":").split(":")
        hosts = self._get_hosts(patterns)

        # exclude hosts not in a subset, if defined
        if self._subset:
            subset = self._get_hosts(self._subset)
            hosts = [ h for h in hosts if h in subset ]

        # exclude hosts mentioned in any restriction (ex: failed hosts)
        if self._restriction is not None:
            hosts = [ h for h in hosts if h.name in self._restriction ]
        if self._also_restriction is not None:
            hosts = [ h for h in hosts if h.name in self._also_restriction ]

        return hosts

    def _get_hosts(self, patterns):
        """
        finds hosts that match a list of patterns. Handles negative
        matches as well as intersection matches.
        """

        # Host specifiers should be sorted to ensure consistent behavior
        pattern_regular = []
        pattern_intersection = []
        pattern_exclude = []
        for p in patterns:
            if p.startswith("!"):
                pattern_exclude.append(p)
            elif p.startswith("&"):
                pattern_intersection.append(p)
            elif p:
                pattern_regular.append(p)

        # if no regular pattern was given (only exclude and/or intersection
        # patterns), make that magically work by starting from 'all'
        if pattern_regular == []:
            pattern_regular = ['all']

        # when applying the host selectors, run those without the "&" or "!"
        # first, then the &s, then the !s.
        patterns = pattern_regular + pattern_intersection + pattern_exclude

        hosts = []

        for p in patterns:
            # avoid resolving a pattern that is a plain host
            if p in self._hosts_cache:
                hosts.append(self.get_host(p))
            else:
                that = self.__get_hosts(p)
                if p.startswith("!"):
                    hosts = [ h for h in hosts if h not in that ]
                elif p.startswith("&"):
                    hosts = [ h for h in hosts if h in that ]
                else:
                    to_append = [ h for h in that if h.name not in [ y.name for y in hosts ] ]
                    hosts.extend(to_append)
        return hosts

    def __get_hosts(self, pattern):
        """
        finds hosts that positively match a particular pattern.  Does not
        take into account negative matches.
        """

        if pattern in self._pattern_cache:
            return self._pattern_cache[pattern]

        (name, enumeration_details) = self._enumeration_info(pattern)
        hpat = self._hosts_in_unenumerated_pattern(name)
        result = self._apply_ranges(pattern, hpat)
        self._pattern_cache[pattern] = result
        return result

    def _enumeration_info(self, pattern):
        """
        returns (pattern, limits) taking a regular pattern and finding out
        which parts of it correspond to start/stop offsets.  limits is
        a tuple of (start, stop) or None
        """

        # Do not parse regexes for enumeration info
        if pattern.startswith('~'):
            return (pattern, None)

        # The regex used to match on the range, which can be [x] or [x-y].
        pattern_re = re.compile("^(.*)\[([-]?[0-9]+)(?:(?:-)([0-9]+))?\](.*)$")
        m = pattern_re.match(pattern)
        if m:
            (target, first, last, rest) = m.groups()
            first = int(first)
            if last:
                if first < 0:
                    raise errors.AnsibleError("invalid range: negative indices cannot be used as the first item in a range")
                last = int(last)
            else:
                last = first
            return (target, (first, last))
        else:
            return (pattern, None)

    def _apply_ranges(self, pat, hosts):
        """
        given a pattern like foo, that matches hosts, return all of hosts
        given a pattern like foo[0-5], where foo matches hosts, return the corresponding slice of those hosts
        """

        # If there are no hosts to select from, just return the
        # empty set. This prevents trying to do selections on an empty set.
        # issue#6258
        if not hosts:
            return hosts

        (loose_pattern, limits) = self._enumeration_info(pat)
        if not limits:
            return hosts

        (left, right) = limits

        if left == '':
            left = 0
        if right == '':
            right = 0
        left = int(left)
        right = int(right)
        try:
            if left != right:
                return hosts[left:right]
            else:
                return [ hosts[left] ]
        except IndexError:
            raise errors.AnsibleError("no hosts matching the pattern '%s' were found" % pat)

    def _create_implicit_localhost(self, pattern):
        new_host = Host(pattern)
        new_host.set_variable("ansible_python_interpreter", sys.executable)
        new_host.set_variable("ansible_connection", "local")
        ungrouped = self.get_group("ungrouped")
        if ungrouped is None:
            self.add_group(Group('ungrouped'))
            ungrouped = self.get_group('ungrouped')
        ungrouped.add_host(new_host)
        return new_host

    def _hosts_in_unenumerated_pattern(self, pattern):
        """ Get all host names matching the pattern """

        results = []
        hosts = []
        hostnames = set()

        # ignore any negative checks here, this is handled elsewhere
        pattern = pattern.replace("!","").replace("&", "")

        def __append_host_to_results(host):
            if host not in results and host.name not in hostnames:
                hostnames.add(host.name)
                results.append(host)

        groups = self.get_groups()
        for group in groups:
            if pattern == 'all':
                for host in group.get_hosts():
                    __append_host_to_results(host)
            else:
                if self._match(group.name, pattern):
                    for host in group.get_hosts():
                        __append_host_to_results(host)
                else:
                    matching_hosts = self._match_list(group.get_hosts(), 'name', pattern)
                    for host in matching_hosts:
                        __append_host_to_results(host)

        if pattern in ["localhost", "127.0.0.1"] and len(results) == 0:
            new_host = self._create_implicit_localhost(pattern)
            results.append(new_host)
        return results

    def clear_pattern_cache(self):
        ''' called exclusively by the add_host plugin to allow patterns to be recalculated '''
        self._pattern_cache = {}

    def groups_for_host(self, host):
        if host in self._hosts_cache:
            return self._hosts_cache[host].get_groups()
        else:
            return []

    def groups_list(self):
        if not self._groups_list:
            groups = {}
            for g in self.groups:
                groups[g.name] = [h.name for h in g.get_hosts()]
                ancestors = g.get_ancestors()
                for a in ancestors:
                    if a.name not in groups:
                        groups[a.name] = [h.name for h in a.get_hosts()]
            self._groups_list = groups
        return self._groups_list

    def get_groups(self):
        return self.groups

    def get_host(self, hostname):
        if hostname not in self._hosts_cache:
            self._hosts_cache[hostname] = self._get_host(hostname)
        return self._hosts_cache[hostname]

    def _get_host(self, hostname):
        if hostname in ['localhost','127.0.0.1']:
            for host in self.get_group('all').get_hosts():
                if host.name in ['localhost', '127.0.0.1']:
                    return host
            return self._create_implicit_localhost(hostname)
        else:
            for group in self.groups:
                for host in group.get_hosts():
                    if hostname == host.name:
                        return host
        return None

    def get_group(self, groupname):
        for group in self.groups:
            if group.name == groupname:
                return group
        return None

    def get_group_variables(self, groupname, update_cached=False, vault_password=None):
        if groupname not in self._vars_per_group or update_cached:
            self._vars_per_group[groupname] = self._get_group_variables(groupname, vault_password=vault_password)
        return self._vars_per_group[groupname]

    def _get_group_variables(self, groupname, vault_password=None):

        group = self.get_group(groupname)
        if group is None:
            raise Exception("group not found: %s" % groupname)

        vars = {}

        # plugin.get_group_vars retrieves just vars for specific group
        vars_results = [ plugin.get_group_vars(group, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_group_vars')]
        for updated in vars_results:
            if updated is not None:
                vars = utils.combine_vars(vars, updated)

        # get group variables set by Inventory Parsers
        vars = utils.combine_vars(vars, group.get_variables())

        # Read group_vars/ files
        vars = utils.combine_vars(vars, self.get_group_vars(group))

        return vars

    def get_variables(self, hostname, update_cached=False, vault_password=None):
        if hostname not in self._vars_per_host or update_cached:
            self._vars_per_host[hostname] = self._get_variables(hostname, vault_password=vault_password)
        return self._vars_per_host[hostname]

    def _get_variables(self, hostname, vault_password=None):

        host = self.get_host(hostname)
        if host is None:
            raise errors.AnsibleError("host not found: %s" % hostname)

        vars = {}
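
        # vars are layered with utils.combine_vars in the order below, so later
        # sources override earlier ones: vars plugins, inventory parser variables,
        # then host_vars/ files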

        # plugin.run retrieves all vars (also from groups) for host
        vars_results = [ plugin.run(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'run')]
        for updated in vars_results:
            if updated is not None:
                vars = utils.combine_vars(vars, updated)

        # plugin.get_host_vars retrieves just vars for specific host
        vars_results = [ plugin.get_host_vars(host, vault_password=vault_password) for plugin in self._vars_plugins if hasattr(plugin, 'get_host_vars')]
        for updated in vars_results:
            if updated is not None:
                vars = utils.combine_vars(vars, updated)

        # get host variables set by Inventory Parsers
        vars = utils.combine_vars(vars, host.get_variables())

        # still need to check InventoryParser per host vars
        # which actually means InventoryScript per host,
        # which is not performant
        if self.parser is not None:
            vars = utils.combine_vars(vars, self.parser.get_host_variables(host))

        # Read host_vars/ files
        vars = utils.combine_vars(vars, self.get_host_vars(host))

        return vars

    def add_group(self, group):
        if group.name not in self.groups_list():
            self.groups.append(group)
            self._groups_list = None  # invalidate internal cache
        else:
            raise errors.AnsibleError("group already in inventory: %s" % group.name)

    def list_hosts(self, pattern="all"):
        """ return a list of hostnames for a pattern """

        result = [ h.name for h in self.get_hosts(pattern) ]
        if len(result) == 0 and pattern in ["localhost", "127.0.0.1"]:
            result = [pattern]
        return result

    def list_groups(self):
        return sorted([ g.name for g in self.groups ], key=lambda x: x)

    # TODO: remove this function
    def get_restriction(self):
        return self._restriction

    def restrict_to(self, restriction):
        """
        Restrict list operations to the hosts given in restriction.  This is used
        to exclude failed hosts in main playbook code; don't use this for other
        reasons.
        """
        if not isinstance(restriction, list):
            restriction = [ restriction ]
        self._restriction = restriction

    def also_restrict_to(self, restriction):
        """
        Works like restrict_to but offers an additional restriction.  Playbooks use this
        to implement serial behavior.
        """
        if not isinstance(restriction, list):
            restriction = [ restriction ]
        self._also_restriction = restriction

    def subset(self, subset_pattern):
        """
        Limits inventory results to a subset of inventory that matches a given
        pattern, such as to select a given geographic or numeric slice amongst
        a previous 'hosts' selection that only selects roles, or vice versa.
        Corresponds to the --limit parameter of ansible-playbook.
        """
        if subset_pattern is None:
            self._subset = None
        else:
            subset_pattern = subset_pattern.replace(',',':')
            subset_pattern = subset_pattern.replace(";",":").split(":")
            results = []
            # allow Unix style @filename data
            for x in subset_pattern:
                if x.startswith("@"):
                    fd = open(x[1:])
                    results.extend(fd.read().split("\n"))
                    fd.close()
                else:
                    results.append(x)
            self._subset = results

    def lift_restriction(self):
        """ Do not restrict list operations """
        self._restriction = None

    def lift_also_restriction(self):
        """ Clears the also restriction """
        self._also_restriction = None

    def is_file(self):
        """ did inventory come from a file? """
        if not isinstance(self.host_list, basestring):
            return False
        return os.path.exists(self.host_list)

    def basedir(self):
        """ if inventory came from a file, what's the directory? """
        if not self.is_file():
            return None
        dname = os.path.dirname(self.host_list)
        if dname is None or dname == '' or dname == '.':
            cwd = os.getcwd()
            return os.path.abspath(cwd)
        return os.path.abspath(dname)

    def src(self):
        """ if inventory came from a file, what's the directory and file name? """
        if not self.is_file():
            return None
        return self.host_list

    def playbook_basedir(self):
        """ returns the directory of the current playbook """
        return self._playbook_basedir

    def set_playbook_basedir(self, dir):
        """
        sets the base directory of the playbook so inventory can use it as a
        basedir for host_ and group_vars, and other things.
        """
        # Only update things if dir is a different playbook basedir
        if dir != self._playbook_basedir:
            self._playbook_basedir = dir
            # get group vars from group_vars/ files
            for group in self.groups:
                group.vars = utils.combine_vars(group.vars, self.get_group_vars(group, new_pb_basedir=True))
            # get host vars from host_vars/ files
            for host in self.get_hosts():
                host.vars = utils.combine_vars(host.vars, self.get_host_vars(host, new_pb_basedir=True))

    def get_host_vars(self, host, new_pb_basedir=False):
        """ Read host_vars/ files """
        return self._get_hostgroup_vars(host=host, group=None, new_pb_basedir=new_pb_basedir)

    def get_group_vars(self, group, new_pb_basedir=False):
        """ Read group_vars/ files """
        return self._get_hostgroup_vars(host=None, group=group, new_pb_basedir=new_pb_basedir)

    def _get_hostgroup_vars(self, host=None, group=None, new_pb_basedir=False):
        """
        Loads variables from group_vars/<groupname> and host_vars/<hostname> in directories parallel
        to the inventory base directory or in the same directory as the playbook.  Variables in the playbook
        dir will win over the inventory dir if files are in both.
        """

        results = {}
        scan_pass = 0
        _basedir = self.basedir()

        # look in both the inventory base directory and the playbook base directory
        # unless we do an update for a new playbook base dir
        if not new_pb_basedir:
            basedirs = [_basedir, self._playbook_basedir]
        else:
            basedirs = [self._playbook_basedir]

        for basedir in basedirs:

            # this can happen from particular API usages, particularly if not run
            # from /usr/bin/ansible-playbook
            if basedir is None:
                continue

            scan_pass = scan_pass + 1

            # it's not an error if the directory does not exist, keep moving
            if not os.path.exists(basedir):
                continue

            # save work of second scan if the directories are the same
            if _basedir == self._playbook_basedir and scan_pass != 1:
                continue

            if group and host is None:
                # load vars in dir/group_vars/name_of_group
                base_path = os.path.join(basedir, "group_vars/%s" % group.name)
                results = utils.load_vars(base_path, results, vault_password=self._vault_password)

            elif host and group is None:
                # same for hostvars in dir/host_vars/name_of_host
                base_path = os.path.join(basedir, "host_vars/%s" % host.name)
                results = utils.load_vars(base_path, results, vault_password=self._vault_password)

        # all done, results is a dictionary of variables for this particular host or group.
        return results