2017-05-23 23:16:49 +02:00
|
|
|
# (c) 2017, Red Hat, inc
|
2012-07-24 21:43:35 +02:00
|
|
|
#
|
|
|
|
# This file is part of Ansible
|
|
|
|
#
|
2017-05-23 23:16:49 +02:00
|
|
|
# Ansible is free software: you can redistribute it and/or modify
|
2012-07-24 21:43:35 +02:00
|
|
|
# it under the terms of the GNU General Public License as published by
|
|
|
|
# the Free Software Foundation, either version 3 of the License, or
|
|
|
|
# (at your option) any later version.
|
|
|
|
#
|
2017-05-23 23:16:49 +02:00
|
|
|
# Ansible is distributed in the hope that it will be useful,
|
2012-07-24 21:43:35 +02:00
|
|
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
|
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
|
|
# GNU General Public License for more details.
|
|
|
|
#
|
|
|
|
# You should have received a copy of the GNU General Public License
|
2018-04-23 21:22:36 +02:00
|
|
|
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
|
2012-07-24 21:43:35 +02:00
|
|
|
|
2017-05-23 23:16:49 +02:00
|
|
|
# Make coding more python3-ish
|
2015-05-04 04:47:26 +02:00
|
|
|
from __future__ import (absolute_import, division, print_function)
|
|
|
|
__metaclass__ = type
|
|
|
|
|
2017-05-23 23:16:49 +02:00
|
|
|
import hashlib
|
2017-08-26 04:00:07 +02:00
|
|
|
import os
|
2012-12-11 03:48:38 +01:00
|
|
|
import string
|
2012-07-24 21:43:35 +02:00
|
|
|
|
2018-03-29 23:10:32 +02:00
|
|
|
from ansible.errors import AnsibleError, AnsibleParserError
|
2019-03-06 17:49:40 +01:00
|
|
|
from ansible.inventory.group import to_safe_group_name as original_safe
|
Add toml inventory plugin (#41593)
* First pass at a toml inventory
* Make EXAMPLES yaml
* Remove unnecessary comment
* Small formatting changes
* Add ansible-inventory option to list as TOML
* TOML inventory improvements, to allow a more simple inventory, specifically related to children
* changelog
* Simplify logic
* Dedupe _expand_hostpattern, making it available to all inventory plugins
* Don't make the TOML inventory dependent on the YAML inventory
* Quote IP address values
* Add more TOML examples
* Further cleanups
* Enable the toml inventory to run by default
* Create toml specific dumper
* 2.8
* Clean up imports
* No toml pygments lexer
* Don't raise an exception early when toml isn't present, and move toml to the end, since it requires an external dep
* Require toml>=0.10.0
* Further clean up of empty data
* Don't require toml>=0.10.0, but prefer it, add code for fallback in older versions
* Ensure we actually pass an encoder to toml.dumps
* Simplify recursive data converter
* Appease tests, since we haven't limited controller testing to 2.7+
* Update docstring for convert_yaml_objects_to_native
* remove outdated catching of AttributeError
* We don't need to catch ImportError when import ansible.plugins.inventory.toml
* Add note about what self.dump_funcs.update is doing
* Address some things
* A little extra comment
* Fix toml availability check
* Don't create an intermediate list
* Require toml file extension
* Add metadata
* Remove TOML docs from intro_inventory to prevent people from getting the wrong idea
* It's in defaults, remove note
* core supported, indicate very clearly that this is preview status
2018-11-06 17:02:36 +01:00
|
|
|
from ansible.parsing.utils.addresses import parse_address
|
2017-11-16 19:49:57 +01:00
|
|
|
from ansible.plugins import AnsiblePlugin
|
2019-03-06 19:12:35 +01:00
|
|
|
from ansible.plugins.cache import CachePluginAdjudicator as CacheObject
|
2017-08-26 04:00:07 +02:00
|
|
|
from ansible.module_utils._text import to_bytes, to_native
|
2018-10-05 10:22:25 +02:00
|
|
|
from ansible.module_utils.common._collections_compat import Mapping
|
2017-08-26 04:00:07 +02:00
|
|
|
from ansible.module_utils.parsing.convert_bool import boolean
|
|
|
|
from ansible.module_utils.six import string_types
|
2017-05-23 23:16:49 +02:00
|
|
|
from ansible.template import Templar
|
2018-11-21 00:06:51 +01:00
|
|
|
from ansible.utils.display import Display
|
2019-03-11 17:43:31 +01:00
|
|
|
from ansible.utils.vars import combine_vars
|
2017-05-23 23:16:49 +02:00
|
|
|
|
2018-11-21 00:06:51 +01:00
|
|
|
# Module-wide Display instance used by the helpers and classes below for
# verbose output and deprecation warnings.
display = Display()
|
2017-05-23 23:16:49 +02:00
|
|
|
|
|
|
|
|
2017-11-16 19:49:57 +01:00
|
|
|
# Helper methods
|
|
|
|
def to_safe_group_name(name):
    """Sanitize *name* so it can be used as an Ansible group name.

    Backwards-compatibility shim: delegates to the canonical implementation
    in ``ansible.inventory.group``, always forcing character replacement and
    suppressing warnings.
    """
    return original_safe(name, force=True, silent=True)
|
2017-11-16 19:49:57 +01:00
|
|
|
|
|
|
|
|
|
|
|
def detect_range(line=None):
    """Check whether a host line contains a range expression.

    A range expression is introduced by an opening bracket, e.g.
    ``db[1:6]-node``; the actual expansion is performed by
    ``expand_hostname_range``.

    Returns True if the given line contains a pattern, else False.
    """
    return '[' in line
|
|
|
|
|
|
|
|
|
|
|
|
def expand_hostname_range(line=None):
    '''
    A helper function that expands a given line that contains a range
    pattern (e.g. ``db[1:6]-node``), and returns a list that consists of the
    expanded version.

    The '[' and ']' characters are used to maintain the pseudo-code
    appearance. They are replaced in this function with '|' to ease
    string splitting.

    Supports numeric ranges (with optional zero-padding and step),
    alphabetic ranges, and multiple ranges per line (handled recursively).
    Returns an empty list when *line* is empty/None.

    References: https://docs.ansible.com/ansible/latest/user_guide/intro_inventory.html#hosts-and-groups
    '''
    all_hosts = []
    if line:
        # A hostname such as db[1:6]-node is considered to consist of
        # three parts:
        # head: 'db'
        # nrange: [1:6]; range() is a built-in. Can't use the name
        # tail: '-node'

        # Add support for multiple ranges in a host so:
        # db[01:10:3]node-[01:10]
        # - to do this we split off at the first [...] set, getting the list
        #   of hosts and then repeat until none left.
        # - also add an optional third parameter which contains the step. (Default: 1)
        #   so range can be [01:10:2] -> 01 03 05 07 09

        # Split off only the FIRST [...] group; any later ranges survive in
        # 'tail' and are expanded by the recursive call at the bottom.
        (head, nrange, tail) = line.replace('[', '|', 1).replace(']', '|', 1).split('|')
        bounds = nrange.split(":")
        if len(bounds) != 2 and len(bounds) != 3:
            raise AnsibleError("host range must be begin:end or begin:end:step")
        beg = bounds[0]
        end = bounds[1]
        if len(bounds) == 2:
            step = 1
        else:
            step = bounds[2]
        if not beg:
            # an omitted begin value defaults to 0, e.g. [:5]
            beg = "0"
        if not end:
            raise AnsibleError("host range must specify end value")
        if beg[0] == '0' and len(beg) > 1:
            # a leading zero requests fixed-width zero-padded numbers
            rlen = len(beg)  # range length formatting hint
            if rlen != len(end):
                raise AnsibleError("host range must specify equal-length begin and end formats")

            def fill(x):
                return str(x).zfill(rlen)  # range sequence

        else:
            fill = str

        try:
            # first try an alphabetic range, e.g. [a:f]
            i_beg = string.ascii_letters.index(beg)
            i_end = string.ascii_letters.index(end)
            if i_beg > i_end:
                raise AnsibleError("host range must have begin <= end")
            seq = list(string.ascii_letters[i_beg:i_end + 1:int(step)])
        except ValueError:  # not an alpha range
            seq = range(int(beg), int(end) + 1, int(step))

        for rseq in seq:
            hname = ''.join((head, fill(rseq), tail))

            if detect_range(hname):
                # more ranges remain in the name; expand them recursively
                all_hosts.extend(expand_hostname_range(hname))
            else:
                all_hosts.append(hname)

    return all_hosts
|
|
|
|
|
|
|
|
|
2019-03-06 19:12:35 +01:00
|
|
|
def get_cache_plugin(plugin_name, **kwargs):
    ''' Load a cache plugin for use by an inventory plugin.

    :arg plugin_name: name of the cache plugin to load (e.g. 'jsonfile', 'memory')
    :arg kwargs: options forwarded to the cache plugin (e.g. _uri, _timeout, _prefix)
    :returns: a CachePluginAdjudicator wrapping the requested cache plugin
    :raises AnsibleError: when the plugin requires a cache connection that was not
        configured, or when options were provided that the plugin cannot consume
    '''
    try:
        cache = CacheObject(plugin_name, **kwargs)
    except AnsibleError as e:
        if 'fact_caching_connection' in to_native(e):
            # File-backed cache plugins need a connection (directory) setting;
            # list every place it can be configured.
            # FIX: the config-file section is '[defaults]' (message previously
            # said '[default]', which is not a valid ansible.cfg section), and
            # "requires the one of" was ungrammatical.
            raise AnsibleError("error, '%s' inventory cache plugin requires one of the following to be set "
                               "to a writeable directory path:\nansible.cfg:\n[defaults]: fact_caching_connection,\n"
                               "[inventory]: cache_connection;\nEnvironment:\nANSIBLE_INVENTORY_CACHE_CONNECTION,\n"
                               "ANSIBLE_CACHE_PLUGIN_CONNECTION." % plugin_name)
        else:
            raise e

    # Plugins still reading settings from ansible.constants (pre-config API) have
    # no '_options'; options passed to them would be silently ignored, so fail loudly.
    # 'memory' takes no options and is exempt.
    if plugin_name != 'memory' and kwargs and not getattr(cache._plugin, '_options', None):
        raise AnsibleError('Unable to use cache plugin {0} for inventory. Cache options were provided but may not reconcile '
                           'correctly unless set via set_options. Refer to the porting guide if the plugin derives user settings '
                           'from ansible.constants.'.format(plugin_name))
    return cache
|
|
|
|
|
|
|
|
|
2017-11-16 19:49:57 +01:00
|
|
|
class BaseInventoryPlugin(AnsiblePlugin):
    """ Parses an Inventory Source"""

    # plugin type: 'generator' plugins build inventory dynamically,
    # as opposed to 'storage' (file based) plugins
    TYPE = 'generator'

    # sanitizer applied to group names; staticmethod so subclasses can override it
    _sanitize_group_name = staticmethod(to_safe_group_name)

    def __init__(self):

        super(BaseInventoryPlugin, self).__init__()

        self._options = {}
        self.inventory = None
        self.display = display

    def parse(self, inventory, loader, path, cache=True):
        ''' Populates inventory from the given data. Raises an error on any parse failure

            :arg inventory: a copy of the previously accumulated inventory data,
                 to be updated with any new data this plugin provides.
                 The inventory can be empty if no other source/plugin ran successfully.
            :arg loader: a reference to the DataLoader, which can read in YAML and JSON files,
                 it also has Vault support to automatically decrypt files.
            :arg path: the string that represents the 'inventory source',
                 normally a path to a configuration file for this inventory,
                 but it can also be a raw string for this plugin to consume
            :arg cache: a boolean that indicates if the plugin should use the cache or not
                 you can ignore if this plugin does not implement caching.
        '''

        self.loader = loader
        self.inventory = inventory
        self.templar = Templar(loader=loader)

    def verify_file(self, path):
        ''' Verify if file is usable by this plugin, base does minimal accessibility check

            :arg path: a string that was passed as an inventory source,
                 it normally is a path to a config file, but this is not a requirement,
                 it can also be parsed itself as the inventory data to process.
                 So only call this base class if you expect it to be a file.
            :returns: True if the path exists and is readable, False otherwise
        '''

        valid = False
        b_path = to_bytes(path, errors='surrogate_or_strict')
        if (os.path.exists(b_path) and os.access(b_path, os.R_OK)):
            valid = True
        else:
            self.display.vvv('Skipping due to inventory source not existing or not being readable by the current user')
        return valid

    def _populate_host_vars(self, hosts, variables, group=None, port=None):
        ''' Add hosts to inventory (optionally into a group / with a port) and
            set every entry of ``variables`` on each of them.

            :arg hosts: iterable of host names
            :arg variables: mapping of variable names to values
            :arg group: optional group name to add the hosts to
            :arg port: optional connection port for the hosts
            :raises AnsibleParserError: if ``variables`` is not a mapping
        '''
        if not isinstance(variables, Mapping):
            raise AnsibleParserError("Invalid data from file, expected dictionary and got:\n\n%s" % to_native(variables))

        for host in hosts:
            self.inventory.add_host(host, group=group, port=port)
            for k in variables:
                self.inventory.set_variable(host, k, variables[k])

    def _read_config_data(self, path):
        ''' validate config and set options as appropriate

            :arg path: path to common yaml format config file for this plugin
            :returns: the parsed configuration dictionary
            :raises AnsibleParserError: when the file cannot be parsed, is empty,
                is not a dictionary, or names a different plugin
        '''

        config = {}
        try:
            # avoid loader cache so meta: refresh_inventory can pick up config changes
            # if we read more than once, fs cache should be good enough
            config = self.loader.load_from_file(path, cache=False)
        except Exception as e:
            raise AnsibleParserError(to_native(e))

        if not config:
            # no data
            raise AnsibleParserError("%s is empty" % (to_native(path)))
        elif not isinstance(config, Mapping):
            # configs are dictionaries.
            # BUGFIX: this check must run before the 'plugin' name check below;
            # calling .get() on non-dict data (e.g. a YAML list) previously
            # raised AttributeError instead of this useful parse error.
            raise AnsibleParserError('inventory source has invalid structure, it should be a dictionary, got: %s' % type(config))
        elif config.get('plugin') != self.NAME:
            # this is not my config file
            raise AnsibleParserError("Incorrect plugin name in file: %s" % config.get('plugin', 'none found'))

        self.set_options(direct=config)
        if 'cache' in self._options and self.get_option('cache'):
            # translate the generic inventory cache options into the private
            # option names the cache plugins expect
            cache_option_keys = [('_uri', 'cache_connection'), ('_timeout', 'cache_timeout'), ('_prefix', 'cache_prefix')]
            cache_options = dict((opt[0], self.get_option(opt[1])) for opt in cache_option_keys if self.get_option(opt[1]))
            self._cache = get_cache_plugin(self.get_option('cache_plugin'), **cache_options)

        return config

    def _consume_options(self, data):
        ''' update existing options from alternate configuration sources not normally used by Ansible.
            Many API libraries already have existing configuration sources, this allows plugin author to leverage them.

            :arg data: key/value pairs that correspond to configuration options for this plugin
        '''

        for k in self._options:
            if k in data:
                # consume (pop) recognized options so the caller can tell
                # which keys were left over
                self._options[k] = data.pop(k)

    def _expand_hostpattern(self, hostpattern):
        '''
        Takes a single host pattern and returns a list of hostnames and an
        optional port number that applies to all of them.
        '''
        # Can the given hostpattern be parsed as a host with an optional port
        # specification?

        try:
            (pattern, port) = parse_address(hostpattern, allow_ranges=True)
        except Exception:
            # not a recognizable host pattern
            pattern = hostpattern
            port = None

        # Once we have separated the pattern, we expand it into list of one or
        # more hostnames, depending on whether it contains any [x:y] ranges.

        if detect_range(pattern):
            hostnames = expand_hostname_range(pattern)
        else:
            hostnames = [pattern]

        return (hostnames, port)
|
|
|
|
|
2017-11-16 19:49:57 +01:00
|
|
|
|
|
|
|
class BaseFileInventoryPlugin(BaseInventoryPlugin):
    """ Parses a File based Inventory Source"""

    # file based sources are 'storage' rather than 'generator' type
    TYPE = 'storage'

    def __init__(self):

        super(BaseFileInventoryPlugin, self).__init__()
|
|
|
|
|
|
|
|
|
2019-03-06 19:12:35 +01:00
|
|
|
class DeprecatedCache(object):
    # Backwards-compatible proxy exposing the old ``self.cache`` interface of
    # inventory plugins on top of the new ``self._cache`` dictionary; every
    # access emits a deprecation warning.

    def __init__(self, real_cacheable):
        # the Cacheable instance whose _cache we proxy
        self.real_cacheable = real_cacheable

    def get(self, key):
        # NOTE: unlike the old plugin-backed .get(), the dict lookup below
        # raises KeyError for missing keys instead of returning a default.
        display.deprecated('InventoryModule should utilize self._cache as a dict instead of self.cache. '
                           'When expecting a KeyError, use self._cache[key] instead of using self.cache.get(key). '
                           'self._cache is a dictionary and will return a default value instead of raising a KeyError '
                           'when the key does not exist', version='2.12')
        return self.real_cacheable._cache[key]

    def set(self, key, value):
        # set the value and immediately push the whole _cache dict down to the
        # backing cache plugin, mimicking the old write-through behaviour
        display.deprecated('InventoryModule should utilize self._cache as a dict instead of self.cache. '
                           'To set the self._cache dictionary, use self._cache[key] = value instead of self.cache.set(key, value). '
                           'To force update the underlying cache plugin with the contents of self._cache before parse() is complete, '
                           'call self.set_cache_plugin and it will use the self._cache dictionary to update the cache plugin', version='2.12')
        self.real_cacheable._cache[key] = value
        self.real_cacheable.set_cache_plugin()

    def __getattr__(self, name):
        # any other attribute access is forwarded to the underlying _cache object
        display.deprecated('InventoryModule should utilize self._cache instead of self.cache', version='2.12')
        return self.real_cacheable._cache.__getattribute__(name)
|
|
|
|
|
|
|
|
|
2017-11-16 19:49:57 +01:00
|
|
|
class Cacheable(object):
    # Mixin that adds cache support to inventory plugins.

    # class-level default cache; replaced per-instance by load_cache_plugin()
    _cache = CacheObject()

    @property
    def cache(self):
        # deprecated accessor kept for backwards compatibility; plugins should
        # use self._cache directly
        return DeprecatedCache(self)

    def load_cache_plugin(self):
        # translate the generic inventory cache options into the private
        # option names the cache plugins expect, skipping unset ones
        plugin_name = self.get_option('cache_plugin')
        cache_option_keys = [('_uri', 'cache_connection'), ('_timeout', 'cache_timeout'), ('_prefix', 'cache_prefix')]
        cache_options = dict((opt[0], self.get_option(opt[1])) for opt in cache_option_keys if self.get_option(opt[1]))
        self._cache = get_cache_plugin(plugin_name, **cache_options)

    def get_cache_key(self, path):
        # cache key is unique per plugin name and inventory source path
        return "{0}_{1}".format(self.NAME, self._get_cache_prefix(path))

    def _get_cache_prefix(self, path):
        ''' create predictable unique prefix for plugin/inventory '''

        m = hashlib.sha1()
        m.update(to_bytes(self.NAME, errors='surrogate_or_strict'))
        d1 = m.hexdigest()

        n = hashlib.sha1()
        n.update(to_bytes(path, errors='surrogate_or_strict'))
        d2 = n.hexdigest()

        # 's_' is the join separator, yielding '<5 hex chars>s_<5 hex chars>'
        return 's_'.join([d1[:5], d2[:5]])

    def clear_cache(self):
        # discard all cached data in the backing cache plugin
        self._cache.flush()

    def update_cache_if_changed(self):
        self._cache.update_cache_if_changed()

    def set_cache_plugin(self):
        # flush the in-memory cache contents down to the backing cache plugin
        self._cache.set_cache()
|
2017-05-23 23:16:49 +02:00
|
|
|
|
2017-09-30 00:51:27 +02:00
|
|
|
|
2017-11-16 19:49:57 +01:00
|
|
|
class Constructable(object):
    # Mixin providing 'constructed' features for inventory plugins: composed
    # variables, conditional ('composed') groups and keyed groups, all
    # evaluated with Jinja2 against a host's variables.

    def _compose(self, template, variables):
        ''' helper method for plugins to compose variables for Ansible based on jinja2 expression and inventory vars'''
        t = self.templar
        t.set_available_variables(variables)
        # wrap the bare expression in the templar's variable delimiters
        # (normally '{{ ... }}') before rendering; lookups are disabled
        return t.template('%s%s%s' % (t.environment.variable_start_string, template, t.environment.variable_end_string), disable_lookups=True)

    def _set_composite_vars(self, compose, variables, host, strict=False):
        ''' loops over compose entries to create vars for hosts '''
        if compose and isinstance(compose, dict):
            for varname in compose:
                try:
                    composite = self._compose(compose[varname], variables)
                except Exception as e:
                    # in non-strict mode a failing expression is skipped silently
                    if strict:
                        raise AnsibleError("Could not set %s for host %s: %s" % (varname, host, to_native(e)))
                    continue
                self.inventory.set_variable(host, varname, composite)

    def _add_host_to_composed_groups(self, groups, variables, host, strict=False):
        ''' helper to create complex groups for plugins based on jinja2 conditionals, hosts that meet the conditional are added to group'''
        # process each 'group entry'
        if groups and isinstance(groups, dict):
            # merge in vars already set on the host so conditionals can use them
            variables = combine_vars(variables, self.inventory.get_host(host).get_vars())
            self.templar.set_available_variables(variables)
            for group_name in groups:
                # evaluate the user expression as a boolean conditional
                conditional = "{%% if %s %%} True {%% else %%} False {%% endif %%}" % groups[group_name]
                group_name = self._sanitize_group_name(group_name)
                try:
                    result = boolean(self.templar.template(conditional))
                except Exception as e:
                    # in non-strict mode a failing conditional just skips the group
                    if strict:
                        raise AnsibleParserError("Could not add host %s to group %s: %s" % (host, group_name, to_native(e)))
                    continue

                if result:
                    # ensure group exists, use sanitized name
                    group_name = self.inventory.add_group(group_name)
                    # add host to group
                    self.inventory.add_child(group_name, host)

    def _add_host_to_keyed_groups(self, keys, variables, host, strict=False):
        ''' helper to create groups for plugins based on variable values and add the corresponding hosts to it'''
        if keys and isinstance(keys, list):
            for keyed in keys:
                if keyed and isinstance(keyed, dict):

                    # merge in vars already set on the host before rendering the key
                    variables = combine_vars(variables, self.inventory.get_host(host).get_vars())
                    try:
                        key = self._compose(keyed.get('key'), variables)
                    except Exception as e:
                        if strict:
                            raise AnsibleParserError("Could not generate group for host %s from %s entry: %s" % (host, keyed.get('key'), to_native(e)))
                        continue

                    if key:
                        prefix = keyed.get('prefix', '')
                        sep = keyed.get('separator', '_')
                        raw_parent_name = keyed.get('parent_group', None)
                        if raw_parent_name:
                            try:
                                # the parent group name may itself be a template
                                raw_parent_name = self.templar.template(raw_parent_name)
                            except AnsibleError as e:
                                if strict:
                                    raise AnsibleParserError("Could not generate parent group %s for group %s: %s" % (raw_parent_name, key, to_native(e)))
                                continue

                        # a rendered key may expand to one group (string), several
                        # (list), or one per mapping item (dict -> '<key><sep><value>')
                        new_raw_group_names = []
                        if isinstance(key, string_types):
                            new_raw_group_names.append(key)
                        elif isinstance(key, list):
                            for name in key:
                                new_raw_group_names.append(name)
                        elif isinstance(key, Mapping):
                            for (gname, gval) in key.items():
                                name = '%s%s%s' % (gname, sep, gval)
                                new_raw_group_names.append(name)
                        else:
                            raise AnsibleParserError("Invalid group name format, expected a string or a list of them or dictionary, got: %s" % type(key))

                        for bare_name in new_raw_group_names:
                            # final group name is '<prefix><sep><name>', sanitized
                            gname = self._sanitize_group_name('%s%s%s' % (prefix, sep, bare_name))
                            result_gname = self.inventory.add_group(gname)
                            self.inventory.add_child(result_gname, host)

                            if raw_parent_name:
                                # nest the new group under the requested parent
                                parent_name = self._sanitize_group_name(raw_parent_name)
                                self.inventory.add_group(parent_name)
                                self.inventory.add_child(parent_name, result_gname)

                    else:
                        # exclude case of empty list and dictionary, because these are valid constructions
                        # simply no groups need to be constructed, but are still falsy
                        if strict and key not in ([], {}):
                            raise AnsibleParserError("No key or key resulted empty for %s in host %s, invalid entry" % (keyed.get('key'), host))
                else:
                    raise AnsibleParserError("Invalid keyed group entry, it must be a dictionary: %s " % keyed)
|