# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os

from ansible.compat.six import iteritems

from ansible.errors import AnsibleParserError, AnsibleError
from ansible.parsing.splitter import split_args, parse_kv
from ansible.parsing.yaml.objects import AnsibleBaseYAMLObject, AnsibleMapping
from ansible.playbook.attribute import FieldAttribute
from ansible.playbook.base import Base
from ansible.playbook.conditional import Conditional
from ansible.playbook.taggable import Taggable
from ansible.template import Templar


class PlaybookInclude(Base, Conditional, Taggable):

    _name = FieldAttribute(isa='string')
    _include = FieldAttribute(isa='string')
    _vars = FieldAttribute(isa='dict', default=dict())
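
    # Illustrative note (the playbook snippet below is hypothetical, not part of
    # this module): a top-level playbook entry such as
    #
    #   - include: webservers.yml
    #     vars:
    #       app_port: 8080
    #
    # is loaded into these field attributes, with 'include' holding the file name
    # to load and 'vars' holding extra variables applied to the included plays.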

    @staticmethod
    def load(data, basedir, variable_manager=None, loader=None):
        return PlaybookInclude().load_data(ds=data, basedir=basedir, variable_manager=variable_manager, loader=loader)

    def load_data(self, ds, basedir, variable_manager=None, loader=None):
        '''
        Overrides the base load_data(), as we're actually going to return a new
        Playbook() object rather than a PlaybookInclude object
        '''

        # import here to avoid a dependency loop
        from ansible.playbook import Playbook

        # first, we use the original parent method to correctly load the object
        # via the load_data/preprocess_data system we normally use for other
        # playbook objects
        new_obj = super(PlaybookInclude, self).load_data(ds, variable_manager, loader)

        all_vars = self.vars.copy()
        if variable_manager:
            all_vars.update(variable_manager.get_vars(loader=loader))

        templar = Templar(loader=loader, variables=all_vars)

        try:
            forward_conditional = False
            if not new_obj.evaluate_conditional(templar=templar, all_vars=all_vars):
                return None
        except AnsibleError:
            # conditional evaluation raised an error, so we set a flag to indicate
            # we need to forward the conditionals on to the included play(s)
            forward_conditional = True

        # then we use the object to load a Playbook
        pb = Playbook(loader=loader)

        file_name = templar.template(new_obj.include)
        if not os.path.isabs(file_name):
            file_name = os.path.join(basedir, file_name)

        pb._load_playbook_data(file_name=file_name, variable_manager=variable_manager)
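
        # Note (illustrative): because the include path is run through the Templar
        # above, it may itself contain Jinja2 expressions; a hypothetical entry like
        #   - include: "{{ target_env }}_site.yml"
        # is resolved to a concrete file name before the playbook data is loaded.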

        # finally, update each loaded playbook entry with any variables specified
        # on the included playbook and/or any tags which may have been set
        for entry in pb._entries:
            temp_vars = entry.vars.copy()
            temp_vars.update(new_obj.vars)
            param_tags = temp_vars.pop('tags', None)
            if param_tags is not None:
                entry.tags.extend(param_tags.split(','))
            entry.vars = temp_vars
            entry.tags = list(set(entry.tags).union(new_obj.tags))
            if entry._included_path is None:
                entry._included_path = os.path.dirname(file_name)

            # Check to see if we need to forward the conditionals on to the included
            # plays. If so, we can take a shortcut here and simply prepend them to
            # those attached to each block (if any)
            if forward_conditional:
                for task_block in entry.tasks:
                    task_block.when = self.when[:] + task_block.when

        return pb
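
    # Usage sketch (illustrative, describing how this class is consumed by the
    # playbook loader): Playbook._load_playbook_data() calls PlaybookInclude.load()
    # for each top-level entry containing an 'include' key and, when the conditional
    # passes, extends its own entry list with the returned Playbook's entries.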

    def preprocess_data(self, ds):
        '''
        Reorganizes the data for a PlaybookInclude datastructure to line
        up with what we expect the proper attributes to be
        '''

        assert isinstance(ds, dict)

        # the new, cleaned datastructure, which will have legacy
        # items reduced to a standard structure
        new_ds = AnsibleMapping()
        if isinstance(ds, AnsibleBaseYAMLObject):
            new_ds.ansible_pos = ds.ansible_pos

        for (k, v) in iteritems(ds):
            if k == 'include':
                self._preprocess_include(ds, new_ds, k, v)
            else:
                # some basic error checking, to make sure vars are properly
                # formatted and do not conflict with k=v parameters
                if k == 'vars':
                    if 'vars' in new_ds:
                        raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
                    elif not isinstance(v, dict):
                        raise AnsibleParserError("vars for include statements must be specified as a dictionary", obj=ds)
                new_ds[k] = v

        return super(PlaybookInclude, self).preprocess_data(new_ds)
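
    # Illustrative example (values are hypothetical): preprocess_data() turns
    #
    #   {'include': 'site.yml some_var=foo', 'when': 'deploy_enabled'}
    #
    # into
    #
    #   {'include': 'site.yml', 'vars': {'some_var': 'foo'}, 'when': 'deploy_enabled'}
    #
    # before handing it off to the base class for attribute assignment.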

    def _preprocess_include(self, ds, new_ds, k, v):
        '''
        Splits the include line up into filename and parameters
        '''

        # The include line must include at least one item, which is the filename
        # to include. Anything after that should be regarded as a parameter to the include
        items = split_args(v)
        if len(items) == 0:
            raise AnsibleParserError("include statements must specify the file name to include", obj=ds)
        else:
            new_ds['include'] = items[0]
            if len(items) > 1:
                # rejoin the parameter portion of the arguments and
                # then use parse_kv() to get a dict of params back
                params = parse_kv(" ".join(items[1:]))
                if 'tags' in params:
                    new_ds['tags'] = params.pop('tags')
                if 'vars' in new_ds:
                    raise AnsibleParserError("include parameters cannot be mixed with 'vars' entries for include statements", obj=ds)
                new_ds['vars'] = params
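
    # Illustrative sketch of the helpers used above (values are hypothetical):
    #
    #   split_args("other.yml port=8080 name=web")
    #       -> ['other.yml', 'port=8080', 'name=web']
    #   parse_kv("port=8080 name=web")
    #       -> {'port': '8080', 'name': 'web'}
    #
    # i.e. the first token is treated as the file name to include and the
    # remaining key=value pairs become the include's vars.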