#!/usr/bin/env python

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json
import shutil

import ansible.constants as C
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.module_utils.common.collections import ImmutableDict
from ansible.inventory.manager import InventoryManager
from ansible.parsing.dataloader import DataLoader
from ansible.playbook.play import Play
from ansible.plugins.callback import CallbackBase
from ansible.vars.manager import VariableManager
from ansible import context


# Create a callback plugin so we can capture the output
class ResultsCollectorJSONCallback(CallbackBase):
    """A sample callback plugin used for performing an action as results come in.

    If you want to collect all results into a single object for processing at
    the end of the execution, look into utilizing the ``json`` callback plugin
    or writing your own custom callback plugin.
    """

    def __init__(self, *args, **kwargs):
        super(ResultsCollectorJSONCallback, self).__init__(*args, **kwargs)
        self.host_ok = {}
        self.host_unreachable = {}
        self.host_failed = {}

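    # Called for each host that could not be reached; keep the result so
    # main() can report it under "DOWN" after the run.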
    def v2_runner_on_unreachable(self, result):
        host = result._host
        self.host_unreachable[host.get_name()] = result

    def v2_runner_on_ok(self, result, *args, **kwargs):
        """Print a json representation of the result.

        Also, store the result in an instance attribute for retrieval later
        """
        host = result._host
        self.host_ok[host.get_name()] = result
        print(json.dumps({host.name: result._result}, indent=4))

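    # Called for each task that fails on a host; keep the result so
    # main() can report it under "FAILED" after the run.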
    def v2_runner_on_failed(self, result, *args, **kwargs):
        host = result._host
        self.host_failed[host.get_name()] = result


def main():
    host_list = ['localhost', 'www.example.com', 'www.google.com']
    # since the API was constructed for the CLI, it expects certain options to always be set in the context object
    context.CLIARGS = ImmutableDict(connection='smart', module_path=['/to/mymodules', '/usr/share/ansible'], forks=10, become=None,
                                    become_method=None, become_user=None, check=False, diff=False)
    # the inventory manager expects a comma-separated string of sources; a single host needs a trailing
    # comma so it is parsed as a host list rather than a file path, see
    # https://github.com/ansible/ansible/blob/devel/lib/ansible/inventory/manager.py#L204
    sources = ','.join(host_list)
    if len(host_list) == 1:
        sources += ','

    # initialize needed objects
    loader = DataLoader()  # Takes care of finding and reading yaml, json and ini files
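    # the passwords dict is handed straight to the TaskQueueManager below; this example only sets a dummy vault password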
    passwords = dict(vault_pass='secret')

    # Instantiate our ResultsCollectorJSONCallback for handling results as they come in. Ansible expects this to be one of its main display outlets
    results_callback = ResultsCollectorJSONCallback()

    # create inventory, use path to host config file as source or hosts in a comma separated string
    inventory = InventoryManager(loader=loader, sources=sources)

    # variable manager takes care of merging all the different sources to give you a unified view of variables available in each context
    variable_manager = VariableManager(loader=loader, inventory=inventory)

    # instantiate the task queue manager, which takes care of forking and setting up all objects to iterate over the host list and tasks
    # IMPORTANT: this also adds library dir paths to the module loader, so it must be initialized before calling `Play.load()`.
    tqm = TaskQueueManager(
        inventory=inventory,
        variable_manager=variable_manager,
        loader=loader,
        passwords=passwords,
        stdout_callback=results_callback,  # Use our custom callback instead of the ``default`` callback plugin, which prints to stdout
    )

    # create the data structure that represents our play, including tasks; this is basically what our YAML loader does internally.
    play_source = dict(
        name="Ansible Play",
        hosts=host_list,
        gather_facts='no',
        tasks=[
            dict(action=dict(module='shell', args='ls'), register='shell_out'),
            dict(action=dict(module='debug', args=dict(msg='{{shell_out.stdout}}'))),
            dict(action=dict(module='command', args=dict(cmd='/usr/bin/uptime'))),
        ]
    )

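    # For reference, the play above corresponds roughly to this playbook YAML
    # (an illustrative sketch, not something this script generates):
    #
    #   - name: Ansible Play
    #     hosts: localhost,www.example.com,www.google.com
    #     gather_facts: no
    #     tasks:
    #       - shell: ls
    #         register: shell_out
    #       - debug:
    #           msg: '{{ shell_out.stdout }}'
    #       - command: /usr/bin/uptime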
    # Create the play object; playbook objects use .load() instead of init or new methods.
    # This will also automatically create the task objects from the info provided in play_source.
    play = Play().load(play_source, variable_manager=variable_manager, loader=loader)

    # Actually run it
    try:
        result = tqm.run(play)  # most interesting data for a play is actually sent to the callback's methods
    finally:
        # we always need to clean up child procs and the structures we use to communicate with them
        tqm.cleanup()
        if loader:
            loader.cleanup_all_tmp_files()

    # Remove ansible tmpdir
    shutil.rmtree(C.DEFAULT_LOCAL_TMP, True)

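    # Report the results collected by the callback, grouped by outcome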
print("UP ***********")
|
2020-07-23 18:24:02 +02:00
|
|
|
for host, result in results_callback.host_ok.items():
|
2018-01-10 21:03:25 +01:00
|
|
|
print('{0} >>> {1}'.format(host, result._result['stdout']))
|
2016-02-17 19:21:12 +01:00
|
|
|
|
2016-12-07 00:00:35 +01:00
|
|
|
print("FAILED *******")
|
2020-07-23 18:24:02 +02:00
|
|
|
for host, result in results_callback.host_failed.items():
|
2018-01-10 21:03:25 +01:00
|
|
|
print('{0} >>> {1}'.format(host, result._result['msg']))
|
2016-02-17 19:21:12 +01:00
|
|
|
|
2016-12-07 00:00:35 +01:00
|
|
|
print("DOWN *********")
|
2020-07-23 18:24:02 +02:00
|
|
|
for host, result in results_callback.host_unreachable.items():
|
2018-01-10 21:03:25 +01:00
|
|
|
print('{0} >>> {1}'.format(host, result._result['msg']))
|
2016-02-17 19:21:12 +01:00
|
|
|
|
2018-07-29 13:46:06 +02:00
|
|
|
|
2016-02-17 19:21:12 +01:00
|
|
|
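
    # If you would rather post-process everything as a single object (as the class
    # docstring suggests), a minimal sketch -- assuming you only need the raw
    # ``_result`` dicts -- could look like this (left commented out on purpose):
    #
    #     all_results = {
    #         'ok': {h: r._result for h, r in results_callback.host_ok.items()},
    #         'failed': {h: r._result for h, r in results_callback.host_failed.items()},
    #         'unreachable': {h: r._result for h, r in results_callback.host_unreachable.items()},
    #     }
    #     print(json.dumps(all_results, indent=4))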


if __name__ == '__main__':
    main()