'''
(Epdb) pprint(DeepDiff(self.final_task_vars, out_task_vars), indent=2)
{ 'dic_item_added': set([u"root['ansible_python_interpreter']"]),
  'dic_item_removed': set([ u"root['hostvars']['127.0.0.1']",
                            u"root['hostvars']['::1']",
                            u"root['hostvars']['localhost']"]),
  'iterable_item_added': { u"root['hostvars']['el6host']['groups']['all'][1]": u'::1',
                           u"root['hostvars']['el6host']['groups']['ungrouped'][1]": u'::1',
                           u"root['vars']['hostvars']['el6host']['groups']['all'][1]": u'::1',
                           u"root['vars']['hostvars']['el6host']['groups']['ungrouped'][1]": u'::1'}}
'''
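
# The diff above was captured in an interactive epdb session while comparing the
# incoming task vars against the expected output fixture; DeepDiff comes from the
# third-party 'deepdiff' package and is not needed to actually run these tests.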

import json
import os
import unittest
import yaml

from pprint import pprint

import ansible.plugins
from units.compat.mock import patch, MagicMock
from ansible.plugins.action.synchronize import ActionModule


# Getting the incoming and outgoing task vars from the plugin's run method

'''
import copy
safe_vars = {}
for k, v in task_vars.items():
    if k not in ['vars', 'hostvars']:
        safe_vars[k] = copy.deepcopy(v)
    else:
        sdata = str(v)
        newv = eval(sdata)
        safe_vars[k] = newv

import json
with open('task_vars.json', 'wb') as f:
    f.write(json.dumps(safe_vars, indent=2))
'''


class BreakPoint(Exception):
    pass


class TaskMock(object):
    args = {'src': u'/tmp/deleteme',
            'dest': '/tmp/deleteme',
            'rsync_path': 'rsync'}
    async_val = None
    become = None
    become_user = None
    become_method = None


class StdinMock(object):
    shell = None


class ConnectionMock(object):
    ismock = True
    _play_context = None
    # transport = 'ssh'
    transport = None
    _new_stdin = StdinMock()

    get_option = MagicMock(return_value='root')

    # my shell
    _shell = MagicMock()
    _shell.mkdtemp.return_value = 'mkdir command'
    _shell.join_path.side_effect = os.path.join
    _shell.get_option = MagicMock(return_value=['root', 'toor'])


class PlayContextMock(object):
    shell = None
    private_key_file = None
    become = False
    become_user = 'root'
    become_method = None
    check_mode = False
    no_log = None
    diff = None
    remote_addr = None
    remote_user = None
    password = None


class ModuleLoaderMock(object):
    def find_plugin(self, module_name, mod_type):
        pass


class SharedLoaderMock(object):
    module_loader = ModuleLoaderMock()


class SynchronizeTester(object):

    ''' A wrapper for mocking out synchronize environments '''
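
    # The flow mirrored by the test cases below: instantiate the tester, point
    # runtest() at a fixture directory, and let the asserts declared in that
    # fixture's meta.yaml run against the captured results.
    #
    #   tester = SynchronizeTester()
    #   tester.runtest(fixturepath='fixtures/synchronize/basic')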

    task = TaskMock()
    connection = ConnectionMock()
    _play_context = PlayContextMock()
    loader = None
    templar = None
    shared_loader_obj = SharedLoaderMock()

    final_task_vars = None
    execute_called = False

    def _execute_module(self, module_name, module_args=None, task_vars=None):
        self.execute_called = True
        self.final_module_args = module_args
        self.final_task_vars = task_vars
        return {}

    def runtest(self, fixturepath='fixtures/synchronize/basic'):

        metapath = os.path.join(fixturepath, 'meta.yaml')
        with open(metapath, 'rb') as f:
            fdata = f.read()
        # safe_load is sufficient here; the fixture is plain YAML data
        test_meta = yaml.safe_load(fdata)
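
        # Illustrative sketch of a fixture meta.yaml. The key names are the ones
        # read below; the values are hypothetical and not copied from any real
        # fixture in this repo:
        #
        #   fixtures:
        #     taskvars_in: taskvars_in.json
        #     taskvars_out: taskvars_out.json
        #   _play_context:
        #     remote_user: root
        #   _task: {}
        #   task_args:
        #     src: /tmp/deleteme
        #     dest: /tmp/deleteme
        #   connection:
        #     transport: ssh
        #   hostvars: {}
        #   asserts:
        #     - "self.execute_called"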

        # load initial play context vars
        if '_play_context' in test_meta:
            if test_meta['_play_context']:
                self.task.args = {}
                for (k, v) in test_meta['_play_context'].items():
                    if v == 'None':
                        v = None
                    setattr(self._play_context, k, v)

        # load initial task context vars
        if '_task' in test_meta:
            if test_meta['_task']:
                self.task.args = {}
                for (k, v) in test_meta['_task'].items():
                    # import epdb; epdb.st()
                    if v == 'None':
                        v = None
                    setattr(self.task, k, v)

        # load initial task args
        if 'task_args' in test_meta:
            if test_meta['task_args']:
                self.task.args = {}
                for (k, v) in test_meta['task_args'].items():
                    self.task.args[k] = v

        # load incoming task vars
        invarspath = os.path.join(fixturepath, test_meta.get('fixtures', {}).get('taskvars_in', 'taskvars_in.json'))
        with open(invarspath, 'rb') as f:
            fdata = f.read()
        fdata = fdata.decode("utf-8")
        in_task_vars = json.loads(fdata)

        # load expected final task vars
        outvarspath = os.path.join(fixturepath, test_meta.get('fixtures', {}).get('taskvars_out', 'taskvars_out.json'))
        with open(outvarspath, 'rb') as f:
            fdata = f.read()
        fdata = fdata.decode("utf-8")
        out_task_vars = json.loads(fdata)

        # fixup the connection
        for (k, v) in test_meta['connection'].items():
            setattr(self.connection, k, v)

        # fixup the hostvars
        if test_meta['hostvars']:
            for (k, v) in test_meta['hostvars'].items():
                in_task_vars['hostvars'][k] = v

        # initialize and run the module
        SAM = ActionModule(self.task, self.connection, self._play_context,
                           self.loader, self.templar, self.shared_loader_obj)
        SAM._execute_module = self._execute_module
        result = SAM.run(task_vars=in_task_vars)

        # run assertions
        for check in test_meta['asserts']:
            value = eval(check)
            # if not value:
            #     print(check, value)
            #     import epdb; epdb.st()
            assert value, check


class FakePluginLoader(object):
    mocked = True

    @staticmethod
    def get(transport, play_context, new_stdin):
        conn = ConnectionMock()
        conn.transport = transport
        conn._play_context = play_context
        conn._new_stdin = new_stdin
        return conn


class TestSynchronizeAction(unittest.TestCase):

    fixturedir = os.path.dirname(__file__)
    fixturedir = os.path.join(fixturedir, 'fixtures', 'synchronize')
    # print(basedir)

    @patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
    def test_basic(self):
        x = SynchronizeTester()
        x.runtest(fixturepath=os.path.join(self.fixturedir, 'basic'))

    @patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
    def test_basic_become(self):
        x = SynchronizeTester()
        x.runtest(fixturepath=os.path.join(self.fixturedir, 'basic_become'))

    @patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
    def test_basic_become_cli(self):
        # --become on the cli sets _play_context.become
        x = SynchronizeTester()
        x.runtest(fixturepath=os.path.join(self.fixturedir, 'basic_become_cli'))

    @patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
    def test_basic_vagrant(self):
        # simple vagrant example
        x = SynchronizeTester()
        x.runtest(fixturepath=os.path.join(self.fixturedir, 'basic_vagrant'))

    @patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
    def test_basic_vagrant_sudo(self):
        # vagrant plus sudo
        x = SynchronizeTester()
        x.runtest(fixturepath=os.path.join(self.fixturedir, 'basic_vagrant_sudo'))

    @patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
    def test_basic_vagrant_become_cli(self):
        # vagrant plus --become on the cli
        x = SynchronizeTester()
        x.runtest(fixturepath=os.path.join(self.fixturedir, 'basic_vagrant_become_cli'))

    @patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
    def test_delegate_remote(self):
        # delegate to other remote host
        x = SynchronizeTester()
        x.runtest(fixturepath=os.path.join(self.fixturedir, 'delegate_remote'))

    @patch('ansible.plugins.action.synchronize.connection_loader', FakePluginLoader)
    def test_delegate_remote_su(self):
        # delegate to other remote host with su enabled
        x = SynchronizeTester()
        x.runtest(fixturepath=os.path.join(self.fixturedir, 'delegate_remote_su'))

    @patch.object(ActionModule, '_low_level_execute_command', side_effect=BreakPoint)
    @patch.object(ActionModule, '_remote_expand_user', side_effect=ActionModule._remote_expand_user, autospec=True)
    def test_remote_user_not_in_local_tmpdir(self, spy_remote_expand_user, ll_ec):
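        # _low_level_execute_command is patched to raise BreakPoint so run() bails
        # out before attempting any remote work; _remote_expand_user is wrapped
        # (autospec, with the real method as side_effect) only so its call count
        # can be checked afterwards.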
        x = SynchronizeTester()
        SAM = ActionModule(x.task, x.connection, x._play_context,
                           x.loader, x.templar, x.shared_loader_obj)
        try:
            SAM.run(task_vars={'hostvars': {'foo': {}, 'localhost': {}}, 'inventory_hostname': 'foo'})
        except BreakPoint:
            pass

        self.assertEqual(spy_remote_expand_user.call_count, 0)