# -*- coding: utf-8 -*-
# (c) 2016, Adrian Likins <alikins@redhat.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import ansible
import json
import os
import pytest
import shutil
import tarfile
import tempfile
import yaml

import ansible.constants as C
from ansible import context
from ansible.cli.galaxy import GalaxyCLI
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_text
from ansible.utils import context_objects as co
from units.compat import unittest
from units.compat.mock import patch, MagicMock


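# Clearing the singleton that caches parsed CLI arguments keeps state from one
# test from leaking into the next, so every test starts from a clean slate.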
@pytest.fixture(autouse='function')
def reset_cli_args():
    co.GlobalCLIArgs._Singleton__instance = None
    yield
    co.GlobalCLIArgs._Singleton__instance = None


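# Tests for the role-oriented ansible-galaxy commands (init, install, remove,
# and the option parsing for each action).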
class TestGalaxy(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        '''creating prerequisites for installing a role; setUpClass occurs ONCE whereas setUp occurs with every method tested.'''
        # class data for easy viewing: role_dir, role_tar, role_name, role_req, role_path

        cls.temp_dir = tempfile.mkdtemp(prefix='ansible-test_galaxy-')
        os.chdir(cls.temp_dir)

        if os.path.exists("./delete_me"):
            shutil.rmtree("./delete_me")

        # creating framework for a role
        gc = GalaxyCLI(args=["ansible-galaxy", "init", "--offline", "delete_me"])
        gc.run()
        cls.role_dir = "./delete_me"
        cls.role_name = "delete_me"

        # making a temp dir for role installation
        cls.role_path = os.path.join(tempfile.mkdtemp(), "roles")
        if not os.path.isdir(cls.role_path):
            os.makedirs(cls.role_path)

        # creating a tar file name for class data
        cls.role_tar = './delete_me.tar.gz'
        cls.makeTar(cls.role_tar, cls.role_dir)

        # creating a temp file with installation requirements
        cls.role_req = './delete_me_requirements.yml'
        fd = open(cls.role_req, "w")
        fd.write("- 'src': '%s'\n  'name': '%s'\n  'path': '%s'" % (cls.role_tar, cls.role_name, cls.role_path))
        fd.close()

    @classmethod
    def makeTar(cls, output_file, source_dir):
        ''' used for making a tarfile from a role directory '''
        # adding directory into a tar file
        try:
            tar = tarfile.open(output_file, "w:gz")
            tar.add(source_dir, arcname=os.path.basename(source_dir))
        except AttributeError:  # tarfile obj. has no attribute __exit__ prior to python 2.7
            pass
        finally:  # ensuring closure of tarfile obj
            tar.close()

    @classmethod
    def tearDownClass(cls):
        '''After tests are finished removes things created in setUpClass'''
        # deleting the temp role directory
        if os.path.exists(cls.role_dir):
            shutil.rmtree(cls.role_dir)
        if os.path.exists(cls.role_req):
            os.remove(cls.role_req)
        if os.path.exists(cls.role_tar):
            os.remove(cls.role_tar)
        if os.path.isdir(cls.role_path):
            shutil.rmtree(cls.role_path)

        os.chdir('/')
        shutil.rmtree(cls.temp_dir)

    def setUp(self):
        # Reset the stored command line args
        co.GlobalCLIArgs._Singleton__instance = None
        self.default_args = ['ansible-galaxy']

    def tearDown(self):
        # Reset the stored command line args
        co.GlobalCLIArgs._Singleton__instance = None

    def test_init(self):
        galaxy_cli = GalaxyCLI(args=self.default_args)
        self.assertTrue(isinstance(galaxy_cli, GalaxyCLI))

    def test_display_min(self):
        gc = GalaxyCLI(args=self.default_args)
        role_info = {'name': 'some_role_name'}
        display_result = gc._display_role_info(role_info)
        self.assertTrue(display_result.find('some_role_name') > -1)

    def test_display_galaxy_info(self):
        gc = GalaxyCLI(args=self.default_args)
        galaxy_info = {}
        role_info = {'name': 'some_role_name',
                     'galaxy_info': galaxy_info}
        display_result = gc._display_role_info(role_info)
        if display_result.find('\n\tgalaxy_info:') == -1:
            self.fail('Expected galaxy_info to be indented once')

    def test_run(self):
        ''' verifies that the GalaxyCLI object's api is created and that execute() is called. '''
        gc = GalaxyCLI(args=["ansible-galaxy", "install", "--ignore-errors", "imaginary_role"])
        gc.parse()
        with patch.object(ansible.cli.CLI, "run", return_value=None) as mock_run:
            gc.run()
            # testing
            self.assertIsInstance(gc.galaxy, ansible.galaxy.Galaxy)
            self.assertEqual(mock_run.call_count, 1)
            self.assertTrue(isinstance(gc.api, ansible.galaxy.api.GalaxyAPI))

    def test_execute_remove(self):
        # installing role
        gc = GalaxyCLI(args=["ansible-galaxy", "install", "-p", self.role_path, "-r", self.role_req, '--force'])
        gc.run()

        # location where the role was installed
        role_file = os.path.join(self.role_path, self.role_name)

        # removing role
        # Have to reset the arguments in the context object manually since we're doing the
        # equivalent of running the command line program twice
        co.GlobalCLIArgs._Singleton__instance = None
        gc = GalaxyCLI(args=["ansible-galaxy", "remove", role_file, self.role_name])
        gc.run()

        # testing role was removed
        removed_role = not os.path.exists(role_file)
        self.assertTrue(removed_role)

    def test_exit_without_ignore_without_flag(self):
        ''' tests that GalaxyCLI exits with the error specified if the --ignore-errors flag is not used '''
        gc = GalaxyCLI(args=["ansible-galaxy", "install", "--server=None", "fake_role_name"])
        with patch.object(ansible.utils.display.Display, "display", return_value=None) as mocked_display:
            # testing that error expected is raised
            self.assertRaises(AnsibleError, gc.run)
            self.assertTrue(mocked_display.called_once_with("- downloading role 'fake_role_name', owned by "))

    def test_exit_without_ignore_with_flag(self):
        ''' tests that GalaxyCLI exits without the error specified if the --ignore-errors flag is used '''
        # testing with --ignore-errors flag
        gc = GalaxyCLI(args=["ansible-galaxy", "install", "--server=None", "fake_role_name", "--ignore-errors"])
        with patch.object(ansible.utils.display.Display, "display", return_value=None) as mocked_display:
            gc.run()
            self.assertTrue(mocked_display.called_once_with("- downloading role 'fake_role_name', owned by "))

    def test_parse_no_action(self):
        ''' testing the options parser when no action is given '''
        gc = GalaxyCLI(args=["ansible-galaxy", ""])
        self.assertRaises(SystemExit, gc.parse)

    def test_parse_invalid_action(self):
        ''' testing the options parser when an invalid action is given '''
        gc = GalaxyCLI(args=["ansible-galaxy", "NOT_ACTION"])
        self.assertRaises(SystemExit, gc.parse)

    def test_parse_delete(self):
        ''' testing the options parser when the action 'delete' is given '''
        gc = GalaxyCLI(args=["ansible-galaxy", "delete", "foo", "bar"])
        gc.parse()
        self.assertEqual(context.CLIARGS['verbosity'], 0)

    def test_parse_import(self):
        ''' testing the options parser when the action 'import' is given '''
        gc = GalaxyCLI(args=["ansible-galaxy", "import", "foo", "bar"])
        gc.parse()
        self.assertEqual(context.CLIARGS['wait'], True)
        self.assertEqual(context.CLIARGS['reference'], None)
        self.assertEqual(context.CLIARGS['check_status'], False)
        self.assertEqual(context.CLIARGS['verbosity'], 0)

    def test_parse_info(self):
        ''' testing the options parser when the action 'info' is given '''
        gc = GalaxyCLI(args=["ansible-galaxy", "info", "foo", "bar"])
        gc.parse()
        self.assertEqual(context.CLIARGS['offline'], False)

    def test_parse_init(self):
        ''' testing the options parser when the action 'init' is given '''
        gc = GalaxyCLI(args=["ansible-galaxy", "init", "foo"])
        gc.parse()
        self.assertEqual(context.CLIARGS['offline'], False)
        self.assertEqual(context.CLIARGS['force'], False)

    def test_parse_install(self):
        ''' testing the options parser when the action 'install' is given '''
        gc = GalaxyCLI(args=["ansible-galaxy", "install"])
        gc.parse()
        self.assertEqual(context.CLIARGS['ignore_errors'], False)
        self.assertEqual(context.CLIARGS['no_deps'], False)
        self.assertEqual(context.CLIARGS['role_file'], None)
        self.assertEqual(context.CLIARGS['force'], False)

    def test_parse_list(self):
        ''' testing the options parser when the action 'list' is given '''
        gc = GalaxyCLI(args=["ansible-galaxy", "list"])
        gc.parse()
        self.assertEqual(context.CLIARGS['verbosity'], 0)

    def test_parse_login(self):
        ''' testing the options parser when the action 'login' is given '''
        gc = GalaxyCLI(args=["ansible-galaxy", "login"])
        gc.parse()
        self.assertEqual(context.CLIARGS['verbosity'], 0)
        self.assertEqual(context.CLIARGS['token'], None)

    def test_parse_remove(self):
        ''' testing the options parser when the action 'remove' is given '''
        gc = GalaxyCLI(args=["ansible-galaxy", "remove", "foo"])
        gc.parse()
        self.assertEqual(context.CLIARGS['verbosity'], 0)

    def test_parse_search(self):
        ''' testing the options parser when the action 'search' is given '''
        gc = GalaxyCLI(args=["ansible-galaxy", "search"])
        gc.parse()
        self.assertEqual(context.CLIARGS['platforms'], None)
        self.assertEqual(context.CLIARGS['galaxy_tags'], None)
        self.assertEqual(context.CLIARGS['author'], None)

    def test_parse_setup(self):
        ''' testing the options parser when the action 'setup' is given '''
        gc = GalaxyCLI(args=["ansible-galaxy", "setup", "source", "github_user", "github_repo", "secret"])
        gc.parse()
        self.assertEqual(context.CLIARGS['verbosity'], 0)
        self.assertEqual(context.CLIARGS['remove_id'], None)
        self.assertEqual(context.CLIARGS['setup_list'], False)


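# Mixin shared by the TestGalaxyInit* classes below; each subclass calls
# setUpRole() with different arguments and inherits these structural checks.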
class ValidRoleTests(object):

    expected_role_dirs = ('defaults', 'files', 'handlers', 'meta', 'tasks', 'templates', 'vars', 'tests')

    @classmethod
    def setUpRole(cls, role_name, galaxy_args=None, skeleton_path=None, use_explicit_type=False):
        if galaxy_args is None:
            galaxy_args = []

        if skeleton_path is not None:
            cls.role_skeleton_path = skeleton_path
            galaxy_args += ['--role-skeleton', skeleton_path]

        # Make temp directory for testing
        cls.test_dir = tempfile.mkdtemp()
        if not os.path.isdir(cls.test_dir):
            os.makedirs(cls.test_dir)

        cls.role_dir = os.path.join(cls.test_dir, role_name)
        cls.role_name = role_name

        # create role using default skeleton
        args = ['ansible-galaxy']
        if use_explicit_type:
            args += ['role']
        args += ['init', '-c', '--offline'] + galaxy_args + ['--init-path', cls.test_dir, cls.role_name]

        gc = GalaxyCLI(args=args)
        gc.run()
        cls.gc = gc

        if skeleton_path is None:
            cls.role_skeleton_path = gc.galaxy.default_role_skeleton_path

    @classmethod
    def tearDownClass(cls):
        if os.path.isdir(cls.test_dir):
            shutil.rmtree(cls.test_dir)

    def test_metadata(self):
        with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
            metadata = yaml.safe_load(mf)
        self.assertIn('galaxy_info', metadata, msg='unable to find galaxy_info in metadata')
        self.assertIn('dependencies', metadata, msg='unable to find dependencies in metadata')

    def test_readme(self):
        readme_path = os.path.join(self.role_dir, 'README.md')
        self.assertTrue(os.path.exists(readme_path), msg='Readme doesn\'t exist')

    def test_main_ymls(self):
        need_main_ymls = set(self.expected_role_dirs) - set(['meta', 'tests', 'files', 'templates'])
        for d in need_main_ymls:
            main_yml = os.path.join(self.role_dir, d, 'main.yml')
            self.assertTrue(os.path.exists(main_yml))
            expected_string = "---\n# {0} file for {1}".format(d, self.role_name)
            with open(main_yml, 'r') as f:
                self.assertEqual(expected_string, f.read().strip())

    def test_role_dirs(self):
        for d in self.expected_role_dirs:
            self.assertTrue(os.path.isdir(os.path.join(self.role_dir, d)), msg="Expected role subdirectory {0} doesn't exist".format(d))

    def test_travis_yml(self):
        with open(os.path.join(self.role_dir, '.travis.yml'), 'r') as f:
            contents = f.read()

        with open(os.path.join(self.role_skeleton_path, '.travis.yml'), 'r') as f:
            expected_contents = f.read()

        self.assertEqual(expected_contents, contents, msg='.travis.yml does not match expected')

    def test_readme_contents(self):
        with open(os.path.join(self.role_dir, 'README.md'), 'r') as readme:
            contents = readme.read()

        with open(os.path.join(self.role_skeleton_path, 'README.md'), 'r') as f:
            expected_contents = f.read()

        self.assertEqual(expected_contents, contents, msg='README.md does not match expected')

    def test_test_yml(self):
        with open(os.path.join(self.role_dir, 'tests', 'test.yml'), 'r') as f:
            test_playbook = yaml.safe_load(f)
        print(test_playbook)
        self.assertEqual(len(test_playbook), 1)
        self.assertEqual(test_playbook[0]['hosts'], 'localhost')
        self.assertEqual(test_playbook[0]['remote_user'], 'root')
        self.assertListEqual(test_playbook[0]['roles'], [self.role_name], msg='The list of roles included in the test play doesn\'t match')


class TestGalaxyInitDefault(unittest.TestCase, ValidRoleTests):

    @classmethod
    def setUpClass(cls):
        cls.setUpRole(role_name='delete_me')

    def test_metadata_contents(self):
        with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
            metadata = yaml.safe_load(mf)
        self.assertEqual(metadata.get('galaxy_info', dict()).get('author'), 'your name', msg='author was not set properly in metadata')


class TestGalaxyInitAPB(unittest.TestCase, ValidRoleTests):

    @classmethod
    def setUpClass(cls):
        cls.setUpRole('delete_me_apb', galaxy_args=['--type=apb'])

    def test_metadata_apb_tag(self):
        with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
            metadata = yaml.safe_load(mf)
        self.assertIn('apb', metadata.get('galaxy_info', dict()).get('galaxy_tags', []), msg='apb tag not set in role metadata')

    def test_metadata_contents(self):
        with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
            metadata = yaml.safe_load(mf)
        self.assertEqual(metadata.get('galaxy_info', dict()).get('author'), 'your name', msg='author was not set properly in metadata')

    def test_apb_yml(self):
        self.assertTrue(os.path.exists(os.path.join(self.role_dir, 'apb.yml')), msg='apb.yml was not created')

    def test_test_yml(self):
        with open(os.path.join(self.role_dir, 'tests', 'test.yml'), 'r') as f:
            test_playbook = yaml.safe_load(f)
        print(test_playbook)
        self.assertEqual(len(test_playbook), 1)
        self.assertEqual(test_playbook[0]['hosts'], 'localhost')
        self.assertFalse(test_playbook[0]['gather_facts'])
        self.assertEqual(test_playbook[0]['connection'], 'local')
        self.assertIsNone(test_playbook[0]['tasks'], msg='We\'re expecting an unset list of tasks in test.yml')


class TestGalaxyInitContainer(unittest.TestCase, ValidRoleTests):

    @classmethod
    def setUpClass(cls):
        cls.setUpRole('delete_me_container', galaxy_args=['--type=container'])

    def test_metadata_container_tag(self):
        with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
            metadata = yaml.safe_load(mf)
        self.assertIn('container', metadata.get('galaxy_info', dict()).get('galaxy_tags', []), msg='container tag not set in role metadata')

    def test_metadata_contents(self):
        with open(os.path.join(self.role_dir, 'meta', 'main.yml'), 'r') as mf:
            metadata = yaml.safe_load(mf)
        self.assertEqual(metadata.get('galaxy_info', dict()).get('author'), 'your name', msg='author was not set properly in metadata')

    def test_meta_container_yml(self):
        self.assertTrue(os.path.exists(os.path.join(self.role_dir, 'meta', 'container.yml')), msg='container.yml was not created')

    def test_test_yml(self):
        with open(os.path.join(self.role_dir, 'tests', 'test.yml'), 'r') as f:
            test_playbook = yaml.safe_load(f)
        print(test_playbook)
        self.assertEqual(len(test_playbook), 1)
        self.assertEqual(test_playbook[0]['hosts'], 'localhost')
        self.assertFalse(test_playbook[0]['gather_facts'])
        self.assertEqual(test_playbook[0]['connection'], 'local')
        self.assertIsNone(test_playbook[0]['tasks'], msg='We\'re expecting an unset list of tasks in test.yml')


class TestGalaxyInitSkeleton(unittest.TestCase, ValidRoleTests):

    @classmethod
    def setUpClass(cls):
        role_skeleton_path = os.path.join(os.path.split(__file__)[0], 'test_data', 'role_skeleton')
        cls.setUpRole('delete_me_skeleton', skeleton_path=role_skeleton_path, use_explicit_type=True)

    def test_empty_files_dir(self):
        files_dir = os.path.join(self.role_dir, 'files')
        self.assertTrue(os.path.isdir(files_dir))
        self.assertListEqual(os.listdir(files_dir), [], msg='we expect the files directory to be empty, is ignore working?')

    def test_template_ignore_jinja(self):
        test_conf_j2 = os.path.join(self.role_dir, 'templates', 'test.conf.j2')
        self.assertTrue(os.path.exists(test_conf_j2), msg="The test.conf.j2 template doesn't seem to exist, is it being rendered as test.conf?")
        with open(test_conf_j2, 'r') as f:
            contents = f.read()
        expected_contents = '[defaults]\ntest_key = {{ test_variable }}'
        self.assertEqual(expected_contents, contents.strip(), msg="test.conf.j2 doesn't contain what it should, is it being rendered?")

    def test_template_ignore_jinja_subfolder(self):
        test_conf_j2 = os.path.join(self.role_dir, 'templates', 'subfolder', 'test.conf.j2')
        self.assertTrue(os.path.exists(test_conf_j2), msg="The test.conf.j2 template doesn't seem to exist, is it being rendered as test.conf?")
        with open(test_conf_j2, 'r') as f:
            contents = f.read()
        expected_contents = '[defaults]\ntest_key = {{ test_variable }}'
        self.assertEqual(expected_contents, contents.strip(), msg="test.conf.j2 doesn't contain what it should, is it being rendered?")

    def test_template_ignore_similar_folder(self):
        self.assertTrue(os.path.exists(os.path.join(self.role_dir, 'templates_extra', 'templates.txt')))

    def test_skeleton_option(self):
        self.assertEqual(self.role_skeleton_path, context.CLIARGS['role_skeleton'], msg='Skeleton path was not parsed properly from the command line')


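# The fixtures and tests below exercise the collection-oriented ansible-galaxy
# commands. collection_skeleton is parametrized indirectly: each test supplies a
# (collection name, skeleton path) tuple and receives the path of the
# initialized collection.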
@pytest.fixture()
def collection_skeleton(request, tmp_path_factory):
    name, skeleton_path = request.param

    galaxy_args = ['ansible-galaxy', 'collection', 'init', '-c']

    if skeleton_path is not None:
        galaxy_args += ['--collection-skeleton', skeleton_path]

    test_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections'))
    galaxy_args += ['--init-path', test_dir, name]

    GalaxyCLI(args=galaxy_args).run()
    namespace_name, collection_name = name.split('.', 1)
    collection_dir = os.path.join(test_dir, namespace_name, collection_name)

    return collection_dir


@pytest.mark.parametrize('collection_skeleton', [
    ('ansible_test.my_collection', None),
], indirect=True)
def test_collection_default(collection_skeleton):
    meta_path = os.path.join(collection_skeleton, 'galaxy.yml')

    with open(meta_path, 'r') as galaxy_meta:
        metadata = yaml.safe_load(galaxy_meta)

    assert metadata['namespace'] == 'ansible_test'
    assert metadata['name'] == 'my_collection'
    assert metadata['authors'] == ['your name <example@domain.com>']
    assert metadata['readme'] == 'README.md'
    assert metadata['version'] == '1.0.0'
    assert metadata['description'] == 'your collection description'
    assert metadata['license'] == ['GPL-2.0-or-later']
    assert metadata['tags'] == []
    assert metadata['dependencies'] == {}
    assert metadata['documentation'] == 'http://docs.example.com'
    assert metadata['repository'] == 'http://example.com/repository'
    assert metadata['homepage'] == 'http://example.com'
    assert metadata['issues'] == 'http://example.com/issue/tracker'

    for d in ['docs', 'plugins', 'roles']:
        assert os.path.isdir(os.path.join(collection_skeleton, d)), \
            "Expected collection subdirectory {0} doesn't exist".format(d)


@pytest.mark.parametrize('collection_skeleton', [
    ('ansible_test.delete_me_skeleton', os.path.join(os.path.split(__file__)[0], 'test_data', 'collection_skeleton')),
], indirect=True)
def test_collection_skeleton(collection_skeleton):
    meta_path = os.path.join(collection_skeleton, 'galaxy.yml')

    with open(meta_path, 'r') as galaxy_meta:
        metadata = yaml.safe_load(galaxy_meta)

    assert metadata['namespace'] == 'ansible_test'
    assert metadata['name'] == 'delete_me_skeleton'
    assert metadata['authors'] == ['Ansible Cow <acow@bovineuniversity.edu>', 'Tu Cow <tucow@bovineuniversity.edu>']
    assert metadata['version'] == '0.1.0'
    assert metadata['readme'] == 'README.md'
    assert len(metadata) == 5

    assert os.path.exists(os.path.join(collection_skeleton, 'README.md'))

    # Test empty directories exist and are empty
    for empty_dir in ['plugins/action', 'plugins/filter', 'plugins/inventory', 'plugins/lookup',
                      'plugins/module_utils', 'plugins/modules']:

        assert os.listdir(os.path.join(collection_skeleton, empty_dir)) == []

    # Test files that don't end with .j2 were not templated
    doc_file = os.path.join(collection_skeleton, 'docs', 'My Collection.md')
    with open(doc_file, 'r') as f:
        doc_contents = f.read()
    assert doc_contents.strip() == 'Welcome to my test collection doc for {{ namespace }}.'

    # Test files that end with .j2 but are in the templates directory were not templated
    for template_dir in ['playbooks/templates', 'playbooks/templates/subfolder',
                         'roles/common/templates', 'roles/common/templates/subfolder']:
        test_conf_j2 = os.path.join(collection_skeleton, template_dir, 'test.conf.j2')
        assert os.path.exists(test_conf_j2)

        with open(test_conf_j2, 'r') as f:
            contents = f.read()
        expected_contents = '[defaults]\ntest_key = {{ test_variable }}'

        assert expected_contents == contents.strip()


@pytest.fixture()
def collection_artifact(collection_skeleton, tmp_path_factory):
    ''' Creates a collection artifact tarball that is ready to be published and installed '''
    output_dir = to_text(tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Output'))

    # Because we call GalaxyCLI in collection_skeleton we need to reset the singleton back to None so it uses the new
    # args, we reset the original args once it is done.
    orig_cli_args = co.GlobalCLIArgs._Singleton__instance
    try:
        co.GlobalCLIArgs._Singleton__instance = None
        galaxy_args = ['ansible-galaxy', 'collection', 'build', collection_skeleton, '--output-path', output_dir]
        gc = GalaxyCLI(args=galaxy_args)
        gc.run()

        yield output_dir
    finally:
        co.GlobalCLIArgs._Singleton__instance = orig_cli_args


def test_invalid_skeleton_path():
    expected = "- the skeleton path '/fake/path' does not exist, cannot init collection"

    gc = GalaxyCLI(args=['ansible-galaxy', 'collection', 'init', 'my.collection', '--collection-skeleton',
                         '/fake/path'])
    with pytest.raises(AnsibleError, match=expected):
        gc.run()


@pytest.mark.parametrize("name", [
    "",
    "invalid",
    "hypen-ns.collection",
    "ns.hyphen-collection",
    "ns.collection.weird",
])
def test_invalid_collection_name(name):
    expected = "Invalid collection name, must be in the format <namespace>.<collection>"

    gc = GalaxyCLI(args=['ansible-galaxy', 'collection', 'init', name])
    with pytest.raises(AnsibleError, match=expected):
        gc.run()


@pytest.mark.parametrize('collection_skeleton', [
    ('ansible_test.build_collection', None),
], indirect=True)
def test_collection_build(collection_artifact):
    tar_path = os.path.join(collection_artifact, 'ansible_test-build_collection-1.0.0.tar.gz')
    assert tarfile.is_tarfile(tar_path)

    with tarfile.open(tar_path, mode='r') as tar:
        tar_members = tar.getmembers()

        valid_files = ['MANIFEST.json', 'FILES.json', 'roles', 'docs', 'plugins', 'plugins/README.md', 'README.md']
        assert len(tar_members) == 7

        # Verify the uid and gid is 0 and the correct perms are set
        for member in tar_members:
            assert member.name in valid_files

            assert member.gid == 0
            assert member.gname == ''
            assert member.uid == 0
            assert member.uname == ''
            if member.isdir():
                assert member.mode == 0o0755
            else:
                assert member.mode == 0o0644

        manifest_file = tar.extractfile(tar_members[0])
        try:
            manifest = json.loads(to_text(manifest_file.read()))
        finally:
            manifest_file.close()

        coll_info = manifest['collection_info']
        file_manifest = manifest['file_manifest_file']
        assert manifest['format'] == 1
        assert len(manifest.keys()) == 3

        assert coll_info['namespace'] == 'ansible_test'
        assert coll_info['name'] == 'build_collection'
        assert coll_info['version'] == '1.0.0'
        assert coll_info['authors'] == ['your name <example@domain.com>']
        assert coll_info['readme'] == 'README.md'
        assert coll_info['tags'] == []
        assert coll_info['description'] == 'your collection description'
        assert coll_info['license'] == ['GPL-2.0-or-later']
        assert coll_info['license_file'] is None
        assert coll_info['dependencies'] == {}
        assert coll_info['repository'] == 'http://example.com/repository'
        assert coll_info['documentation'] == 'http://docs.example.com'
        assert coll_info['homepage'] == 'http://example.com'
        assert coll_info['issues'] == 'http://example.com/issue/tracker'
        assert len(coll_info.keys()) == 14

        assert file_manifest['name'] == 'FILES.json'
        assert file_manifest['ftype'] == 'file'
        assert file_manifest['chksum_type'] == 'sha256'
        assert file_manifest['chksum_sha256'] is not None  # Order of keys makes it hard to verify the checksum
        assert file_manifest['format'] == 1
        assert len(file_manifest.keys()) == 5

        files_file = tar.extractfile(tar_members[1])
        try:
            files = json.loads(to_text(files_file.read()))
        finally:
            files_file.close()

        assert len(files['files']) == 6
        assert files['format'] == 1
        assert len(files.keys()) == 2

        valid_files_entries = ['.', 'roles', 'docs', 'plugins', 'plugins/README.md', 'README.md']
        for file_entry in files['files']:
            assert file_entry['name'] in valid_files_entries
            assert file_entry['format'] == 1

            if file_entry['name'] == 'plugins/README.md':
                assert file_entry['ftype'] == 'file'
                assert file_entry['chksum_type'] == 'sha256'
                assert file_entry['chksum_sha256'] == '5be7ec7b71096d56e1cc48311b6a2266b77b5fdb9d1985b5bc625787b1e857c5'
            elif file_entry['name'] == 'README.md':
                assert file_entry['ftype'] == 'file'
                assert file_entry['chksum_type'] == 'sha256'
                assert file_entry['chksum_sha256'] == '45923ca2ece0e8ce31d29e5df9d8b649fe55e2f5b5b61c9724d7cc187bd6ad4a'
            else:
                assert file_entry['ftype'] == 'dir'
                assert file_entry['chksum_type'] is None
                assert file_entry['chksum_sha256'] is None

            assert len(file_entry.keys()) == 5


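# collection_install stubs out the actual install logic, so the tests below can
# assert on how GalaxyCLI invokes install_collections without touching the network.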
@pytest.fixture()
def collection_install(reset_cli_args, tmp_path_factory, monkeypatch):
    mock_install = MagicMock()
    monkeypatch.setattr(ansible.cli.galaxy, 'install_collections', mock_install)

    mock_warning = MagicMock()
    monkeypatch.setattr(ansible.utils.display.Display, 'warning', mock_warning)

    output_dir = to_text((tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Output')))
    yield mock_install, mock_warning, output_dir


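# In the assertions below, the positional args captured by mock_install appear to
# correspond (judging from the flag-specific tests further down) to:
# [0] requested collections, [1] install path, [2] servers, [3] validate_certs,
# [4] ignore_errors, [5] no_deps, [6] force, [7] force_with_deps.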
def test_collection_install_with_names(collection_install):
    mock_install, mock_warning, output_dir = collection_install

    galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', 'namespace2.collection:1.0.1',
                   '--collections-path', output_dir]
    GalaxyCLI(args=galaxy_args).run()

    collection_path = os.path.join(output_dir, 'ansible_collections')
    assert os.path.isdir(collection_path)

    assert mock_warning.call_count == 1
    assert "The specified collections path '%s' is not part of the configured Ansible collections path" % output_dir \
        in mock_warning.call_args[0][0]

    assert mock_install.call_count == 1
    assert mock_install.call_args[0][0] == [('namespace.collection', '*', None),
                                            ('namespace2.collection', '1.0.1', None)]
    assert mock_install.call_args[0][1] == collection_path
    assert mock_install.call_args[0][2] == ['https://galaxy.ansible.com']
    assert mock_install.call_args[0][3] is True
    assert mock_install.call_args[0][4] is False
    assert mock_install.call_args[0][5] is False
    assert mock_install.call_args[0][6] is False
    assert mock_install.call_args[0][7] is False


def test_collection_install_with_requirements_file(collection_install):
    mock_install, mock_warning, output_dir = collection_install

    requirements_file = os.path.join(output_dir, 'requirements.yml')
    with open(requirements_file, 'wb') as req_obj:
        req_obj.write(b'''---
collections:
- namespace.coll
- name: namespace2.coll
  version: '>2.0.1'
''')

    galaxy_args = ['ansible-galaxy', 'collection', 'install', '--requirements-file', requirements_file,
                   '--collections-path', output_dir]
    GalaxyCLI(args=galaxy_args).run()

    collection_path = os.path.join(output_dir, 'ansible_collections')
    assert os.path.isdir(collection_path)

    assert mock_warning.call_count == 1
    assert "The specified collections path '%s' is not part of the configured Ansible collections path" % output_dir \
        in mock_warning.call_args[0][0]

    assert mock_install.call_count == 1
    assert mock_install.call_args[0][0] == [('namespace.coll', '*', None),
                                            ('namespace2.coll', '>2.0.1', None)]
    assert mock_install.call_args[0][1] == collection_path
    assert mock_install.call_args[0][2] == ['https://galaxy.ansible.com']
    assert mock_install.call_args[0][3] is True
    assert mock_install.call_args[0][4] is False
    assert mock_install.call_args[0][5] is False
    assert mock_install.call_args[0][6] is False
    assert mock_install.call_args[0][7] is False


def test_collection_install_with_relative_path(collection_install, monkeypatch):
    mock_install = collection_install[0]

    mock_req = MagicMock()
    mock_req.return_value = [('namespace.coll', '*', None)]
    monkeypatch.setattr(ansible.cli.galaxy, 'parse_collections_requirements_file', mock_req)

    monkeypatch.setattr(os, 'makedirs', MagicMock())

    requirements_file = './requirements.myl'
    collections_path = './ansible_collections'
    galaxy_args = ['ansible-galaxy', 'collection', 'install', '--requirements-file', requirements_file,
                   '--collections-path', collections_path]
    GalaxyCLI(args=galaxy_args).run()

    assert mock_install.call_count == 1
    assert mock_install.call_args[0][0] == [('namespace.coll', '*', None)]
    assert mock_install.call_args[0][1] == os.path.abspath(collections_path)
    assert mock_install.call_args[0][2] == ['https://galaxy.ansible.com']
    assert mock_install.call_args[0][3] is True
    assert mock_install.call_args[0][4] is False
    assert mock_install.call_args[0][5] is False
    assert mock_install.call_args[0][6] is False
    assert mock_install.call_args[0][7] is False

    assert mock_req.call_count == 1
    assert mock_req.call_args[0][0] == os.path.abspath(requirements_file)


def test_collection_install_with_unexpanded_path(collection_install, monkeypatch):
    mock_install = collection_install[0]

    mock_req = MagicMock()
    mock_req.return_value = [('namespace.coll', '*', None)]
    monkeypatch.setattr(ansible.cli.galaxy, 'parse_collections_requirements_file', mock_req)

    monkeypatch.setattr(os, 'makedirs', MagicMock())

    requirements_file = '~/requirements.myl'
    collections_path = '~/ansible_collections'
    galaxy_args = ['ansible-galaxy', 'collection', 'install', '--requirements-file', requirements_file,
                   '--collections-path', collections_path]
    GalaxyCLI(args=galaxy_args).run()

    assert mock_install.call_count == 1
    assert mock_install.call_args[0][0] == [('namespace.coll', '*', None)]
    assert mock_install.call_args[0][1] == os.path.expanduser(os.path.expandvars(collections_path))
    assert mock_install.call_args[0][2] == ['https://galaxy.ansible.com']
    assert mock_install.call_args[0][3] is True
    assert mock_install.call_args[0][4] is False
    assert mock_install.call_args[0][5] is False
    assert mock_install.call_args[0][6] is False
    assert mock_install.call_args[0][7] is False

    assert mock_req.call_count == 1
    assert mock_req.call_args[0][0] == os.path.expanduser(os.path.expandvars(requirements_file))


def test_collection_install_in_collection_dir(collection_install, monkeypatch):
    mock_install, mock_warning, output_dir = collection_install

    collections_path = C.COLLECTIONS_PATHS[0]

    galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', 'namespace2.collection:1.0.1',
                   '--collections-path', collections_path]
    GalaxyCLI(args=galaxy_args).run()

    assert mock_warning.call_count == 0

    assert mock_install.call_count == 1
    assert mock_install.call_args[0][0] == [('namespace.collection', '*', None),
                                            ('namespace2.collection', '1.0.1', None)]
    assert mock_install.call_args[0][1] == os.path.join(collections_path, 'ansible_collections')
    assert mock_install.call_args[0][2] == ['https://galaxy.ansible.com']
    assert mock_install.call_args[0][3] is True
    assert mock_install.call_args[0][4] is False
    assert mock_install.call_args[0][5] is False
    assert mock_install.call_args[0][6] is False
    assert mock_install.call_args[0][7] is False


def test_collection_install_name_and_requirements_fail(collection_install):
    test_path = collection_install[2]
    expected = 'The positional collection_name arg and --requirements-file are mutually exclusive.'

    with pytest.raises(AnsibleError, match=expected):
        GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', 'namespace.collection', '--collections-path',
                        test_path, '--requirements-file', test_path]).run()


def test_collection_install_no_name_and_requirements_fail(collection_install):
    test_path = collection_install[2]
    expected = 'You must specify a collection name or a requirements file.'

    with pytest.raises(AnsibleError, match=expected):
        GalaxyCLI(args=['ansible-galaxy', 'collection', 'install', '--collections-path', test_path]).run()


def test_collection_install_path_with_ansible_collections(collection_install):
    mock_install, mock_warning, output_dir = collection_install

    collection_path = os.path.join(output_dir, 'ansible_collections')

    galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', 'namespace2.collection:1.0.1',
                   '--collections-path', collection_path]
    GalaxyCLI(args=galaxy_args).run()

    assert os.path.isdir(collection_path)

    assert mock_warning.call_count == 1
    assert "The specified collections path '%s' is not part of the configured Ansible collections path" \
        % collection_path in mock_warning.call_args[0][0]

    assert mock_install.call_count == 1
    assert mock_install.call_args[0][0] == [('namespace.collection', '*', None),
                                            ('namespace2.collection', '1.0.1', None)]
    assert mock_install.call_args[0][1] == collection_path
    assert mock_install.call_args[0][2] == ['https://galaxy.ansible.com']
    assert mock_install.call_args[0][3] is True
    assert mock_install.call_args[0][4] is False
    assert mock_install.call_args[0][5] is False
    assert mock_install.call_args[0][6] is False
    assert mock_install.call_args[0][7] is False


def test_collection_install_ignore_certs(collection_install):
    mock_install, mock_warning, output_dir = collection_install

    galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', '--collections-path', output_dir,
                   '--ignore-certs']
    GalaxyCLI(args=galaxy_args).run()

    assert mock_install.call_args[0][3] is False


def test_collection_install_force(collection_install):
    mock_install, mock_warning, output_dir = collection_install

    galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', '--collections-path', output_dir,
                   '--force']
    GalaxyCLI(args=galaxy_args).run()

    assert mock_install.call_args[0][6] is True


def test_collection_install_force_deps(collection_install):
    mock_install, mock_warning, output_dir = collection_install

    galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', '--collections-path', output_dir,
                   '--force-with-deps']
    GalaxyCLI(args=galaxy_args).run()

    assert mock_install.call_args[0][7] is True


def test_collection_install_no_deps(collection_install):
    mock_install, mock_warning, output_dir = collection_install

    galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', '--collections-path', output_dir,
                   '--no-deps']
    GalaxyCLI(args=galaxy_args).run()

    assert mock_install.call_args[0][5] is True


def test_collection_install_ignore(collection_install):
    mock_install, mock_warning, output_dir = collection_install

    galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', '--collections-path', output_dir,
                   '--ignore-errors']
    GalaxyCLI(args=galaxy_args).run()

    assert mock_install.call_args[0][4] is True


def test_collection_install_custom_server(collection_install):
    mock_install, mock_warning, output_dir = collection_install

    galaxy_args = ['ansible-galaxy', 'collection', 'install', 'namespace.collection', '--collections-path', output_dir,
                   '--server', 'https://galaxy-dev.ansible.com']
    GalaxyCLI(args=galaxy_args).run()

    assert mock_install.call_args[0][2] == ['https://galaxy-dev.ansible.com']