2016-11-30 06:21:53 +01:00
|
|
|
"""Miscellaneous utility functions and classes."""
|
2019-07-12 08:46:20 +02:00
|
|
|
from __future__ import (absolute_import, division, print_function)
|
|
|
|
__metaclass__ = type
|
2016-11-30 06:21:53 +01:00
|
|
|
|
2018-05-09 18:24:39 +02:00
|
|
|
import contextlib
|
2016-11-30 06:21:53 +01:00
|
|
|
import errno
|
2018-02-16 07:56:05 +01:00
|
|
|
import fcntl
|
2017-08-19 02:21:11 +02:00
|
|
|
import inspect
|
2016-11-30 06:21:53 +01:00
|
|
|
import os
|
2017-05-05 10:23:00 +02:00
|
|
|
import pkgutil
|
2017-10-26 09:21:46 +02:00
|
|
|
import random
|
|
|
|
import re
|
2016-11-30 06:21:53 +01:00
|
|
|
import shutil
|
2018-05-09 18:24:39 +02:00
|
|
|
import socket
|
2017-10-26 09:21:46 +02:00
|
|
|
import stat
|
|
|
|
import string
|
2016-11-30 06:21:53 +01:00
|
|
|
import subprocess
|
|
|
|
import sys
|
2019-08-27 23:03:23 +02:00
|
|
|
import tempfile
|
2016-11-30 06:21:53 +01:00
|
|
|
import time
|
2019-08-27 23:03:23 +02:00
|
|
|
import zipfile
|
2016-11-30 06:21:53 +01:00
|
|
|
|
2018-02-16 07:56:05 +01:00
|
|
|
from struct import unpack, pack
|
|
|
|
from termios import TIOCGWINSZ
|
|
|
|
|
2017-10-17 21:49:10 +02:00
|
|
|
try:
    # Python 3.4+ provides the ABC convenience base class directly.
    from abc import ABC
except ImportError:
    # Python 2 fallback: synthesize an equivalent base class from ABCMeta.
    from abc import ABCMeta
    ABC = ABCMeta('ABC', (), {})
|
2017-08-19 02:21:11 +02:00
|
|
|
|
2018-09-21 20:38:22 +02:00
|
|
|
try:
    # Python 3 module name.
    # noinspection PyCompatibility
    from configparser import ConfigParser
except ImportError:
    # Python 2 fallback: SafeConfigParser offers the interface used here.
    # noinspection PyCompatibility,PyUnresolvedReferences
    from ConfigParser import SafeConfigParser as ConfigParser
|
2018-09-21 20:38:22 +02:00
|
|
|
|
2019-05-24 22:10:33 +02:00
|
|
|
try:
    # Python 3: shlex.quote is the supported shell quoting function.
    # noinspection PyProtectedMember
    from shlex import quote as cmd_quote
except ImportError:
    # Python 2 fallback: pipes.quote provides the same quoting behavior.
    # noinspection PyProtectedMember
    from pipes import quote as cmd_quote
|
|
|
|
|
2019-08-06 23:43:29 +02:00
|
|
|
from . import types as t
|
2019-07-10 02:31:04 +02:00
|
|
|
|
2020-02-04 20:21:53 +01:00
|
|
|
from .encoding import (
|
|
|
|
to_bytes,
|
|
|
|
to_optional_bytes,
|
|
|
|
to_optional_text,
|
|
|
|
)
|
|
|
|
|
|
|
|
from .io import (
|
|
|
|
open_binary_file,
|
|
|
|
read_text_file,
|
|
|
|
)
|
|
|
|
|
2019-07-10 02:31:04 +02:00
|
|
|
try:
    # Generic type variable used by annotations throughout this module.
    C = t.TypeVar('C')
except AttributeError:
    # NOTE(review): presumably the types shim lacks TypeVar in some environments;
    # annotations here are comments only, so None is a safe placeholder.
    C = None
|
|
|
|
|
|
|
|
|
2019-07-11 22:03:49 +02:00
|
|
|
DOCKER_COMPLETION = {} # type: t.Dict[str, t.Dict[str, str]]
|
|
|
|
REMOTE_COMPLETION = {} # type: t.Dict[str, t.Dict[str, str]]
|
2020-02-21 00:27:08 +01:00
|
|
|
NETWORK_COMPLETION = {} # type: t.Dict[str, t.Dict[str, str]]
|
2019-07-11 22:03:49 +02:00
|
|
|
PYTHON_PATHS = {} # type: t.Dict[str, str]
|
2017-10-26 09:21:46 +02:00
|
|
|
|
2019-02-18 22:28:25 +01:00
|
|
|
try:
    # noinspection PyUnresolvedReferences
    MAXFD = subprocess.MAXFD
except AttributeError:
    # subprocess.MAXFD does not exist on Python 3; -1 marks it as unavailable
    MAXFD = -1
|
|
|
|
|
2019-08-07 20:43:42 +02:00
|
|
|
COVERAGE_CONFIG_NAME = 'coveragerc'
|
2019-03-13 15:14:12 +01:00
|
|
|
|
2019-08-09 01:14:19 +02:00
|
|
|
# Directory containing the ansible-test package (the parent of this module's directory).
ANSIBLE_TEST_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# assume running from install
ANSIBLE_ROOT = os.path.dirname(ANSIBLE_TEST_ROOT)
ANSIBLE_BIN_PATH = os.path.dirname(os.path.abspath(sys.argv[0]))
ANSIBLE_LIB_ROOT = os.path.join(ANSIBLE_ROOT, 'ansible')
ANSIBLE_SOURCE_ROOT = None  # only set when running from source (see below)

if not os.path.exists(ANSIBLE_LIB_ROOT):
    # running from source
    ANSIBLE_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(ANSIBLE_TEST_ROOT)))
    ANSIBLE_BIN_PATH = os.path.join(ANSIBLE_ROOT, 'bin')
    ANSIBLE_LIB_ROOT = os.path.join(ANSIBLE_ROOT, 'lib', 'ansible')
    ANSIBLE_SOURCE_ROOT = ANSIBLE_ROOT

# data files bundled with ansible-test (completion files, scripts, etc.)
ANSIBLE_TEST_DATA_ROOT = os.path.join(ANSIBLE_TEST_ROOT, '_data')
# configuration files bundled with ansible-test
ANSIBLE_TEST_CONFIG_ROOT = os.path.join(ANSIBLE_TEST_ROOT, 'config')
|
2019-07-10 02:31:04 +02:00
|
|
|
|
2019-03-13 15:14:12 +01:00
|
|
|
# Modes are set to allow all users the same level of access.
|
|
|
|
# This permits files to be used in tests that change users.
|
|
|
|
# The only exception is write access to directories for the user creating them.
|
|
|
|
# This avoids having to modify the directory permissions a second time.
|
|
|
|
|
|
|
|
MODE_READ = stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
|
|
|
|
|
|
|
|
MODE_FILE = MODE_READ
|
|
|
|
MODE_FILE_EXECUTE = MODE_FILE | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
|
|
|
|
MODE_FILE_WRITE = MODE_FILE | stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH
|
|
|
|
|
|
|
|
MODE_DIRECTORY = MODE_READ | stat.S_IWUSR | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
|
|
|
|
MODE_DIRECTORY_WRITE = MODE_DIRECTORY | stat.S_IWGRP | stat.S_IWOTH
|
|
|
|
|
2019-08-28 18:10:17 +02:00
|
|
|
REMOTE_ONLY_PYTHON_VERSIONS = (
|
|
|
|
'2.6',
|
|
|
|
)
|
|
|
|
|
|
|
|
SUPPORTED_PYTHON_VERSIONS = (
|
|
|
|
'2.6',
|
|
|
|
'2.7',
|
|
|
|
'3.5',
|
|
|
|
'3.6',
|
|
|
|
'3.7',
|
|
|
|
'3.8',
|
2020-03-01 03:17:08 +01:00
|
|
|
'3.9',
|
2019-08-28 18:10:17 +02:00
|
|
|
)
|
|
|
|
|
2019-07-23 04:24:48 +02:00
|
|
|
|
2017-10-26 09:21:46 +02:00
|
|
|
def get_docker_completion():
    """Return docker completion entries, loading them on first use.

    :rtype: dict[str, dict[str, str]]
    """
    completion = get_parameterized_completion(DOCKER_COMPLETION, 'docker')
    return completion
|
2017-10-26 09:21:46 +02:00
|
|
|
|
|
|
|
|
2019-03-28 00:40:27 +01:00
|
|
|
def get_remote_completion():
    """Return remote completion entries, loading them on first use.

    :rtype: dict[str, dict[str, str]]
    """
    completion = get_parameterized_completion(REMOTE_COMPLETION, 'remote')
    return completion
|
|
|
|
|
|
|
|
|
2020-02-21 00:27:08 +01:00
|
|
|
def get_network_completion():
    """Return network completion entries, loading them on first use.

    :rtype: dict[str, dict[str, str]]
    """
    completion = get_parameterized_completion(NETWORK_COMPLETION, 'network')
    return completion
|
|
|
|
|
|
|
|
|
2019-03-28 00:40:27 +01:00
|
|
|
def get_parameterized_completion(cache, name):
    """Populate the given cache from the named completion file and return it.

    The cache is only loaded when empty; later calls return it unchanged.

    :type cache: dict[str, dict[str, str]]
    :type name: str
    :rtype: dict[str, dict[str, str]]
    """
    if not cache:
        completion_path = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'completion', '%s.txt' % name)
        entries = read_lines_without_comments(completion_path, remove_blank_lines=True)
        parsed = (parse_parameterized_completion(entry) for entry in entries)
        cache.update(dict(item for item in parsed if item))

    return cache
|
2017-10-26 09:21:46 +02:00
|
|
|
|
2016-11-30 06:21:53 +01:00
|
|
|
|
2019-03-28 00:40:27 +01:00
|
|
|
def parse_parameterized_completion(value):
    """Parse one completion line into a (name, parameters) pair.

    Returns None for a blank line. Parameters without '=value' map to ''.

    :type value: str
    :rtype: tuple[str, dict[str, str]] | None
    """
    parts = value.split()

    if not parts:
        return None

    parameters = {}

    for item in parts[1:]:
        key, _sep, val = item.partition('=')
        parameters[key] = val

    return parts[0], parameters
|
|
|
|
|
|
|
|
|
2016-11-30 06:21:53 +01:00
|
|
|
def remove_file(path):
    """Remove the specified file if present, tolerating concurrent removal.

    :type path: str
    """
    if os.path.isfile(path):
        try:
            os.remove(path)
        except OSError as ex:
            # another process may remove the file between the isfile check
            # and the remove call; only a missing file is tolerated
            if ex.errno != errno.ENOENT:
                raise
|
|
|
|
|
|
|
|
|
2019-07-10 02:31:04 +02:00
|
|
|
def read_lines_without_comments(path, remove_blank_lines=False, optional=False):  # type: (str, bool, bool) -> t.List[str]
    """
    Returns lines from the specified text file with comments removed.
    Comments are any content from a hash symbol to the end of a line.
    Any spaces immediately before a comment are also removed.
    """
    if optional and not os.path.exists(path):
        return []

    stripped = [re.sub(r' *#.*$', '', line) for line in read_text_file(path).splitlines()]

    if not remove_blank_lines:
        return stripped

    return [line for line in stripped if line]
|
|
|
|
|
|
|
|
|
2016-11-30 06:21:53 +01:00
|
|
|
def find_executable(executable, cwd=None, path=None, required=True):
    """Locate an executable by direct path or by searching PATH.

    :type executable: str
    :type cwd: str
    :type path: str
    :type required: bool | str  # True raises on failure, 'warning' only warns
    :rtype: str | None
    """
    match = None
    real_cwd = os.getcwd()

    if not cwd:
        cwd = real_cwd

    if os.path.dirname(executable):
        # the executable includes a directory component: resolve it relative to cwd only
        target = os.path.join(cwd, executable)
        if os.path.exists(target) and os.access(target, os.F_OK | os.X_OK):
            match = executable
    else:
        if path is None:
            path = os.environ.get('PATH', os.path.defpath)

        if path:
            path_dirs = path.split(os.path.pathsep)
            seen_dirs = set()

            for path_dir in path_dirs:
                # skip directories already checked (PATH may contain duplicates)
                if path_dir in seen_dirs:
                    continue

                seen_dirs.add(path_dir)

                # substitute the requested cwd when PATH references the process cwd
                if os.path.abspath(path_dir) == real_cwd:
                    path_dir = cwd

                candidate = os.path.join(path_dir, executable)

                if os.path.exists(candidate) and os.access(candidate, os.F_OK | os.X_OK):
                    match = candidate
                    break

    if not match and required:
        message = 'Required program "%s" not found.' % executable

        if required != 'warning':
            raise ApplicationError(message)

        display.warning(message)

    return match
|
|
|
|
|
|
|
|
|
2019-07-27 01:46:52 +02:00
|
|
|
def find_python(version, path=None, required=True):
    """Locate the requested Python interpreter and return its path.

    :type version: str
    :type path: str | None
    :type required: bool
    :rtype: str
    """
    requested = tuple(int(part) for part in version.split('.'))
    current = sys.version_info[:len(requested)]

    if path or requested != current:
        return find_executable('python%s' % version, path=path, required=required)

    # the running interpreter already matches the requested version
    return sys.executable
|
|
|
|
|
|
|
|
|
2019-08-26 23:02:55 +02:00
|
|
|
def get_ansible_version():  # type: () -> str
    """Return the Ansible version."""
    try:
        # return the previously cached value (stored as a function attribute)
        return get_ansible_version.version
    except AttributeError:
        pass

    # ansible may not be in our sys.path
    # avoids a symlink to release.py since ansible placement relative to ansible-test may change during delegation
    load_module(os.path.join(ANSIBLE_LIB_ROOT, 'release.py'), 'ansible_release')

    # noinspection PyUnresolvedReferences
    from ansible_release import __version__ as ansible_version  # pylint: disable=import-error

    # cache for subsequent calls
    get_ansible_version.version = ansible_version

    return ansible_version
|
|
|
|
|
|
|
|
|
|
|
|
def get_available_python_versions(versions):  # type: (t.List[str]) -> t.Dict[str, str]
    """Return a dictionary indicating which of the requested Python versions are available.

    Results are cached per requested version list; previously the first call's
    result was returned regardless of the versions requested later.
    """
    try:
        cache = get_available_python_versions.cache
    except AttributeError:
        cache = get_available_python_versions.cache = {}

    key = tuple(versions)

    if key not in cache:
        # map each requested version to its interpreter path, omitting versions not found
        cache[key] = dict((version, path) for version, path in
                          ((version, find_python(version, required=False)) for version in versions) if path)

    return cache[key]
|
2019-07-27 01:46:52 +02:00
|
|
|
|
|
|
|
|
2018-03-14 19:35:59 +01:00
|
|
|
def generate_pip_command(python):
    """Return the command used to run the bundled pip wrapper with the given interpreter.

    :type python: str
    :rtype: list[str]
    """
    quiet_pip = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'quiet_pip.py')
    return [python, quiet_pip]
|
2018-03-14 19:35:59 +01:00
|
|
|
|
|
|
|
|
2017-01-26 22:07:10 +01:00
|
|
|
def raw_command(cmd, capture=False, env=None, data=None, cwd=None, explain=False, stdin=None, stdout=None,
                cmd_verbosity=1, str_errors='strict'):
    """Run the given command, optionally capturing output, and return (stdout, stderr).

    :type cmd: collections.Iterable[str]
    :type capture: bool
    :type env: dict[str, str] | None
    :type data: str | None  # text written to the process on stdin
    :type cwd: str | None
    :type explain: bool  # when True, log the command but do not run it
    :type stdin: file | None
    :type stdout: file | None
    :type cmd_verbosity: int
    :type str_errors: str  # decode error handling for captured output
    :rtype: str | None, str | None
    :raises SubprocessError: when the command exits with a non-zero status
    """
    if not cwd:
        cwd = os.getcwd()

    if not env:
        env = common_environment()

    cmd = list(cmd)

    # shell-quoted form used only for display
    escaped_cmd = ' '.join(cmd_quote(c) for c in cmd)

    display.info('Run command: %s' % escaped_cmd, verbosity=cmd_verbosity, truncate=True)
    display.info('Working directory: %s' % cwd, verbosity=2)

    # locate the program up-front so a missing executable is reported early (warning only)
    program = find_executable(cmd[0], cwd=cwd, path=env['PATH'], required='warning')

    if program:
        display.info('Program found: %s' % program, verbosity=2)

    for key in sorted(env.keys()):
        display.info('%s=%s' % (key, env[key]), verbosity=2)

    if explain:
        return None, None

    # communicate is set when process.communicate() must be used instead of wait()
    communicate = False

    if stdin is not None:
        # an explicit stdin stream takes precedence over inline data
        data = None
        communicate = True
    elif data is not None:
        stdin = subprocess.PIPE
        communicate = True

    if stdout:
        communicate = True

    if capture:
        stdout = stdout or subprocess.PIPE
        stderr = subprocess.PIPE
        communicate = True
    else:
        stderr = None

    start = time.time()
    process = None

    try:
        try:
            # encode command and environment to bytes for consistent behavior on Python 2
            cmd_bytes = [to_bytes(c) for c in cmd]
            env_bytes = dict((to_bytes(k), to_bytes(v)) for k, v in env.items())
            process = subprocess.Popen(cmd_bytes, env=env_bytes, stdin=stdin, stdout=stdout, stderr=stderr, cwd=cwd)
        except OSError as ex:
            if ex.errno == errno.ENOENT:
                raise ApplicationError('Required program "%s" not found.' % cmd[0])
            raise

        if communicate:
            data_bytes = to_optional_bytes(data)
            stdout_bytes, stderr_bytes = process.communicate(data_bytes)
            stdout_text = to_optional_text(stdout_bytes, str_errors) or u''
            stderr_text = to_optional_text(stderr_bytes, str_errors) or u''
        else:
            process.wait()
            stdout_text, stderr_text = None, None
    finally:
        # if an exception escaped before the process finished, do not leave it running
        if process and process.returncode is None:
            process.kill()
            display.info('')  # the process we're interrupting may have completed a partial line of output
            display.notice('Killed command to avoid an orphaned child process during handling of an unexpected exception.')

    status = process.returncode
    runtime = time.time() - start

    display.info('Command exited with status %s after %s seconds.' % (status, runtime), verbosity=4)

    if status == 0:
        return stdout_text, stderr_text

    raise SubprocessError(cmd, status, stdout_text, stderr_text, runtime)
|
2016-11-30 06:21:53 +01:00
|
|
|
|
|
|
|
|
|
|
|
def common_environment():
    """Common environment used for executing all programs.

    :rtype: dict[str, str]
    """
    env = dict(
        # consistent UTF-8 locale for all child processes
        LC_ALL='en_US.UTF-8',
        PATH=os.environ.get('PATH', os.path.defpath),
    )

    # variables which must be present in os.environ
    required = (
        'HOME',
    )

    # variables which are passed through only when present in os.environ
    optional = (
        'HTTPTESTER',
        'LD_LIBRARY_PATH',
        'SSH_AUTH_SOCK',
        # MacOS High Sierra Compatibility
        # http://sealiesoftware.com/blog/archive/2017/6/5/Objective-C_and_fork_in_macOS_1013.html
        # Example configuration for macOS:
        # export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
        'OBJC_DISABLE_INITIALIZE_FORK_SAFETY',
        'ANSIBLE_KEEP_REMOTE_FILES',
        # MacOS Homebrew Compatibility
        # https://cryptography.io/en/latest/installation/#building-cryptography-on-macos
        # This may also be required to install pyyaml with libyaml support when installed in non-standard locations.
        # Example configuration for brew on macOS:
        # export LDFLAGS="-L$(brew --prefix openssl)/lib/ -L$(brew --prefix libyaml)/lib/"
        # export CFLAGS="-I$(brew --prefix openssl)/include/ -I$(brew --prefix libyaml)/include/"
        # However, this is not adequate for PyYAML 3.13, which is the latest version supported on Python 2.6.
        # For that version the standard location must be used, or `pip install` must be invoked with additional options:
        # --global-option=build_ext --global-option=-L{path_to_lib_dir}
        'LDFLAGS',
        'CFLAGS',
    )

    env.update(pass_vars(required=required, optional=optional))

    return env
|
|
|
|
|
|
|
|
|
2017-05-18 19:37:53 +02:00
|
|
|
def pass_vars(required, optional):
    """Build an environment dict from selected os.environ entries.

    :type required: collections.Iterable[str]  # missing entries raise
    :type optional: collections.Iterable[str]  # missing entries are skipped
    :rtype: dict[str, str]
    :raises MissingEnvironmentVariable: when a required variable is absent
    """
    env = {}

    for name in required:
        if name not in os.environ:
            raise MissingEnvironmentVariable(name)
        env[name] = os.environ[name]

    for name in optional:
        if name in os.environ:
            env[name] = os.environ[name]

    return env
|
|
|
|
|
|
|
|
|
|
|
|
def deepest_path(path_a, path_b):
    """Return the deepest of two paths, or None if the paths are unrelated.

    A path only contains another when the prefix ends at a path component
    boundary; previously 'a/bc' was incorrectly treated as inside 'a/b'.

    :type path_a: str
    :type path_b: str
    :rtype: str | None
    """
    if path_a == '.':
        path_a = ''

    if path_b == '.':
        path_b = ''

    if _path_contains(path_b, path_a):
        return path_a or '.'

    if _path_contains(path_a, path_b):
        return path_b or '.'

    return None


def _path_contains(parent, child):  # type: (str, str) -> bool
    """Return True if parent is empty, equal to child, or an ancestor directory of child."""
    if not parent:
        return True

    return child == parent or child.startswith(parent + os.path.sep)
|
|
|
|
|
|
|
|
|
|
|
|
def remove_tree(path):
    """Recursively remove the directory tree at path, tolerating its absence.

    :type path: str
    """
    try:
        shutil.rmtree(to_bytes(path))
    except OSError as ex:
        if ex.errno == errno.ENOENT:
            return  # nothing to remove

        raise
|
|
|
|
|
|
|
|
|
2017-03-15 20:17:42 +01:00
|
|
|
def is_binary_file(path):
    """Guess whether a file is binary: by extension first, then by content sniffing.

    :type path: str
    :rtype: bool
    """
    text_extensions = frozenset([
        '.cfg',
        '.conf',
        '.crt',
        '.cs',
        '.css',
        '.html',
        '.ini',
        '.j2',
        '.js',
        '.json',
        '.md',
        '.pem',
        '.ps1',
        '.psm1',
        '.py',
        '.rst',
        '.sh',
        '.txt',
        '.xml',
        '.yaml',
        '.yml',
    ])

    binary_extensions = frozenset([
        '.bin',
        '.eot',
        '.gz',
        '.ico',
        '.iso',
        '.jpg',
        '.otf',
        '.p12',
        '.png',
        '.pyc',
        '.rpm',
        '.ttf',
        '.woff',
        '.woff2',
        '.zip',
    ])

    extension = os.path.splitext(path)[1]

    if extension in text_extensions:
        return False

    if extension in binary_extensions:
        return True

    # unknown extension: sniff the first 1KB for a NUL byte
    with open_binary_file(path) as file_obj:
        # noinspection PyTypeChecker
        return b'\0' in file_obj.read(1024)
|
|
|
|
|
|
|
|
|
2018-03-07 23:02:31 +01:00
|
|
|
def generate_password():
    """Generate a random password.

    :rtype: str
    """
    # Character classes to draw from, weighted so letters and digits appear
    # more often than the '-' separator class.
    chars = [
        string.ascii_letters,
        string.digits,
        string.ascii_letters,
        string.digits,
        '-',
    ] * 4

    # One character per class; chars[:-1] drops the final '-' class so the
    # password never ends with a dash (19 characters total).
    password = ''.join([random.choice(char) for char in chars[:-1]])

    # register the password so display output redacts it
    display.sensitive.add(password)

    return password
|
|
|
|
|
|
|
|
|
2019-07-12 22:17:20 +02:00
|
|
|
class Display:
    """Manages color console output."""
    # ANSI escape sequences used for colorized output
    clear = '\033[0m'
    red = '\033[31m'
    green = '\033[32m'
    yellow = '\033[33m'
    blue = '\033[34m'
    purple = '\033[35m'
    cyan = '\033[36m'

    # color used for info messages at each verbosity level (unlisted levels use yellow)
    verbosity_colors = {
        0: None,
        1: green,
        2: blue,
        3: cyan,
    }

    def __init__(self):
        self.verbosity = 0  # messages above this verbosity are suppressed
        self.color = sys.stdout.isatty()  # colorize only when stdout is a terminal
        self.warnings = []  # all warnings issued, replayed by review_warnings()
        self.warnings_unique = set()  # warnings issued with unique=True, used to suppress duplicates
        self.info_stderr = False  # when True, info messages go to stderr instead of stdout
        self.rows = 0  # terminal height, 0 when unknown
        self.columns = 0  # terminal width, 0 when unknown
        self.truncate = 0  # column at which to truncate when requested, 0 disables truncation
        self.redact = True  # when True, sensitive values are masked in output
        self.sensitive = set()  # values to mask when redact is enabled

        if os.isatty(0):
            # query the terminal size from the controlling tty
            self.rows, self.columns = unpack('HHHH', fcntl.ioctl(0, TIOCGWINSZ, pack('HHHH', 0, 0, 0, 0)))[:2]

    def __warning(self, message):
        """Print a warning message to stderr.

        :type message: str
        """
        self.print_message('WARNING: %s' % message, color=self.purple, fd=sys.stderr)

    def review_warnings(self):
        """Review all warnings which previously occurred."""
        if not self.warnings:
            return

        self.__warning('Reviewing previous %d warning(s):' % len(self.warnings))

        for warning in self.warnings:
            self.__warning(warning)

    def warning(self, message, unique=False, verbosity=0):
        """Print and record a warning message.

        :type message: str
        :type unique: bool  # when True, suppress repeats of the same message
        :type verbosity: int
        """
        if verbosity > self.verbosity:
            return

        if unique:
            if message in self.warnings_unique:
                return

            self.warnings_unique.add(message)

        self.__warning(message)
        self.warnings.append(message)

    def notice(self, message):
        """Print a notice message to stderr.

        :type message: str
        """
        self.print_message('NOTICE: %s' % message, color=self.purple, fd=sys.stderr)

    def error(self, message):
        """Print an error message to stderr.

        :type message: str
        """
        self.print_message('ERROR: %s' % message, color=self.red, fd=sys.stderr)

    def info(self, message, verbosity=0, truncate=False):
        """Print an informational message if the current verbosity is high enough.

        :type message: str
        :type verbosity: int
        :type truncate: bool
        """
        if self.verbosity >= verbosity:
            color = self.verbosity_colors.get(verbosity, self.yellow)
            self.print_message(message, color=color, fd=sys.stderr if self.info_stderr else sys.stdout, truncate=truncate)

    def print_message(self, message, color=None, fd=sys.stdout, truncate=False):  # pylint: disable=locally-disabled, invalid-name
        """Print a message, applying redaction, truncation and color as configured.

        :type message: str
        :type color: str | None
        :type fd: file
        :type truncate: bool
        """
        if self.redact and self.sensitive:
            # mask registered sensitive values (such as generated passwords)
            for item in self.sensitive:
                if not item:
                    continue

                message = message.replace(item, '*' * len(item))

        if truncate:
            if len(message) > self.truncate > 5:
                message = message[:self.truncate - 5] + ' ...'

        if color and self.color:
            # convert color resets in message to desired color
            message = message.replace(self.clear, color)
            message = '%s%s%s' % (color, message, self.clear)

        if sys.version_info[0] == 2:
            # avoid implicit encoding errors when printing unicode on Python 2
            message = to_bytes(message)

        print(message, file=fd)
        fd.flush()
|
|
|
|
|
|
|
|
|
|
|
|
class ApplicationError(Exception):
    """General application error. Base class for errors raised by this module."""
|
|
|
|
|
|
|
|
|
|
|
|
class ApplicationWarning(Exception):
    """General application warning which interrupts normal program flow."""
|
|
|
|
|
|
|
|
|
|
|
|
class SubprocessError(ApplicationError):
    """Error resulting from failed subprocess execution."""
    def __init__(self, cmd, status=0, stdout=None, stderr=None, runtime=None):
        """
        :type cmd: list[str]
        :type status: int
        :type stdout: str | None
        :type stderr: str | None
        :type runtime: float | None
        """
        # shell-quote the command so the message can be copied and re-run as-is
        message = 'Command "%s" returned exit status %s.\n' % (' '.join(cmd_quote(c) for c in cmd), status)

        if stderr:
            message += '>>> Standard Error\n'
            # Display.clear resets any color the captured output left active
            message += '%s%s\n' % (stderr.strip(), Display.clear)

        if stdout:
            message += '>>> Standard Output\n'
            message += '%s%s\n' % (stdout.strip(), Display.clear)

        message = message.strip()

        super(SubprocessError, self).__init__(message)

        self.cmd = cmd
        self.message = message
        self.status = status
        self.stdout = stdout
        self.stderr = stderr
        self.runtime = runtime
|
|
|
|
|
|
|
|
|
|
|
|
class MissingEnvironmentVariable(ApplicationError):
    """Error caused by missing environment variable."""
    def __init__(self, name):
        """
        :type name: str
        """
        message = 'Missing environment variable: %s' % name

        super(MissingEnvironmentVariable, self).__init__(message)

        self.name = name
|
|
|
|
|
|
|
|
|
2020-02-21 00:27:08 +01:00
|
|
|
class NetworkPlatformSettings:
    """Settings required for provisioning a network platform."""
    def __init__(self, collection, inventory_vars):  # type: (str, t.Dict[str, str]) -> None
        """
        :type collection: str
        :type inventory_vars: dict[str, str]
        """
        # fixed annotation: inventory_vars is a dict, not t.Type[str, str] (invalid)
        self.collection = collection
        self.inventory_vars = inventory_vars
|
|
|
|
|
|
|
|
|
|
|
|
def get_network_settings(args, platform, version):  # type: (NetworkIntegrationConfig, str, str) -> NetworkPlatformSettings
    """Returns settings for the given network platform and version."""
    platform_version = '%s/%s' % (platform, version)
    completion = get_network_completion().get(platform_version, {})
    # values supplied via args take precedence over the completion data defaults
    collection = args.platform_collection.get(platform, completion.get('collection'))

    settings = NetworkPlatformSettings(
        collection,
        dict(
            ansible_connection=args.platform_connection.get(platform, completion.get('connection')),
            # fully-qualified network OS name when a collection is known, bare platform otherwise
            ansible_network_os='%s.%s' % (collection, platform) if collection else platform,
        )
    )

    return settings
|
|
|
|
|
|
|
|
|
2017-01-19 01:31:34 +01:00
|
|
|
def docker_qualify_image(name):
    """Map a docker image short name to its fully qualified name, if known.

    Unknown names are returned unchanged.

    :type name: str
    :rtype: str
    """
    completion_entry = get_docker_completion().get(name, {})
    qualified = completion_entry.get('name', name)

    return qualified
|
2017-01-19 01:31:34 +01:00
|
|
|
|
|
|
|
|
2018-09-21 07:20:08 +02:00
|
|
|
def parse_to_list_of_dict(pattern, value):
    """Match each line of value against pattern and collect the named groups.

    :type pattern: str
    :type value: str
    :return: list[dict[str, str]]
    :raises Exception: when any line fails to match the pattern
    """
    matched = []
    unmatched = []

    for line in value.splitlines():
        found = re.search(pattern, line)

        if found is None:
            unmatched.append(line)
        else:
            matched.append(found.groupdict())

    if unmatched:
        raise Exception('Pattern "%s" did not match values:\n%s' % (pattern, '\n'.join(unmatched)))

    return matched
|
2017-04-13 19:28:52 +02:00
|
|
|
|
|
|
|
|
2018-05-09 18:24:39 +02:00
|
|
|
def get_available_port():
    """Return a TCP port number that was free at the time of the call.

    :rtype: int
    """
    # this relies on the kernel not reusing previously assigned ports immediately
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    with contextlib.closing(probe):
        probe.bind(('', 0))
        _host, port = probe.getsockname()
        return port
|
|
|
|
|
|
|
|
|
2019-07-10 02:31:04 +02:00
|
|
|
def get_subclasses(class_type):  # type: (t.Type[C]) -> t.Set[t.Type[C]]
    """Returns the set of types that are concrete subclasses of the given type."""
    found = set()  # type: t.Set[t.Type[C]]
    pending = [class_type]  # type: t.List[t.Type[C]]

    while pending:
        current = pending.pop()

        for subclass in current.__subclasses__():
            if subclass in found:
                continue

            pending.append(subclass)

            # abstract intermediate classes are traversed but not returned
            if not inspect.isabstract(subclass):
                found.add(subclass)

    return found
|
|
|
|
|
|
|
|
|
2019-07-10 02:31:04 +02:00
|
|
|
def is_subdir(candidate_path, path):  # type: (str, str) -> bool
    """Returns true if candidate_path is path or a subdirectory of path."""
    sep = os.path.sep

    # normalize both paths with a trailing separator so prefix comparison
    # only matches at directory boundaries
    if not path.endswith(sep):
        path += sep

    if not candidate_path.endswith(sep):
        candidate_path += sep

    return candidate_path.startswith(path)
|
|
|
|
|
|
|
|
|
2019-07-30 18:28:44 +02:00
|
|
|
def paths_to_dirs(paths):  # type: (t.List[str]) -> t.List[str]
    """Returns a list of directories extracted from the given list of paths."""
    dir_names = set()

    for path in paths:
        parent = os.path.dirname(path)

        # walk up the directory chain, recording each ancestor with a trailing separator
        while parent and parent != os.path.sep:
            dir_names.add(parent + os.path.sep)
            parent = os.path.dirname(parent)

    return sorted(dir_names)
|
|
|
|
|
|
|
|
|
2020-03-29 06:33:13 +02:00
|
|
|
def str_to_version(version):  # type: (str) -> t.Tuple[int]
    """Return a version tuple from a version string."""
    return tuple(map(int, version.split('.')))
|
|
|
|
|
|
|
|
|
|
|
|
def version_to_str(version):  # type: (t.Tuple[int]) -> str
    """Return a version string from a version tuple."""
    return '.'.join(map(str, version))
|
|
|
|
|
|
|
|
|
2019-07-10 02:31:04 +02:00
|
|
|
def import_plugins(directory, root=None):  # type: (str, t.Optional[str]) -> None
    """
    Import plugins from the given directory relative to the given root.
    If the root is not provided, the 'lib' directory for the test runner will be used.
    """
    # Default the root to the directory containing this module.
    plugin_root = os.path.dirname(__file__) if root is None else root
    search_path = os.path.join(plugin_root, directory)
    # Parent package of this module, used to build fully qualified module names.
    parent_package = __name__.rsplit('.', 1)[0]
    module_prefix = '%s.%s.' % (parent_package, directory.replace(os.path.sep, '.'))

    for _module_loader, full_name, _ispkg in pkgutil.iter_modules([search_path], prefix=module_prefix):
        # Convert the dotted module name back into a source file path under the root.
        relative_path = full_name[len(parent_package) + 1:].replace('.', os.path.sep)
        load_module(os.path.join(plugin_root, relative_path + '.py'), full_name)
|
2017-05-05 10:23:00 +02:00
|
|
|
|
|
|
|
|
2019-07-10 02:31:04 +02:00
|
|
|
def load_plugins(base_type, database):  # type: (t.Type[C], t.Dict[str, t.Type[C]]) -> None
    """
    Load plugins of the specified type and track them in the specified database.
    Only plugins which have already been imported will be loaded.
    """
    # Key each concrete subclass by the final component of its module name.
    plugins = {sc.__module__.rsplit('.', 1)[1]: sc for sc in get_subclasses(base_type)}  # type: t.Dict[str, t.Type[C]]

    database.update(plugins)
|
|
|
|
|
|
|
|
|
2019-07-10 02:31:04 +02:00
|
|
|
def load_module(path, name):  # type: (str, str) -> None
    """Load a Python module using the given name and path."""
    # Already loaded; nothing to do.
    if name in sys.modules:
        return

    if sys.version_info >= (3, 4):
        # Python 3.4+: use importlib machinery to load the module from its source file.
        # noinspection PyUnresolvedReferences
        import importlib.util

        # noinspection PyUnresolvedReferences
        spec = importlib.util.spec_from_file_location(name, path)
        # noinspection PyUnresolvedReferences
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)

        # NOTE(review): the module is registered in sys.modules only after
        # exec_module, so it cannot be imported recursively while executing —
        # confirm no plugin relies on importing itself during load.
        sys.modules[name] = module
    else:
        # Python 2: fall back to the deprecated imp module.
        # noinspection PyDeprecation
        import imp

        # load_source (and thus load_module) require a file opened with `open` in text mode
        with open(to_bytes(path)) as module_file:
            # imp.load_module registers the module in sys.modules itself.
            # noinspection PyDeprecation
            imp.load_module(name, module_file, path, ('.py', 'r', imp.PY_SOURCE))
|
|
|
|
|
|
|
|
|
2019-08-27 23:03:23 +02:00
|
|
|
@contextlib.contextmanager
def tempdir():  # type: () -> str
    """Creates a temporary directory that is deleted outside the context scope."""
    temp_path = tempfile.mkdtemp()

    try:
        yield temp_path
    finally:
        # Remove the directory even if the caller's block raised an exception;
        # previously an exception skipped cleanup and leaked temp directories.
        shutil.rmtree(temp_path)
|
|
|
|
|
|
|
|
|
|
|
|
@contextlib.contextmanager
def open_zipfile(path, mode='r'):
    """Opens a zip file and closes the file automatically."""
    zip_obj = zipfile.ZipFile(path, mode=mode)

    try:
        yield zip_obj
    finally:
        # Close even when the caller's block raises, avoiding a leaked file
        # handle (and an unflushed archive when writing); previously an
        # exception skipped the close() call entirely.
        zip_obj.close()
|
|
|
|
|
|
|
|
|
2016-11-30 06:21:53 +01:00
|
|
|
# Shared module-level output handler. Display is presumably defined earlier in
# this file (not visible in this chunk) — verify before relying on its API.
display = Display()  # pylint: disable=locally-disabled, invalid-name
|