Update ansible-test sanity command. (#31958)
* Use correct pip version in ansible-test.
* Add git fallback for validate-modules.
* Run sanity tests in a docker container.
* Use correct python version for sanity tests.
* Pin docker completion images and add default.
* Split pylint execution into multiple contexts.
* Only test .py files in use-argspec-type-path test.
* Accept identical python interpreter name or binary.
* Switch cloud tests to default container.
* Remove unused extras from pip install.
* Filter out empty pip commands.
* Don't force running of pip list.
* Support delegation for windows and network tests.
* Fix ansible-test python version usage.
* Fix ansible-test python version skipping.
* Use absolute path for log in ansible-test.
* Run vyos_command test on python 3.
* Fix windows/network instance persistence.
* Add `test/cache` dir to classification.
* Enable more python versions for network tests.
* Fix cs_router test.
Parent: 602a618e60
Commit: cf1337ca9a
37 changed files with 788 additions and 456 deletions
.gitignore (vendored)
@@ -69,6 +69,7 @@ results.xml
 coverage.xml
 /test/units/cover-html
 /test/integration/targets/*/backup/
+/test/cache/*
 # Development
 /test/develop
 venv
@@ -62,11 +62,11 @@ matrix:
     - env: T=linux/ubuntu1604/3
     - env: T=linux/ubuntu1604py3/3

-    - env: T=cloud/ubuntu1604/1
-    - env: T=cloud/ubuntu1604py3/1
+    - env: T=cloud/default/2.7/1
+    - env: T=cloud/default/3.6/1

-    - env: T=cloud/ubuntu1604/2
-    - env: T=cloud/ubuntu1604py3/2
+    - env: T=cloud/default/2.7/2
+    - env: T=cloud/default/3.6/2

 branches:
   except:
@@ -49,12 +49,8 @@
     - instance.name == "instance-vm"
     - instance.state == "Running"

-- name: install jq
-  package:
-    name: jq
-
- name: setup find the routers name
-  shell: cs listRouters listall=true networkid="{{ net.id }}" zone="{{ cs_common_zone_adv }}" | jq ".router[].name" | tr -d '"'
+  shell: cs listRouters listall=true networkid="{{ net.id }}" zone="{{ cs_common_zone_adv }}"
   args:
     chdir: "{{ playbook_dir }}"
   register: router

@@ -63,7 +59,10 @@
     var: router.stdout

 - set_fact:
-    router_name: "{{ router.stdout }}"
+    router_json: "{{ router.stdout | from_json }}"
+
+- set_fact:
+    router_name: "{{ router_json.router[0].name }}"

 - name: test router started
   cs_router:
@@ -1,2 +1 @@
 network/ci
-skip/python3
@@ -1,9 +1,10 @@
-centos6
-centos7
-fedora24
-fedora25
-opensuse42.2
-opensuse42.3
-ubuntu1404
-ubuntu1604
-ubuntu1604py3
+centos6@sha256:41eb4b870ce400202945ccf572d45bf5f2f5ebb50e9dee244de73b9d0278db30
+centos7@sha256:bd571611112cccefdaa951ea640177cbb77c8ee011f958d2562781d90594ea9c
+default@sha256:424161033bf1342bc463c27c5fad182c171aa3bc17b3c1fe7aac44623cc8d304
+fedora24@sha256:7b642c5d25b779a3a605fb8f70d9d92972f2004a5266fe364264809899fb1117
+fedora25@sha256:828c71d87f1636f4d09916b8e2d87fc9a615d361a9afed22e8843ffb3d2729d2
+opensuse42.2@sha256:fc22d6684910018d2e5f2e8613391b5ae5aca7760d365ac3098971b7aa41d8a2
+opensuse42.3@sha256:7f48e874367528711a1df7ff16da5667d67d2eb15902b8e5151d34546e6af04d
+ubuntu1404@sha256:ba27d23e815a4c3fb361001aea2ef70241d66f08bdf962cf5717037e882ff78a
+ubuntu1604@sha256:ff3898ac817a10ec7129f6483721a717ed0d98c6ba42c27be1472d73908568da
+ubuntu1604py3@sha256:f0b7883eb3f17ee7cb3a77f4aeea0d743101e103f93a76f4f5120aed9c44c0bc
test/runner/injector/ansible-connection (new symbolic link)
@@ -0,0 +1 @@
+injector.py
@@ -49,6 +49,9 @@ def ansible_environment(args, color=True):
     env.update(ansible)

     if args.debug:
-        env.update(dict(ANSIBLE_DEBUG='true'))
+        env.update(dict(
+            ANSIBLE_DEBUG='true',
+            ANSIBLE_LOG_PATH=os.path.abspath('test/results/logs/debug.log'),
+        ))

     return env
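Note: a minimal standalone sketch of the environment update shown above, using a plain dict instead of the real lib helpers (the function name is illustrative):

    import os

    def debug_environment(env, debug_enabled):
        # Mirror the change above: when --debug is set, also send Ansible's log
        # to an absolute path under test/results/logs.
        if debug_enabled:
            env.update(dict(
                ANSIBLE_DEBUG='true',
                ANSIBLE_LOG_PATH=os.path.abspath('test/results/logs/debug.log'),
            ))
        return env

    print(debug_environment({}, True)['ANSIBLE_LOG_PATH'])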
@@ -367,6 +367,9 @@ class PathMapper(object):

             return minimal

+        if path.startswith('test/cache/'):
+            return minimal
+
         if path.startswith('test/compile/'):
             return {
                 'compile': 'all',
@@ -43,6 +43,7 @@ class EnvironmentConfig(CommonConfig):
         self.docker_privileged = args.docker_privileged if 'docker_privileged' in args else False  # type: bool
         self.docker_util = docker_qualify_image(args.docker_util if 'docker_util' in args else '')  # type: str
         self.docker_pull = args.docker_pull if 'docker_pull' in args else False  # type: bool
+        self.docker_keep_git = args.docker_keep_git if 'docker_keep_git' in args else False  # type: bool

         self.tox_sitepackages = args.tox_sitepackages  # type: bool

@@ -53,7 +54,7 @@ class EnvironmentConfig(CommonConfig):
         self.requirements = args.requirements  # type: bool

         if self.python == 'default':
-            self.python = '.'.join(str(i) for i in sys.version_info[:2])
+            self.python = None

         self.python_version = self.python or '.'.join(str(i) for i in sys.version_info[:2])
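Note: a standalone sketch of the interpreter fallback above; resolve_python is an illustrative helper, not the real EnvironmentConfig:

    import sys

    def resolve_python(requested):
        # '--python default' now clears the explicit request (python=None) so delegation
        # can choose an interpreter, while python_version still falls back to the running one.
        python = None if requested == 'default' else requested
        python_version = python or '.'.join(str(i) for i in sys.version_info[:2])
        return python, python_version

    print(resolve_python('default'))  # (None, version of the current interpreter)
    print(resolve_python('2.7'))      # ('2.7', '2.7')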
@@ -8,6 +8,7 @@ import traceback
 import uuid
 import errno
 import time
+import shutil

 from lib.http import (
     HttpClient,
@@ -35,13 +36,14 @@ AWS_ENDPOINTS = {

 class AnsibleCoreCI(object):
     """Client for Ansible Core CI services."""
-    def __init__(self, args, platform, version, stage='prod', persist=True, name=None):
+    def __init__(self, args, platform, version, stage='prod', persist=True, load=True, name=None):
         """
         :type args: EnvironmentConfig
         :type platform: str
         :type version: str
         :type stage: str
         :type persist: bool
+        :type load: bool
         :type name: str
         """
         self.args = args
@@ -106,7 +108,7 @@ class AnsibleCoreCI(object):

         self.path = os.path.expanduser('~/.ansible/test/instances/%s-%s' % (self.name, self.stage))

-        if persist and self._load():
+        if persist and load and self._load():
             try:
                 display.info('Checking existing %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                              verbosity=1)
@@ -125,7 +127,7 @@ class AnsibleCoreCI(object):

                 self.instance_id = None
                 self.endpoint = None
-        else:
+        elif not persist:
             self.instance_id = None
             self.endpoint = None
             self._clear()
@@ -160,6 +162,11 @@ class AnsibleCoreCI(object):

     def start(self):
         """Start instance."""
+        if self.started:
+            display.info('Skipping started %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
+                         verbosity=1)
+            return
+
         if is_shippable():
             return self.start_shippable()
@@ -289,11 +296,6 @@ class AnsibleCoreCI(object):

     def _start(self, auth):
         """Start instance."""
-        if self.started:
-            display.info('Skipping started %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
-                         verbosity=1)
-            return
-
         display.info('Initializing new %s/%s instance %s.' % (self.platform, self.version, self.instance_id), verbosity=1)

         if self.platform == 'windows':
@@ -413,6 +415,13 @@ class AnsibleCoreCI(object):

         config = json.loads(data)

+        return self.load(config)
+
+    def load(self, config):
+        """
+        :type config: dict[str, str]
+        :rtype: bool
+        """
         self.instance_id = config['instance_id']
         self.endpoint = config['endpoint']
         self.started = True
@@ -424,16 +433,23 @@ class AnsibleCoreCI(object):
         if self.args.explain:
             return

+        config = self.save()
+
         make_dirs(os.path.dirname(self.path))

         with open(self.path, 'w') as instance_fd:
-            config = dict(
-                instance_id=self.instance_id,
-                endpoint=self.endpoint,
-            )
-
             instance_fd.write(json.dumps(config, indent=4, sort_keys=True))

+    def save(self):
+        """
+        :rtype: dict[str, str]
+        """
+        return dict(
+            platform_version='%s/%s' % (self.platform, self.version),
+            instance_id=self.instance_id,
+            endpoint=self.endpoint,
+        )
+
     @staticmethod
     def _create_http_error(response):
         """
@@ -472,20 +488,33 @@ class CoreHttpError(HttpError):

 class SshKey(object):
     """Container for SSH key used to connect to remote instances."""
+    KEY_NAME = 'id_rsa'
+    PUB_NAME = 'id_rsa.pub'
+
     def __init__(self, args):
         """
         :type args: EnvironmentConfig
         """
-        tmp = os.path.expanduser('~/.ansible/test/')
+        cache_dir = 'test/cache'

-        self.key = os.path.join(tmp, 'id_rsa')
-        self.pub = os.path.join(tmp, 'id_rsa.pub')
+        self.key = os.path.join(cache_dir, self.KEY_NAME)
+        self.pub = os.path.join(cache_dir, self.PUB_NAME)

-        if not os.path.isfile(self.pub):
+        if not os.path.isfile(self.key) or not os.path.isfile(self.pub):
+            base_dir = os.path.expanduser('~/.ansible/test/')
+
+            key = os.path.join(base_dir, self.KEY_NAME)
+            pub = os.path.join(base_dir, self.PUB_NAME)
+
             if not args.explain:
-                make_dirs(tmp)
+                make_dirs(base_dir)

-            run_command(args, ['ssh-keygen', '-q', '-t', 'rsa', '-N', '', '-f', self.key])
+            if not os.path.isfile(key) or not os.path.isfile(pub):
+                run_command(args, ['ssh-keygen', '-q', '-t', 'rsa', '-N', '', '-f', key])
+
+            if not args.explain:
+                shutil.copy2(key, self.key)
+                shutil.copy2(pub, self.pub)

         if args.explain:
             self.pub_contents = None
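Note: the new save()/load() pair is what makes windows/network instances persist across delegation: the controller starts an instance and serializes it, and the delegated run rehydrates a client with load=False plus load(config). A standalone sketch with a simplified stand-in class (InstanceHandle, the ids and the '2012-R2' version string are illustrative):

    class InstanceHandle(object):
        # Simplified stand-in for the AnsibleCoreCI save/load round trip.
        def __init__(self, platform, version):
            self.platform = platform
            self.version = version
            self.instance_id = None
            self.endpoint = None
            self.started = False

        def save(self):
            return dict(
                platform_version='%s/%s' % (self.platform, self.version),
                instance_id=self.instance_id,
                endpoint=self.endpoint,
            )

        def load(self, config):
            self.instance_id = config['instance_id']
            self.endpoint = config['endpoint']
            self.started = True

    controller = InstanceHandle('windows', '2012-R2')
    controller.instance_id, controller.endpoint = 'abc123', 'https://example.invalid'
    config = controller.save()        # stored in args.metadata.instance_config

    delegated = InstanceHandle('windows', '2012-R2')
    delegated.load(config)            # rehydrated on the delegated side
    print(delegated.endpoint, delegated.started)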
@@ -102,10 +102,10 @@ def delegate_tox(args, exclude, require):
     :type require: list[str]
     """
    if args.python:
-        versions = args.python,
+        versions = args.python_version,

-        if args.python not in SUPPORTED_PYTHON_VERSIONS:
-            raise ApplicationError('tox does not support Python version %s' % args.python)
+        if args.python_version not in SUPPORTED_PYTHON_VERSIONS:
+            raise ApplicationError('tox does not support Python version %s' % args.python_version)
     else:
         versions = SUPPORTED_PYTHON_VERSIONS

@@ -189,7 +189,12 @@ def delegate_docker(args, exclude, require):
     with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as local_source_fd:
         try:
             if not args.explain:
-                lib.pytar.create_tarfile(local_source_fd.name, '.', lib.pytar.ignore)
+                if args.docker_keep_git:
+                    tar_filter = lib.pytar.AllowGitTarFilter()
+                else:
+                    tar_filter = lib.pytar.DefaultTarFilter()
+
+                lib.pytar.create_tarfile(local_source_fd.name, '.', tar_filter)

             if util_image:
                 util_options = [
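Note: a sketch of how --docker-keep-git selects the payload filter. It assumes test/runner is the working directory so the lib.pytar module changed in this commit is importable; choose_tar_filter is an illustrative helper, not a real function:

    import lib.pytar  # assumes test/runner/lib is importable

    def choose_tar_filter(docker_keep_git):
        # Keep .git in the delegated payload when the git fallback (e.g. for validate-modules) is wanted.
        if docker_keep_git:
            return lib.pytar.AllowGitTarFilter()
        return lib.pytar.DefaultTarFilter()

    # lib.pytar.create_tarfile('/tmp/ansible-source.tgz', '.', choose_tar_filter(False))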
@@ -11,12 +11,7 @@ import tempfile
 import time
 import textwrap
 import functools
-import shutil
-import stat
 import pipes
-import random
-import string
-import atexit
 import hashlib

 import lib.pytar
@@ -45,11 +40,12 @@ from lib.util import (
     SubprocessError,
     display,
     run_command,
-    common_environment,
+    intercept_command,
     remove_tree,
     make_dirs,
     is_shippable,
     is_binary_file,
+    find_pip,
     find_executable,
     raw_command,
 )
@@ -110,8 +106,6 @@ SUPPORTED_PYTHON_VERSIONS = (

 COMPILE_PYTHON_VERSIONS = SUPPORTED_PYTHON_VERSIONS

-coverage_path = ''  # pylint: disable=locally-disabled, invalid-name
-

 def check_startup():
     """Checks to perform at startup before running commands."""
@@ -163,23 +157,27 @@ def install_command_requirements(args):
     if args.junit:
         packages.append('junit-xml')

-    commands = [generate_pip_install(args.command, packages=packages)]
+    pip = find_pip(version=args.python_version)
+
+    commands = [generate_pip_install(pip, args.command, packages=packages)]

     if isinstance(args, IntegrationConfig):
         for cloud_platform in get_cloud_platforms(args):
-            commands.append(generate_pip_install('%s.cloud.%s' % (args.command, cloud_platform)))
+            commands.append(generate_pip_install(pip, '%s.cloud.%s' % (args.command, cloud_platform)))
+
+    commands = [cmd for cmd in commands if cmd]

     # only look for changes when more than one requirements file is needed
     detect_pip_changes = len(commands) > 1

     # first pass to install requirements, changes expected unless environment is already set up
-    changes = run_pip_commands(args, commands, detect_pip_changes)
+    changes = run_pip_commands(args, pip, commands, detect_pip_changes)

     if not changes:
         return  # no changes means we can stop early

     # second pass to check for conflicts in requirements, changes are not expected here
-    changes = run_pip_commands(args, commands, detect_pip_changes)
+    changes = run_pip_commands(args, pip, commands, detect_pip_changes)

     if not changes:
         return  # no changes means no conflicts
@@ -188,16 +186,17 @@ def install_command_requirements(args):
                            '\n'.join((' '.join(pipes.quote(c) for c in cmd) for cmd in changes)))


-def run_pip_commands(args, commands, detect_pip_changes=False):
+def run_pip_commands(args, pip, commands, detect_pip_changes=False):
     """
     :type args: EnvironmentConfig
+    :type pip: str
     :type commands: list[list[str]]
     :type detect_pip_changes: bool
     :rtype: list[list[str]]
     """
     changes = []

-    after_list = pip_list(args) if detect_pip_changes else None
+    after_list = pip_list(args, pip) if detect_pip_changes else None

     for cmd in commands:
         if not cmd:
@@ -217,10 +216,10 @@ def run_pip_commands(args, commands, detect_pip_changes=False):
             # AttributeError: 'Requirement' object has no attribute 'project_name'
             # See: https://bugs.launchpad.net/ubuntu/xenial/+source/python-pip/+bug/1626258
             # Upgrading pip works around the issue.
-            run_command(args, ['pip', 'install', '--upgrade', 'pip'])
+            run_command(args, [pip, 'install', '--upgrade', 'pip'])
             run_command(args, cmd)

-        after_list = pip_list(args) if detect_pip_changes else None
+        after_list = pip_list(args, pip) if detect_pip_changes else None

         if before_list != after_list:
             changes.append(cmd)
@@ -228,12 +227,13 @@
     return changes


-def pip_list(args):
+def pip_list(args, pip):
     """
     :type args: EnvironmentConfig
+    :type pip: str
     :rtype: str
     """
-    stdout, _ = run_command(args, ['pip', 'list'], capture=True, always=True)
+    stdout, _ = run_command(args, [pip, 'list'], capture=True)
     return stdout


@@ -244,14 +244,14 @@ def generate_egg_info(args):
     if os.path.isdir('lib/ansible.egg-info'):
         return

-    run_command(args, ['python', 'setup.py', 'egg_info'], capture=args.verbosity < 3)
+    run_command(args, ['python%s' % args.python_version, 'setup.py', 'egg_info'], capture=args.verbosity < 3)


-def generate_pip_install(command, packages=None, extras=None):
+def generate_pip_install(pip, command, packages=None):
     """
+    :type pip: str
     :type command: str
     :type packages: list[str] | None
-    :type extras: list[str] | None
     :rtype: list[str] | None
     """
     constraints = 'test/runner/requirements/constraints.txt'
@@ -259,15 +259,8 @@

     options = []

-    requirements_list = [requirements]
-
-    if extras:
-        for extra in extras:
-            requirements_list.append('test/runner/requirements/%s.%s.txt' % (command, extra))
-
-    for requirements in requirements_list:
-        if os.path.exists(requirements) and os.path.getsize(requirements):
-            options += ['-r', requirements]
+    if os.path.exists(requirements) and os.path.getsize(requirements):
+        options += ['-r', requirements]

     if packages:
         options += packages
@@ -275,7 +268,7 @@
     if not options:
         return None

-    return ['pip', 'install', '--disable-pip-version-check', '-c', constraints] + options
+    return [pip, 'install', '--disable-pip-version-check', '-c', constraints] + options


 def command_shell(args):
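Note: requirements installation now resolves a versioned pip via find_pip and filters out empty pip commands before running anything. A standalone sketch of that behaviour with a simplified generate_pip_install stand-in (the real helper also applies the constraints file; 'pip3.6' is only an example of what find_pip might return):

    import os

    def generate_pip_install(pip, command, packages=None):
        # Simplified stand-in: return None when there is nothing to install.
        options = []
        requirements = 'test/runner/requirements/%s.txt' % command
        if os.path.exists(requirements) and os.path.getsize(requirements):
            options += ['-r', requirements]
        if packages:
            options += packages
        return [pip, 'install', '--disable-pip-version-check'] + options if options else None

    pip = 'pip3.6'  # illustrative result of find_pip(version='3.6')
    commands = [
        generate_pip_install(pip, 'sanity', packages=['junit-xml']),
        generate_pip_install(pip, 'no-requirements-here'),  # -> None
    ]
    commands = [cmd for cmd in commands if cmd]  # filter out empty pip commands
    print(commands)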
@@ -323,31 +316,24 @@ def command_network_integration(args):
     )

     all_targets = tuple(walk_network_integration_targets(include_hidden=True))
-    internal_targets = command_integration_filter(args, all_targets)
-    platform_targets = set(a for t in internal_targets for a in t.aliases if a.startswith('network/'))
+    internal_targets = command_integration_filter(args, all_targets, init_callback=network_init)

     if args.platform:
+        configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)
         instances = []  # type: list [lib.thread.WrappedThread]

-        # generate an ssh key (if needed) up front once, instead of for each instance
-        SshKey(args)
-
         for platform_version in args.platform:
             platform, version = platform_version.split('/', 1)
-            platform_target = 'network/%s/' % platform
+            config = configs.get(platform_version)

-            if platform_target not in platform_targets and 'network/basics/' not in platform_targets:
-                display.warning('Skipping "%s" because selected tests do not target the "%s" platform.' % (
-                    platform_version, platform))
+            if not config:
                 continue

-            instance = lib.thread.WrappedThread(functools.partial(network_run, args, platform, version))
+            instance = lib.thread.WrappedThread(functools.partial(network_run, args, platform, version, config))
             instance.daemon = True
             instance.start()
             instances.append(instance)

-        install_command_requirements(args)
-
         while any(instance.is_alive() for instance in instances):
             time.sleep(1)

@@ -359,22 +345,71 @@
     if not args.explain:
         with open(filename, 'w') as inventory_fd:
             inventory_fd.write(inventory)
-    else:
-        install_command_requirements(args)

     command_integration_filtered(args, internal_targets, all_targets)


-def network_run(args, platform, version):
+def network_init(args, internal_targets):
+    """
+    :type args: NetworkIntegrationConfig
+    :type internal_targets: tuple[IntegrationTarget]
+    """
+    if not args.platform:
+        return
+
+    if args.metadata.instance_config is not None:
+        return
+
+    platform_targets = set(a for t in internal_targets for a in t.aliases if a.startswith('network/'))
+
+    instances = []  # type: list [lib.thread.WrappedThread]
+
+    # generate an ssh key (if needed) up front once, instead of for each instance
+    SshKey(args)
+
+    for platform_version in args.platform:
+        platform, version = platform_version.split('/', 1)
+        platform_target = 'network/%s/' % platform
+
+        if platform_target not in platform_targets and 'network/basics/' not in platform_targets:
+            display.warning('Skipping "%s" because selected tests do not target the "%s" platform.' % (
+                platform_version, platform))
+            continue
+
+        instance = lib.thread.WrappedThread(functools.partial(network_start, args, platform, version))
+        instance.daemon = True
+        instance.start()
+        instances.append(instance)
+
+    while any(instance.is_alive() for instance in instances):
+        time.sleep(1)
+
+    args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
+
+
+def network_start(args, platform, version):
     """
     :type args: NetworkIntegrationConfig
     :type platform: str
     :type version: str
     :rtype: AnsibleCoreCI
     """
     core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage)
     core_ci.start()
+
+    return core_ci.save()
+
+
+def network_run(args, platform, version, config):
+    """
+    :type args: NetworkIntegrationConfig
+    :type platform: str
+    :type version: str
+    :type config: dict[str, str]
+    :rtype: AnsibleCoreCI
+    """
+    core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, load=False)
+    core_ci.load(config)
     core_ci.wait()

     manage = ManageNetworkCI(core_ci)
@@ -431,19 +466,20 @@ def command_windows_integration(args):
         raise ApplicationError('Use the --windows option or provide an inventory file (see %s.template).' % filename)

     all_targets = tuple(walk_windows_integration_targets(include_hidden=True))
-    internal_targets = command_integration_filter(args, all_targets)
+    internal_targets = command_integration_filter(args, all_targets, init_callback=windows_init)

     if args.windows:
+        configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)
         instances = []  # type: list [lib.thread.WrappedThread]

         for version in args.windows:
-            instance = lib.thread.WrappedThread(functools.partial(windows_run, args, version))
+            config = configs['windows/%s' % version]
+
+            instance = lib.thread.WrappedThread(functools.partial(windows_run, args, version, config))
             instance.daemon = True
             instance.start()
             instances.append(instance)

-        install_command_requirements(args)
-
         while any(instance.is_alive() for instance in instances):
             time.sleep(1)

@@ -455,16 +491,36 @@
     if not args.explain:
         with open(filename, 'w') as inventory_fd:
             inventory_fd.write(inventory)
-    else:
-        install_command_requirements(args)

-    try:
-        command_integration_filtered(args, internal_targets, all_targets)
-    finally:
-        pass
+    command_integration_filtered(args, internal_targets, all_targets)


-def windows_run(args, version):
+def windows_init(args, internal_targets):  # pylint: disable=locally-disabled, unused-argument
+    """
+    :type args: WindowsIntegrationConfig
+    :type internal_targets: tuple[IntegrationTarget]
+    """
+    if not args.windows:
+        return
+
+    if args.metadata.instance_config is not None:
+        return
+
+    instances = []  # type: list [lib.thread.WrappedThread]
+
+    for version in args.windows:
+        instance = lib.thread.WrappedThread(functools.partial(windows_start, args, version))
+        instance.daemon = True
+        instance.start()
+        instances.append(instance)
+
+    while any(instance.is_alive() for instance in instances):
+        time.sleep(1)
+
+    args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
+
+
+def windows_start(args, version):
     """
     :type args: WindowsIntegrationConfig
     :type version: str
@@ -472,6 +528,19 @@ def windows_run(args, version):
     """
     core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage)
     core_ci.start()
+
+    return core_ci.save()
+
+
+def windows_run(args, version, config):
+    """
+    :type args: WindowsIntegrationConfig
+    :type version: str
+    :type config: dict[str, str]
+    :rtype: AnsibleCoreCI
+    """
+    core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, load=False)
+    core_ci.load(config)
     core_ci.wait()

     manage = ManageWindowsCI(core_ci)
@@ -525,10 +594,11 @@ def windows_inventory(remotes):
     return inventory


-def command_integration_filter(args, targets):
+def command_integration_filter(args, targets, init_callback=None):
     """
     :type args: IntegrationConfig
     :type targets: collections.Iterable[IntegrationTarget]
+    :type init_callback: (IntegrationConfig, tuple[IntegrationTarget]) -> None
     :rtype: tuple[IntegrationTarget]
     """
     targets = tuple(target for target in targets if 'hidden/' not in target.aliases)
@@ -551,6 +621,9 @@ def command_integration_filter(args, targets):
     if args.start_at and not any(t.name == args.start_at for t in internal_targets):
         raise ApplicationError('Start at target matches nothing: %s' % args.start_at)

+    if init_callback:
+        init_callback(args, internal_targets)
+
     cloud_init(args, internal_targets)

     if args.delegate:
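Note: the new init_callback hook is how the windows and network commands provision instances exactly once before delegation and record them in args.metadata.instance_config. A minimal standalone sketch of the hook (string targets and a dict for args are deliberate simplifications):

    def command_integration_filter(args, targets, init_callback=None):
        # Filter targets, then give the caller a chance to set up shared state once.
        internal_targets = tuple(t for t in targets if not t.startswith('hidden/'))

        if init_callback:
            init_callback(args, internal_targets)

        return internal_targets

    def example_init(args, internal_targets):
        # e.g. windows_init/network_init: start instances once and stash their configs
        args['instance_config'] = [{'platform_version': 'windows/2012-R2'}]

    args = {}
    print(command_integration_filter(args, ('windows_ping', 'hidden/setup'), init_callback=example_init))
    print(args['instance_config'])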
@@ -880,7 +953,7 @@ def command_units(args):

     for version in SUPPORTED_PYTHON_VERSIONS:
         # run all versions unless version given, in which case run only that version
-        if args.python and version != args.python:
+        if args.python and version != args.python_version:
             continue

         env = ansible_environment(args)
@@ -940,7 +1013,7 @@ def command_compile(args):

     for version in COMPILE_PYTHON_VERSIONS:
         # run all versions unless version given, in which case run only that version
-        if args.python and version != args.python:
+        if args.python and version != args.python_version:
             continue

         display.info('Compile with Python %s' % version)
@@ -1027,104 +1100,6 @@ def compile_version(args, python_version, include, exclude):
     return TestSuccess(command, test, python_version=python_version)


-def intercept_command(args, cmd, target_name, capture=False, env=None, data=None, cwd=None, python_version=None, path=None):
-    """
-    :type args: TestConfig
-    :type cmd: collections.Iterable[str]
-    :type target_name: str
-    :type capture: bool
-    :type env: dict[str, str] | None
-    :type data: str | None
-    :type cwd: str | None
-    :type python_version: str | None
-    :type path: str | None
-    :rtype: str | None, str | None
-    """
-    if not env:
-        env = common_environment()
-
-    cmd = list(cmd)
-    inject_path = get_coverage_path(args)
-    config_path = os.path.join(inject_path, 'injector.json')
-    version = python_version or args.python_version
-    interpreter = find_executable('python%s' % version, path=path)
-    coverage_file = os.path.abspath(os.path.join(inject_path, '..', 'output', '%s=%s=%s=%s=coverage' % (
-        args.command, target_name, args.coverage_label or 'local-%s' % version, 'python-%s' % version)))
-
-    env['PATH'] = inject_path + os.pathsep + env['PATH']
-    env['ANSIBLE_TEST_PYTHON_VERSION'] = version
-    env['ANSIBLE_TEST_PYTHON_INTERPRETER'] = interpreter
-
-    config = dict(
-        python_interpreter=interpreter,
-        coverage_file=coverage_file if args.coverage else None,
-    )
-
-    if not args.explain:
-        with open(config_path, 'w') as config_fd:
-            json.dump(config, config_fd, indent=4, sort_keys=True)
-
-    return run_command(args, cmd, capture=capture, env=env, data=data, cwd=cwd)
-
-
-def get_coverage_path(args):
-    """
-    :type args: TestConfig
-    :rtype: str
-    """
-    global coverage_path  # pylint: disable=locally-disabled, global-statement, invalid-name
-
-    if coverage_path:
-        return os.path.join(coverage_path, 'coverage')
-
-    prefix = 'ansible-test-coverage-'
-    tmp_dir = '/tmp'
-
-    if args.explain:
-        return os.path.join(tmp_dir, '%stmp' % prefix, 'coverage')
-
-    src = os.path.abspath(os.path.join(os.getcwd(), 'test/runner/injector/'))
-
-    coverage_path = tempfile.mkdtemp('', prefix, dir=tmp_dir)
-    os.chmod(coverage_path, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
-
-    shutil.copytree(src, os.path.join(coverage_path, 'coverage'))
-    shutil.copy('.coveragerc', os.path.join(coverage_path, 'coverage', '.coveragerc'))
-
-    for root, dir_names, file_names in os.walk(coverage_path):
-        for name in dir_names + file_names:
-            os.chmod(os.path.join(root, name), stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
-
-    for directory in 'output', 'logs':
-        os.mkdir(os.path.join(coverage_path, directory))
-        os.chmod(os.path.join(coverage_path, directory), stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
-
-    atexit.register(cleanup_coverage_dir)
-
-    return os.path.join(coverage_path, 'coverage')
-
-
-def cleanup_coverage_dir():
-    """Copy over coverage data from temporary directory and purge temporary directory."""
-    output_dir = os.path.join(coverage_path, 'output')
-
-    for filename in os.listdir(output_dir):
-        src = os.path.join(output_dir, filename)
-        dst = os.path.join(os.getcwd(), 'test', 'results', 'coverage')
-        shutil.copy(src, dst)
-
-    logs_dir = os.path.join(coverage_path, 'logs')
-
-    for filename in os.listdir(logs_dir):
-        random_suffix = ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(8))
-        new_name = '%s.%s.log' % (os.path.splitext(os.path.basename(filename))[0], random_suffix)
-        src = os.path.join(logs_dir, filename)
-        dst = os.path.join(os.getcwd(), 'test', 'results', 'logs', new_name)
-        shutil.copy(src, dst)
-
-    shutil.rmtree(coverage_path)
-
-
 def get_changes_filter(args):
     """
     :type args: TestConfig
@@ -1306,12 +1281,16 @@ def get_integration_local_filter(args, targets):
                         % (skip.rstrip('/'), ', '.join(skipped)))

     if args.python_version.startswith('3'):
-        skip = 'skip/python3/'
-        skipped = [target.name for target in targets if skip in target.aliases]
-        if skipped:
-            exclude.append(skip)
-            display.warning('Excluding tests marked "%s" which are not yet supported on python 3: %s'
-                            % (skip.rstrip('/'), ', '.join(skipped)))
+        python_version = 3
+    else:
+        python_version = 2
+
+    skip = 'skip/python%d/' % python_version
+    skipped = [target.name for target in targets if skip in target.aliases]
+    if skipped:
+        exclude.append(skip)
+        display.warning('Excluding tests marked "%s" which are not supported on python %d: %s'
+                        % (skip.rstrip('/'), python_version, ', '.join(skipped)))

     return exclude

@@ -1332,13 +1311,26 @@ def get_integration_docker_filter(args, targets):
         display.warning('Excluding tests marked "%s" which require --docker-privileged to run under docker: %s'
                         % (skip.rstrip('/'), ', '.join(skipped)))

+    python_version = 2  # images are expected to default to python 2 unless otherwise specified
+
     if args.docker.endswith('py3'):
-        skip = 'skip/python3/'
-        skipped = [target.name for target in targets if skip in target.aliases]
-        if skipped:
-            exclude.append(skip)
-            display.warning('Excluding tests marked "%s" which are not yet supported on python 3: %s'
-                            % (skip.rstrip('/'), ', '.join(skipped)))
+        python_version = 3  # docker images ending in 'py3' are expected to default to python 3
+
+    if args.docker.endswith(':default'):
+        python_version = 3  # docker images tagged 'default' are expected to default to python 3
+
+    if args.python:  # specifying a numeric --python option overrides the default python
+        if args.python.startswith('3'):
+            python_version = 3
+        elif args.python.startswith('2'):
+            python_version = 2
+
+    skip = 'skip/python%d/' % python_version
+    skipped = [target.name for target in targets if skip in target.aliases]
+    if skipped:
+        exclude.append(skip)
+        display.warning('Excluding tests marked "%s" which are not supported on python %d: %s'
+                        % (skip.rstrip('/'), python_version, ', '.join(skipped)))

     return exclude

@@ -1359,9 +1351,18 @@ def get_integration_remote_filter(args, targets):
     skipped = [target.name for target in targets if skip in target.aliases]
     if skipped:
         exclude.append(skip)
-        display.warning('Excluding tests marked "%s" which are not yet supported on %s: %s'
+        display.warning('Excluding tests marked "%s" which are not supported on %s: %s'
                         % (skip.rstrip('/'), platform, ', '.join(skipped)))

+    python_version = 2  # remotes are expected to default to python 2
+
+    skip = 'skip/python%d/' % python_version
+    skipped = [target.name for target in targets if skip in target.aliases]
+    if skipped:
+        exclude.append(skip)
+        display.warning('Excluding tests marked "%s" which are not supported on python %d: %s'
+                        % (skip.rstrip('/'), python_version, ', '.join(skipped)))
+
     return exclude
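Note: the local, docker and remote filters now share one pattern: decide which python major version the environment defaults to, then exclude targets aliased skip/python2/ or skip/python3/. A standalone sketch of that exclusion (the target mapping is illustrative):

    def python_skip_exclusions(targets, python_version):
        # targets: mapping of target name -> tuple of aliases
        exclude = []
        skip = 'skip/python%d/' % python_version
        skipped = [name for name, aliases in targets.items() if skip in aliases]
        if skipped:
            exclude.append(skip)
            print('Excluding tests marked "%s" which are not supported on python %d: %s'
                  % (skip.rstrip('/'), python_version, ', '.join(skipped)))
        return exclude

    targets = {'old_module': ('posix/ci/', 'skip/python3/'), 'new_module': ('posix/ci/',)}
    print(python_skip_exclusions(targets, 3))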
@@ -14,6 +14,7 @@ from lib.util import (
     SubprocessError,
     ApplicationError,
     run_command,
+    intercept_command,
 )

 from lib.core_ci import (
@@ -51,7 +52,7 @@ class ManageWindowsCI(object):

         for _ in range(1, 120):
             try:
-                run_command(self.core_ci.args, cmd, env=env)
+                intercept_command(self.core_ci.args, cmd, 'ping', env=env)
                 return
             except SubprocessError:
                 sleep(10)
@@ -93,7 +94,7 @@ class ManageNetworkCI(object):

         for _ in range(1, 90):
             try:
-                run_command(self.core_ci.args, cmd, env=env)
+                intercept_command(self.core_ci.args, cmd, 'ping', env=env)
                 return
             except SubprocessError:
                 sleep(10)
@@ -161,7 +162,7 @@ class ManagePosixCI(object):
         remote_source_path = os.path.join(remote_source_dir, os.path.basename(local_source_fd.name))

         if not self.core_ci.args.explain:
-            lib.pytar.create_tarfile(local_source_fd.name, '.', lib.pytar.ignore)
+            lib.pytar.create_tarfile(local_source_fd.name, '.', lib.pytar.DefaultTarFilter())

         self.upload(local_source_fd.name, remote_source_dir)
         self.ssh('rm -rf ~/ansible && mkdir ~/ansible && cd ~/ansible && tar oxzf %s' % remote_source_path)
@@ -20,6 +20,7 @@ class Metadata(object):
         """Initialize metadata."""
         self.changes = {}  # type: dict [str, tuple[tuple[int, int]]
         self.cloud_config = None  # type: dict [str, str]
+        self.instance_config = None  # type: list[dict[str, str]]

         if is_shippable():
             self.ci_provider = 'shippable'
@@ -54,6 +55,7 @@ class Metadata(object):
         return dict(
             changes=self.changes,
             cloud_config=self.cloud_config,
+            instance_config=self.instance_config,
             ci_provider=self.ci_provider,
         )

@@ -88,6 +90,7 @@ class Metadata(object):
         metadata = Metadata()
         metadata.changes = data['changes']
         metadata.cloud_config = data['cloud_config']
+        metadata.instance_config = data['instance_config']
         metadata.ci_provider = data['ci_provider']

         return metadata
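Note: instance_config now travels in the metadata that delegation hands to the inner ansible-test process. A standalone sketch of the round trip (the helper function and the vyos/1.1.8 entry are illustrative, not the real Metadata class):

    import json

    def metadata_to_dict(changes, cloud_config, instance_config, ci_provider):
        # Mirror of the to_dict() shape above, now including instance_config.
        return dict(
            changes=changes,
            cloud_config=cloud_config,
            instance_config=instance_config,
            ci_provider=ci_provider,
        )

    serialized = json.dumps(metadata_to_dict(
        {}, None,
        [{'platform_version': 'vyos/1.1.8', 'instance_id': 'abc', 'endpoint': 'https://example.invalid'}],
        ''))
    restored = json.loads(serialized)
    print(restored['instance_config'][0]['platform_version'])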
@@ -2,76 +2,103 @@

 from __future__ import absolute_import, print_function

+import abc
 import tarfile
 import os

 from lib.util import (
     display,
+    ABC,
 )

 # improve performance by disabling uid/gid lookups
 tarfile.pwd = None
 tarfile.grp = None

-# To reduce archive time and size, ignore non-versioned files which are large or numerous.
-# Also ignore miscellaneous git related files since the .git directory is ignored.

-IGNORE_DIRS = (
-    '.tox',
-    '.git',
-    '.idea',
-    '__pycache__',
-    'ansible.egg-info',
-)
-
-IGNORE_FILES = (
-    '.gitignore',
-    '.gitdir',
-)
-
-IGNORE_EXTENSIONS = (
-    '.pyc',
-    '.retry',
-)
-
-
-def ignore(item):
-    """
-    :type item: tarfile.TarInfo
-    :rtype: tarfile.TarInfo | None
-    """
-    filename = os.path.basename(item.path)
-    name, ext = os.path.splitext(filename)
-    dirs = os.path.split(item.path)
-
-    if not item.isdir():
-        if item.path.startswith('./test/results/'):
-            return None
-
-        if item.path.startswith('./docs/docsite/_build/'):
-            return None
-
-        if name in IGNORE_FILES:
-            return None
-
-        if ext in IGNORE_EXTENSIONS:
-            return None
-
-    if any(d in IGNORE_DIRS for d in dirs):
-        return None
-
-    return item
+
+class TarFilter(ABC):
+    """Filter to use when creating a tar file."""
+    @abc.abstractmethod
+    def ignore(self, item):
+        """
+        :type item: tarfile.TarInfo
+        :rtype: tarfile.TarInfo | None
+        """
+        pass
+
+
+class DefaultTarFilter(TarFilter):
+    """
+    To reduce archive time and size, ignore non-versioned files which are large or numerous.
+    Also ignore miscellaneous git related files since the .git directory is ignored.
+    """
+    def __init__(self):
+        self.ignore_dirs = (
+            '.tox',
+            '.git',
+            '.idea',
+            '__pycache__',
+            'ansible.egg-info',
+        )
+
+        self.ignore_files = (
+            '.gitignore',
+            '.gitdir',
+        )
+
+        self.ignore_extensions = (
+            '.pyc',
+            '.retry',
+        )
+
+    def ignore(self, item):
+        """
+        :type item: tarfile.TarInfo
+        :rtype: tarfile.TarInfo | None
+        """
+        filename = os.path.basename(item.path)
+        name, ext = os.path.splitext(filename)
+        dirs = os.path.split(item.path)
+
+        if not item.isdir():
+            if item.path.startswith('./test/results/'):
+                return None
+
+            if item.path.startswith('./docs/docsite/_build/'):
+                return None
+
+            if name in self.ignore_files:
+                return None
+
+            if ext in self.ignore_extensions:
+                return None
+
+        if any(d in self.ignore_dirs for d in dirs):
+            return None
+
+        return item
+
+
+class AllowGitTarFilter(DefaultTarFilter):
+    """
+    Filter that allows git related files normally excluded by the default tar filter.
+    """
+    def __init__(self):
+        super(AllowGitTarFilter, self).__init__()
+
+        self.ignore_dirs = tuple(d for d in self.ignore_dirs if not d.startswith('.git'))
+        self.ignore_files = tuple(f for f in self.ignore_files if not f.startswith('.git'))


 def create_tarfile(dst_path, src_path, tar_filter):
     """
     :type dst_path: str
     :type src_path: str
-    :type tar_filter: (tarfile.TarInfo) -> tarfile.TarInfo | None
+    :type tar_filter: TarFilter
     """
     display.info('Creating a compressed tar archive of path: %s' % src_path, verbosity=1)

     with tarfile.TarFile.gzopen(dst_path, mode='w', compresslevel=4) as tar:
-        tar.add(src_path, filter=tar_filter)
+        tar.add(src_path, filter=tar_filter.ignore)

     display.info('Resulting archive is %d bytes.' % os.path.getsize(dst_path), verbosity=1)
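Note: a standalone sketch of the subclassing trick used above, where the git-allowing filter simply strips git-related entries from the tuples built by the default filter's __init__ (DefaultFilter/AllowGitFilter are illustrative stand-ins, not the real classes):

    class DefaultFilter(object):
        def __init__(self):
            self.ignore_dirs = ('.tox', '.git', '.idea', '__pycache__', 'ansible.egg-info')
            self.ignore_files = ('.gitignore', '.gitdir')

    class AllowGitFilter(DefaultFilter):
        def __init__(self):
            super(AllowGitFilter, self).__init__()
            # Drop only the git-related entries; everything else stays filtered.
            self.ignore_dirs = tuple(d for d in self.ignore_dirs if not d.startswith('.git'))
            self.ignore_files = tuple(f for f in self.ignore_files if not f.startswith('.git'))

    print(AllowGitFilter().ignore_dirs)   # ('.tox', '.idea', '__pycache__', 'ansible.egg-info')
    print(AllowGitFilter().ignore_files)  # ()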
@@ -86,7 +86,7 @@ def command_sanity(args):
         versions = (None,)

     for version in versions:
-        if args.python and version and version != args.python:
+        if args.python and version and version != args.python_version:
             continue

         display.info('Sanity check using %s%s' % (test.name, ' with Python %s' % version if version else ''))
@@ -11,16 +11,13 @@ from lib.sanity import (
 from lib.util import (
     SubprocessError,
     display,
+    intercept_command,
 )

 from lib.ansible_util import (
     ansible_environment,
 )

-from lib.executor import (
-    intercept_command,
-)
-
 from lib.config import (
     SanityConfig,
 )
@@ -15,6 +15,7 @@ from lib.sanity import (
 from lib.util import (
     SubprocessError,
     run_command,
+    intercept_command,
     remove_tree,
 )

@@ -23,7 +24,6 @@ from lib.ansible_util import (
 )

 from lib.executor import (
-    intercept_command,
     generate_pip_install,
 )

@@ -83,7 +83,7 @@ class ImportTest(SanityMultipleVersion):

         # make sure coverage is available in the virtual environment if needed
         if args.coverage:
-            run_command(args, generate_pip_install('sanity.import', packages=['coverage']), env=env)
+            run_command(args, generate_pip_install('pip', 'sanity.import', packages=['coverage']), env=env)
             run_command(args, ['pip', 'uninstall', '--disable-pip-version-check', '-y', 'pip'], env=env)

         cmd = ['importer.py'] + paths
@@ -15,6 +15,7 @@ from lib.util import (
     SubprocessError,
     display,
     run_command,
+    find_executable,
 )

 from lib.config import (
@@ -55,7 +56,8 @@ class Pep8Test(SanitySingleVersion):
         paths = sorted(i.path for i in targets.include if (os.path.splitext(i.path)[1] == '.py' or i.path.startswith('bin/')) and i.path not in skip_paths_set)

         cmd = [
-            'pycodestyle',
+            'python%s' % args.python_version,
+            find_executable('pycodestyle'),
             '--max-line-length', '160',
             '--config', '/dev/null',
             '--ignore', ','.join(sorted(current_ignore)),
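Note: pycodestyle is now launched through an explicit interpreter instead of via its shebang, which is what makes the check honor the requested python version. A standalone sketch of the command construction (shutil.which stands in for lib.util.find_executable):

    import shutil

    def pep8_command(python_version, paths):
        # Run pycodestyle under a specific python, not whatever its shebang points at.
        return [
            'python%s' % python_version,
            shutil.which('pycodestyle'),
            '--max-line-length', '160',
            '--config', '/dev/null',
        ] + paths

    print(pep8_command('3.6', ['lib/ansible/modules/ping.py']))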
@@ -3,6 +3,7 @@ from __future__ import absolute_import, print_function

 import json
 import os
+import datetime

 from lib.sanity import (
     SanitySingleVersion,
@@ -16,6 +17,7 @@ from lib.util import (
     SubprocessError,
     run_command,
     display,
+    find_executable,
 )

 from lib.ansible_util import (
@@ -52,51 +54,54 @@ class PylintTest(SanitySingleVersion):
         with open(PYLINT_SKIP_PATH, 'r') as skip_fd:
             skip_paths = skip_fd.read().splitlines()

-        with open('test/sanity/pylint/disable.txt', 'r') as disable_fd:
-            disable = set(c for c in disable_fd.read().splitlines() if not c.strip().startswith('#'))
-
-        with open('test/sanity/pylint/enable.txt', 'r') as enable_fd:
-            enable = set(c for c in enable_fd.read().splitlines() if not c.strip().startswith('#'))
-
         skip_paths_set = set(skip_paths)

         paths = sorted(i.path for i in targets.include if (os.path.splitext(i.path)[1] == '.py' or i.path.startswith('bin/')) and i.path not in skip_paths_set)

-        cmd = [
-            'pylint',
-            '--jobs', '0',
-            '--reports', 'n',
-            '--max-line-length', '160',
-            '--rcfile', '/dev/null',
-            '--ignored-modules', '_MovedItems',
-            '--output-format', 'json',
-            '--disable', ','.join(sorted(disable)),
-            '--enable', ','.join(sorted(enable)),
-        ] + paths
-
-        env = ansible_environment(args)
-
-        if paths:
-            try:
-                stdout, stderr = run_command(args, cmd, env=env, capture=True)
-                status = 0
-            except SubprocessError as ex:
-                stdout = ex.stdout
-                stderr = ex.stderr
-                status = ex.status
-
-            if stderr or status >= 32:
-                raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
-        else:
-            stdout = None
-
-        if args.explain:
-            return SanitySuccess(self.name)
-
-        if stdout:
-            messages = json.loads(stdout)
-        else:
-            messages = []
+        contexts = {}
+        remaining_paths = set(paths)
+
+        def add_context(available_paths, context_name, context_filter):
+            """
+            :type available_paths: set[str]
+            :type context_name: str
+            :type context_filter: (str) -> bool
+            """
+            filtered_paths = set(p for p in available_paths if context_filter(p))
+            contexts[context_name] = sorted(filtered_paths)
+            available_paths -= filtered_paths
+
+        add_context(remaining_paths, 'ansible-test', lambda p: p.startswith('test/runner/'))
+        add_context(remaining_paths, 'units', lambda p: p.startswith('test/units/'))
+        add_context(remaining_paths, 'test', lambda p: p.startswith('test/'))
+        add_context(remaining_paths, 'hacking', lambda p: p.startswith('hacking/'))
+        add_context(remaining_paths, 'modules', lambda p: p.startswith('lib/ansible/modules/'))
+        add_context(remaining_paths, 'module_utils', lambda p: p.startswith('lib/ansible/module_utils/'))
+        add_context(remaining_paths, 'ansible', lambda p: True)
+
+        messages = []
+        context_times = []
+
+        test_start = datetime.datetime.utcnow()
+
+        for context in sorted(contexts):
+            context_paths = contexts[context]
+
+            if not context_paths:
+                continue
+
+            context_start = datetime.datetime.utcnow()
+            messages += self.pylint(args, context, context_paths)
+            context_end = datetime.datetime.utcnow()
+
+            context_times.append('%s: %d (%s)' % (context, len(context_paths), context_end - context_start))
+
+        test_end = datetime.datetime.utcnow()
+
+        for context_time in context_times:
+            display.info(context_time, verbosity=4)
+
+        display.info('total: %d (%s)' % (len(paths), test_end - test_start), verbosity=4)

         errors = [SanityMessage(
             message=m['message'].replace('\n', ' '),
@@ -127,3 +132,48 @@ class PylintTest(SanitySingleVersion):
             return SanityFailure(self.name, messages=errors)

         return SanitySuccess(self.name)
+
+    def pylint(self, args, context, paths):
+        """
+        :type args: SanityConfig
+        :param context: str
+        :param paths: list[str]
+        :return: list[dict[str, str]]
+        """
+        rcfile = 'test/sanity/pylint/config/%s' % context
+
+        if not os.path.exists(rcfile):
+            rcfile = 'test/sanity/pylint/config/default'
+
+        cmd = [
+            'python%s' % args.python_version,
+            find_executable('pylint'),
+            '--jobs', '0',
+            '--reports', 'n',
+            '--max-line-length', '160',
+            '--rcfile', rcfile,
+            '--output-format', 'json',
+        ] + paths
+
+        env = ansible_environment(args)
+
+        if paths:
+            try:
+                stdout, stderr = run_command(args, cmd, env=env, capture=True)
+                status = 0
+            except SubprocessError as ex:
+                stdout = ex.stdout
+                stderr = ex.stderr
+                status = ex.status
+
+            if stderr or status >= 32:
+                raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
+        else:
+            stdout = None
+
+        if not args.explain and stdout:
+            messages = json.loads(stdout)
+        else:
+            messages = []
+
+        return messages
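Illustrative sketch (not part of the patch): the pylint test now buckets paths into named contexts and lints each bucket with its own rcfile, falling back to `default`. Ordering matters: each path lands in the first context whose filter matches and is removed from the remaining set. The self-contained sketch below shows only that partitioning logic; the example paths are made up.

    def split_into_contexts(paths):
        """Partition paths into ordered, mutually exclusive lint contexts."""
        contexts = {}
        remaining = set(paths)

        def add_context(name, match):
            chosen = set(p for p in remaining if match(p))
            contexts[name] = sorted(chosen)
            remaining.difference_update(chosen)  # later filters never see these paths

        # order matters: 'test/runner/' must be claimed before the broader 'test/' filter
        add_context('ansible-test', lambda p: p.startswith('test/runner/'))
        add_context('test', lambda p: p.startswith('test/'))
        add_context('modules', lambda p: p.startswith('lib/ansible/modules/'))
        add_context('ansible', lambda p: True)  # catch-all for everything else

        return contexts

    print(split_into_contexts([
        'test/runner/lib/util.py',
        'test/units/test_foo.py',
        'lib/ansible/modules/ping.py',
        'lib/ansible/cli/adhoc.py',
    ]))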
@@ -15,6 +15,7 @@ from lib.util import (
     SubprocessError,
     run_command,
     parse_to_dict,
+    find_executable,
 )

 from lib.config import (
@@ -39,7 +40,8 @@ class RstcheckTest(SanitySingleVersion):
             return SanitySkipped(self.name)

         cmd = [
-            'rstcheck',
+            'python%s' % args.python_version,
+            find_executable('rstcheck'),
             '--report', 'warning',
             '--ignore-substitutions', ','.join(ignore_substitutions),
         ] + paths
@@ -44,6 +44,7 @@ class ValidateModulesTest(SanitySingleVersion):
             return SanitySkipped(self.name)

         cmd = [
+            'python%s' % args.python_version,
             'test/sanity/validate-modules/validate-modules',
             '--format', 'json',
         ] + paths
@@ -15,6 +15,7 @@ from lib.sanity import (
 from lib.util import (
     SubprocessError,
     run_command,
+    find_executable,
 )

 from lib.config import (
@@ -36,7 +37,8 @@ class YamllintTest(SanitySingleVersion):
             return SanitySkipped(self.name)

         cmd = [
-            'yamllint',
+            'python%s' % args.python_version,
+            find_executable('yamllint'),
             '--format', 'parsable',
         ] + paths
@@ -2,15 +2,22 @@

 from __future__ import absolute_import, print_function

+import atexit
 import errno
+import filecmp
 import inspect
+import json
 import os
 import pipes
 import pkgutil
-import shutil
-import subprocess
+import random
 import re
+import shutil
+import stat
+import string
+import subprocess
 import sys
+import tempfile
 import time

 try:
@@ -19,6 +26,23 @@ except ImportError:
     from abc import ABCMeta
     ABC = ABCMeta('ABC', (), {})

+DOCKER_COMPLETION = {}
+
+coverage_path = ''  # pylint: disable=locally-disabled, invalid-name
+
+
+def get_docker_completion():
+    """
+    :rtype: dict[str, str]
+    """
+    if not DOCKER_COMPLETION:
+        with open('test/runner/completion/docker.txt', 'r') as completion_fd:
+            images = completion_fd.read().splitlines()
+
+        DOCKER_COMPLETION.update(dict((i.split('@')[0], i) for i in images))
+
+    return DOCKER_COMPLETION
+
+
 def is_shippable():
     """
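Illustrative sketch (not part of the patch): `get_docker_completion()` is what lets the completion file pin images by digest (`name@sha256:...` entries) while users keep typing the short name, and `docker_qualify_image()` below consumes that map. The standalone sketch mirrors the mapping and qualification logic; the sample entries and digests are placeholders.

    def parse_docker_completion(lines):
        """Map short image names to their digest-pinned entries; unpinned names map to themselves."""
        return dict((line.split('@')[0], line) for line in lines if line.strip())

    completion = parse_docker_completion([
        'centos7@sha256:0123456789abcdef',  # placeholder digest
        'default@sha256:fedcba9876543210',  # placeholder digest
    ])

    def qualify_image(name):
        """Expand a bare completion name to a pinned ansible/ansible tag, leaving full references alone."""
        if not name or any(c in name for c in ('/', ':')):
            return name  # already a fully qualified image reference
        return 'ansible/ansible:%s' % completion.get(name, name)

    print(qualify_image('default'))       # -> ansible/ansible:default@sha256:fedcba9876543210
    print(qualify_image('ubuntu:16.04'))  # unchanged, contains ':'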
@@ -35,6 +59,51 @@ def remove_file(path):
         os.remove(path)


+def find_pip(path=None, version=None):
+    """
+    :type path: str | None
+    :type version: str | None
+    :rtype: str
+    """
+    if version:
+        version_info = version.split('.')
+        python_bin = find_executable('python%s' % version, path=path)
+    else:
+        version_info = sys.version_info
+        python_bin = sys.executable
+
+    choices = (
+        'pip%s' % '.'.join(str(i) for i in version_info[:2]),
+        'pip%s' % version_info[0],
+        'pip',
+    )
+
+    pip = None
+
+    for choice in choices:
+        pip = find_executable(choice, required=False, path=path)
+
+        if pip:
+            break
+
+    if not pip:
+        raise ApplicationError('Required program not found: %s' % ', '.join(choices))
+
+    with open(pip) as pip_fd:
+        shebang = pip_fd.readline().strip()
+
+    if not shebang.startswith('#!') or ' ' in shebang:
+        raise ApplicationError('Unexpected shebang in "%s": %s' % (pip, shebang))
+
+    our_python = os.path.realpath(python_bin)
+    pip_python = os.path.realpath(shebang[2:])
+
+    if our_python != pip_python and not filecmp.cmp(our_python, pip_python, False):
+        raise ApplicationError('Current interpreter "%s" does not match "%s" interpreter "%s".' % (our_python, pip, pip_python))
+
+    return pip
+
+
 def find_executable(executable, cwd=None, path=None, required=True):
     """
     :type executable: str
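Illustrative sketch (not part of the patch): the "use correct pip version" item is implemented by `find_pip()`, which prefers `pipX.Y`, then `pipX`, then `pip`, and then verifies that the chosen script's shebang points at the interpreter ansible-test intends to use. The reduced sketch below covers only the shebang check; the function name and error type are illustrative.

    import filecmp
    import os

    def pip_matches_interpreter(pip_script, python_bin):
        """Return True if a pip entry-point script is bound to the given interpreter."""
        with open(pip_script) as fd:
            shebang = fd.readline().strip()

        if not shebang.startswith('#!') or ' ' in shebang:
            raise RuntimeError('unexpected shebang in %r: %s' % (pip_script, shebang))

        our_python = os.path.realpath(python_bin)   # resolve symlinks such as python3 -> python3.6
        pip_python = os.path.realpath(shebang[2:])  # interpreter named by the shebang

        # identical path, or byte-identical binaries, both count as a match
        return our_python == pip_python or filecmp.cmp(our_python, pip_python, False)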
@@ -87,6 +156,104 @@ def find_executable(executable, cwd=None, path=None, required=True):
     return match


+def intercept_command(args, cmd, target_name, capture=False, env=None, data=None, cwd=None, python_version=None, path=None):
+    """
+    :type args: TestConfig
+    :type cmd: collections.Iterable[str]
+    :type target_name: str
+    :type capture: bool
+    :type env: dict[str, str] | None
+    :type data: str | None
+    :type cwd: str | None
+    :type python_version: str | None
+    :type path: str | None
+    :rtype: str | None, str | None
+    """
+    if not env:
+        env = common_environment()
+
+    cmd = list(cmd)
+    inject_path = get_coverage_path(args)
+    config_path = os.path.join(inject_path, 'injector.json')
+    version = python_version or args.python_version
+    interpreter = find_executable('python%s' % version, path=path)
+    coverage_file = os.path.abspath(os.path.join(inject_path, '..', 'output', '%s=%s=%s=%s=coverage' % (
+        args.command, target_name, args.coverage_label or 'local-%s' % version, 'python-%s' % version)))
+
+    env['PATH'] = inject_path + os.pathsep + env['PATH']
+    env['ANSIBLE_TEST_PYTHON_VERSION'] = version
+    env['ANSIBLE_TEST_PYTHON_INTERPRETER'] = interpreter
+
+    config = dict(
+        python_interpreter=interpreter,
+        coverage_file=coverage_file if args.coverage else None,
+    )
+
+    if not args.explain:
+        with open(config_path, 'w') as config_fd:
+            json.dump(config, config_fd, indent=4, sort_keys=True)
+
+    return run_command(args, cmd, capture=capture, env=env, data=data, cwd=cwd)
+
+
+def get_coverage_path(args):
+    """
+    :type args: TestConfig
+    :rtype: str
+    """
+    global coverage_path  # pylint: disable=locally-disabled, global-statement, invalid-name
+
+    if coverage_path:
+        return os.path.join(coverage_path, 'coverage')
+
+    prefix = 'ansible-test-coverage-'
+    tmp_dir = '/tmp'
+
+    if args.explain:
+        return os.path.join(tmp_dir, '%stmp' % prefix, 'coverage')
+
+    src = os.path.abspath(os.path.join(os.getcwd(), 'test/runner/injector/'))
+
+    coverage_path = tempfile.mkdtemp('', prefix, dir=tmp_dir)
+    os.chmod(coverage_path, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
+
+    shutil.copytree(src, os.path.join(coverage_path, 'coverage'))
+    shutil.copy('.coveragerc', os.path.join(coverage_path, 'coverage', '.coveragerc'))
+
+    for root, dir_names, file_names in os.walk(coverage_path):
+        for name in dir_names + file_names:
+            os.chmod(os.path.join(root, name), stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
+
+    for directory in 'output', 'logs':
+        os.mkdir(os.path.join(coverage_path, directory))
+        os.chmod(os.path.join(coverage_path, directory), stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
+
+    atexit.register(cleanup_coverage_dir)
+
+    return os.path.join(coverage_path, 'coverage')
+
+
+def cleanup_coverage_dir():
+    """Copy over coverage data from temporary directory and purge temporary directory."""
+    output_dir = os.path.join(coverage_path, 'output')
+
+    for filename in os.listdir(output_dir):
+        src = os.path.join(output_dir, filename)
+        dst = os.path.join(os.getcwd(), 'test', 'results', 'coverage')
+        shutil.copy(src, dst)
+
+    logs_dir = os.path.join(coverage_path, 'logs')
+
+    for filename in os.listdir(logs_dir):
+        random_suffix = ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(8))
+        new_name = '%s.%s.log' % (os.path.splitext(os.path.basename(filename))[0], random_suffix)
+        src = os.path.join(logs_dir, filename)
+        dst = os.path.join(os.getcwd(), 'test', 'results', 'logs', new_name)
+        shutil.copy(src, dst)
+
+    shutil.rmtree(coverage_path)
+
+
 def run_command(args, cmd, capture=False, env=None, data=None, cwd=None, always=False, stdin=None, stdout=None,
                 cmd_verbosity=1, str_errors='strict'):
     """
@@ -459,6 +626,8 @@ def docker_qualify_image(name):
     if not name or any((c in name) for c in ('/', ':')):
         return name

+    name = get_docker_completion().get(name, name)
+
     return 'ansible/ansible:%s' % name
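Illustrative sketch (not part of the patch): `intercept_command()` drops an `injector.json` next to the injector scripts, prepends that directory to `PATH`, and exports the chosen interpreter so wrapped commands run under the right Python and write coverage to a predictable file name. The minimal sketch below covers only the environment preparation; the directory layout and file name follow the patch, while the helper name and the suggested `subprocess.run` usage are assumptions.

    import json
    import os

    def prepare_injected_env(inject_path, interpreter, version, coverage_file=None):
        """Build an environment where injector wrappers on PATH pick the configured interpreter."""
        config = {
            'python_interpreter': interpreter,
            'coverage_file': coverage_file,  # None disables coverage collection
        }

        with open(os.path.join(inject_path, 'injector.json'), 'w') as fd:
            json.dump(config, fd, indent=4, sort_keys=True)

        env = dict(os.environ)
        env['PATH'] = inject_path + os.pathsep + env['PATH']  # injector wrappers win over system tools
        env['ANSIBLE_TEST_PYTHON_VERSION'] = version
        env['ANSIBLE_TEST_PYTHON_INTERPRETER'] = interpreter
        return env

    # usage sketch: subprocess.run(cmd, env=prepare_injected_env('/tmp/inject', '/usr/bin/python3.6', '3.6'))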
@@ -12,6 +12,8 @@ from lib.util import (
     ApplicationError,
     display,
     raw_command,
+    find_pip,
+    get_docker_completion,
 )

 from lib.delegation import (
@@ -112,7 +114,7 @@ def parse_args():
     except ImportError:
         if '--requirements' not in sys.argv:
             raise
-        raw_command(generate_pip_install('ansible-test'))
+        raw_command(generate_pip_install(find_pip(), 'ansible-test'))
         import argparse

     try:
@@ -582,6 +584,10 @@ def add_extra_docker_options(parser, integration=True):
                         dest='docker_pull',
                         help='do not explicitly pull the latest docker images')

+    docker.add_argument('--docker-keep-git',
+                        action='store_true',
+                        help='transfer git related files into the docker container')
+
    if not integration:
        return

@@ -626,8 +632,7 @@ def complete_docker(prefix, parsed_args, **_):
     """
     del parsed_args

-    with open('test/runner/completion/docker.txt', 'r') as completion_fd:
-        images = completion_fd.read().splitlines()
+    images = sorted(get_docker_completion().keys())

     return [i for i in images if i.startswith(prefix)]
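Illustrative sketch (not part of the patch): on the CLI side, the new `--docker-keep-git` flag is a plain store-true option, and docker image completion is now derived from the pinned-image map instead of re-reading the completion file. The argparse sketch below is a hedged stand-in; the surrounding parser wiring and the placeholder map values are assumptions.

    import argparse

    def make_parser(docker_completion):
        parser = argparse.ArgumentParser(prog='ansible-test')

        docker = parser.add_argument_group(title='docker arguments')
        docker.add_argument('--docker-keep-git',
                            action='store_true',
                            help='transfer git related files into the docker container')

        def complete_docker(prefix):
            # completion candidates come from the pinned-image map's short names
            return [name for name in sorted(docker_completion) if name.startswith(prefix)]

        parser.set_defaults(complete_docker=complete_docker)
        return parser

    # placeholder completion entries for demonstration only
    args = make_parser({'default': 'default@sha256:...', 'centos7': 'centos7@sha256:...'}).parse_args(['--docker-keep-git'])
    print(args.docker_keep_git)  # True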
@@ -1,19 +0,0 @@
-#!/bin/sh
-
-cd test/runner/
-
-pylint --max-line-length=160 --reports=n ./*.py ./*/*.py ./*/*/*.py \
-    --jobs 2 \
-    --rcfile /dev/null \
-    --function-rgx '[a-z_][a-z0-9_]{2,40}$' \
-    --method-rgx '[a-z_][a-z0-9_]{2,40}$' \
-    -d unused-import \
-    -d too-few-public-methods \
-    -d too-many-arguments \
-    -d too-many-branches \
-    -d too-many-locals \
-    -d too-many-statements \
-    -d too-many-nested-blocks \
-    -d too-many-instance-attributes \
-    -d too-many-lines \
-    -d too-many-return-statements
@@ -31,7 +31,7 @@ done

 # GREP_FORMAT_WHITELIST has been formatted so that wordsplitting is wanted. Therefore no double quotes around the var
 # shellcheck disable=SC2086
-egrep -r 'expanduser' lib/ansible/modules | egrep -v $GREP_FORMAT_WHITELIST
+egrep -r 'expanduser' lib/ansible/modules | egrep '\.py:' | egrep -v $GREP_FORMAT_WHITELIST

 if [ $? -ne 1 ]; then
     printf 'The module(s) listed above use expanduser.\n'
19 test/sanity/pylint/config/ansible-test Normal file
@@ -0,0 +1,19 @@
+[MESSAGES CONTROL]
+
+disable=
+    no-self-use,
+    too-few-public-methods,
+    too-many-arguments,
+    too-many-branches,
+    too-many-instance-attributes,
+    too-many-lines,
+    too-many-locals,
+    too-many-nested-blocks,
+    too-many-return-statements,
+    too-many-statements,
+    unused-import,
+
+[BASIC]
+
+method-rgx=[a-z_][a-z0-9_]{2,40}$
+function-rgx=[a-z_][a-z0-9_]{2,40}$
107 test/sanity/pylint/config/default Normal file
@@ -0,0 +1,107 @@
+[MESSAGES CONTROL]
+
+disable=
+    abstract-method,
+    access-member-before-definition,
+    anomalous-backslash-in-string,
+    arguments-differ,
+    assignment-from-no-return,
+    attribute-defined-outside-init,
+    bad-continuation,
+    bad-indentation,
+    bad-mcs-classmethod-argument,
+    bad-whitespace,
+    bare-except,
+    blacklisted-name,
+    broad-except,
+    cell-var-from-loop,
+    consider-iterating-dictionary,
+    consider-merging-isinstance,
+    consider-using-enumerate,
+    consider-using-ternary,
+    deprecated-lambda,
+    deprecated-method,
+    deprecated-module,
+    eval-used,
+    exec-used,
+    expression-not-assigned,
+    fixme,
+    function-redefined,
+    global-at-module-level,
+    global-statement,
+    global-variable-not-assigned,
+    global-variable-undefined,
+    import-error,
+    import-self,
+    invalid-name,
+    invalid-sequence-index,
+    invalid-unary-operand-type,
+    len-as-condition,
+    line-too-long,
+    literal-comparison,
+    locally-disabled,
+    method-hidden,
+    misplaced-comparison-constant,
+    missing-docstring,
+    no-else-return,
+    no-init,
+    no-member,
+    no-name-in-module,
+    no-self-use,
+    no-value-for-parameter,
+    non-iterator-returned,
+    not-a-mapping,
+    not-an-iterable,
+    not-callable,
+    old-style-class,
+    pointless-statement,
+    pointless-string-statement,
+    protected-access,
+    redefined-argument-from-local,
+    redefined-builtin,
+    redefined-outer-name,
+    redefined-variable-type,
+    reimported,
+    relative-import,
+    signature-differs,
+    simplifiable-if-statement,
+    super-init-not-called,
+    superfluous-parens,
+    too-few-public-methods,
+    too-many-ancestors,
+    too-many-arguments,
+    too-many-boolean-expressions,
+    too-many-branches,
+    too-many-function-args,
+    too-many-instance-attributes,
+    too-many-lines,
+    too-many-locals,
+    too-many-nested-blocks,
+    too-many-public-methods,
+    too-many-return-statements,
+    too-many-statements,
+    trailing-comma-tuple,
+    unbalanced-tuple-unpacking,
+    undefined-loop-variable,
+    unexpected-keyword-arg,
+    ungrouped-imports,
+    unidiomatic-typecheck,
+    unneeded-not,
+    unsubscriptable-object,
+    unsupported-assignment-operation,
+    unsupported-delete-operation,
+    unsupported-membership-test,
+    unused-argument,
+    unused-import,
+    unused-variable,
+    unused-wildcard-import,
+    used-before-assignment,
+    useless-super-delegation,
+    wildcard-import,
+    wrong-import-order,
+    wrong-import-position,
+
+[TYPECHECK]
+
+ignored-modules=
+    _MovedItems,
@@ -1,99 +0,0 @@
-abstract-method
-access-member-before-definition
-anomalous-backslash-in-string
-arguments-differ
-assignment-from-no-return
-attribute-defined-outside-init
-bad-continuation
-bad-indentation
-bad-mcs-classmethod-argument
-bad-whitespace
-bare-except
-blacklisted-name
-broad-except
-cell-var-from-loop
-consider-iterating-dictionary
-consider-merging-isinstance
-consider-using-enumerate
-consider-using-ternary
-deprecated-lambda
-deprecated-method
-deprecated-module
-eval-used
-exec-used
-expression-not-assigned
-fixme
-function-redefined
-global-at-module-level
-global-statement
-global-variable-not-assigned
-global-variable-undefined
-import-error
-import-self
-invalid-name
-invalid-sequence-index
-invalid-unary-operand-type
-len-as-condition
-line-too-long
-literal-comparison
-locally-disabled
-method-hidden
-misplaced-comparison-constant
-missing-docstring
-no-else-return
-no-init
-no-member
-no-name-in-module
-no-self-use
-no-value-for-parameter
-non-iterator-returned
-not-a-mapping
-not-an-iterable
-not-callable
-old-style-class
-pointless-statement
-pointless-string-statement
-protected-access
-pylint
-redefined-argument-from-local
-redefined-builtin
-redefined-outer-name
-redefined-variable-type
-reimported
-relative-import
-signature-differs
-simplifiable-if-statement
-super-init-not-called
-superfluous-parens
-too-few-public-methods
-too-many-ancestors
-too-many-arguments
-too-many-boolean-expressions
-too-many-branches
-too-many-function-args
-too-many-instance-attributes
-too-many-lines
-too-many-locals
-too-many-nested-blocks
-too-many-public-methods
-too-many-return-statements
-too-many-statements
-trailing-comma-tuple
-unbalanced-tuple-unpacking
-undefined-loop-variable
-ungrouped-imports
-unidiomatic-typecheck
-unneeded-not
-unsubscriptable-object
-unsupported-assignment-operation
-unsupported-delete-operation
-unsupported-membership-test
-unused-argument
-unused-import
-unused-variable
-unused-wildcard-import
-used-before-assignment
-useless-super-delegation
-wildcard-import
-wrong-import-order
-wrong-import-position
@@ -23,6 +23,7 @@ import abc
 import argparse
 import ast
 import json
+import errno
 import os
 import re
 import subprocess
@@ -1253,7 +1254,20 @@ class GitCache(object):
         else:
             self.base_tree = []

-        self.head_tree = self._git(['ls-tree', '-r', '--name-only', 'HEAD', 'lib/ansible/modules/'])
+        try:
+            self.head_tree = self._git(['ls-tree', '-r', '--name-only', 'HEAD', 'lib/ansible/modules/'])
+        except GitError as ex:
+            if ex.status == 128:
+                # fallback when there is no .git directory
+                self.head_tree = self._get_module_files()
+            else:
+                raise
+        except OSError as ex:
+            if ex.errno == errno.ENOENT:
+                # fallback when git is not installed
+                self.head_tree = self._get_module_files()
+            else:
+                raise

         self.base_module_paths = dict((os.path.basename(p), p) for p in self.base_tree if os.path.splitext(p)[1] in ('.py', '.ps1'))

@@ -1268,14 +1282,33 @@ class GitCache(object):
             if os.path.islink(path):
                 self.head_aliased_modules.add(os.path.basename(os.path.realpath(path)))

+    @staticmethod
+    def _get_module_files():
+        module_files = []
+
+        for (dir_path, dir_names, file_names) in os.walk('lib/ansible/modules/'):
+            for file_name in file_names:
+                module_files.append(os.path.join(dir_path, file_name))
+
+        return module_files
+
     @staticmethod
     def _git(args):
         cmd = ['git'] + args
         p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
         stdout, stderr = p.communicate()
+        if p.returncode != 0:
+            raise GitError(stderr, p.returncode)
         return stdout.decode('utf-8').splitlines()


+class GitError(Exception):
+    def __init__(self, message, status):
+        super(GitError, self).__init__(message)
+
+        self.status = status
+
+
 if __name__ == '__main__':
     try:
         main()
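Illustrative sketch (not part of the patch): the validate-modules change wraps the `git ls-tree` call so the sanity test still works when the source tree is not a git checkout (git exits with status 128) or git is missing entirely (ENOENT from Popen), falling back to walking `lib/ansible/modules/`. A condensed standalone version of that fallback pattern; the function names and the RuntimeError are illustrative.

    import errno
    import os
    import subprocess

    def list_module_files(git_args=('ls-tree', '-r', '--name-only', 'HEAD', 'lib/ansible/modules/')):
        """Prefer git's view of the tree, but degrade gracefully without git or a .git directory."""
        try:
            p = subprocess.Popen(['git'] + list(git_args), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            stdout, _stderr = p.communicate()
            if p.returncode == 128:
                # not a git repository: fall back to the filesystem
                return walk_module_files()
            if p.returncode != 0:
                raise RuntimeError('git failed with status %d' % p.returncode)
            return stdout.decode('utf-8').splitlines()
        except OSError as ex:
            if ex.errno == errno.ENOENT:
                # git is not installed: fall back to the filesystem
                return walk_module_files()
            raise

    def walk_module_files(base='lib/ansible/modules/'):
        return [os.path.join(dir_path, name)
                for dir_path, _dirs, names in os.walk(base)
                for name in names]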
@@ -6,8 +6,9 @@ declare -a args
 IFS='/:' read -ra args <<< "$1"

 image="${args[1]}"
-target="posix/ci/cloud/group${args[2]}/"
+python="${args[2]}"
+target="posix/ci/cloud/group${args[3]}/"

 # shellcheck disable=SC2086
 ansible-test integration --color -v --retry-on-error "${target}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
-    --docker "${image}" --changed-all-target "${target}smoketest/"
+    --docker "${image}" --python "${python}" --changed-all-target "${target}smoketest/"
@@ -15,7 +15,10 @@ target="network/ci/"
 # python versions to test in order
 # all versions run full tests
 python_versions=(
+    2.6
     2.7
+    3.5
+    3.6
 )

 if [ -s /tmp/network.txt ]; then
@@ -37,13 +40,8 @@ else
     )
 fi

-retry.py pip install tox --disable-pip-version-check
-
 for version in "${python_versions[@]}"; do
-    # clean up between test runs until we switch from --tox to --docker
-    rm -rf ~/.ansible/{cp,pc,tmp}/
-
     # shellcheck disable=SC2086
-    ansible-test network-integration --color -v --retry-on-error "${target}" --tox --python "${version}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
-        "${platforms[@]}"
+    ansible-test network-integration --color -v --retry-on-error "${target}" --docker default --python "${version}" \
+        ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} "${platforms[@]}"
 done
@@ -4,20 +4,17 @@ set -o pipefail

 shippable.py

-retry.py apt-get update -qq
-retry.py apt-get install -qq \
-    shellcheck \
-
-retry.py pip install tox --disable-pip-version-check
-
 echo '{"verified": false, "results": []}' > test/results/bot/ansible-test-failure.json

+if [ "${BASE_BRANCH:-}" ]; then
+    base_branch="origin/${BASE_BRANCH}"
+else
+    base_branch=""
+fi
 # shellcheck disable=SC2086
-ansible-test compile --failure-ok --color -v --junit --requirements --coverage ${CHANGED:+"$CHANGED"}
+ansible-test compile --failure-ok --color -v --junit --coverage ${CHANGED:+"$CHANGED"} --docker default
 # shellcheck disable=SC2086
-ansible-test sanity --failure-ok --color -v --junit --tox --skip-test ansible-doc --skip-test import --python 3.5 --coverage ${CHANGED:+"$CHANGED"}
-# shellcheck disable=SC2086
-ansible-test sanity --failure-ok --color -v --junit --tox --test ansible-doc --test import --coverage ${CHANGED:+"$CHANGED"}
+ansible-test sanity --failure-ok --color -v --junit --coverage ${CHANGED:+"$CHANGED"} --docker default --docker-keep-git --base-branch "${base_branch}"

 rm test/results/bot/ansible-test-failure.json
@@ -7,7 +7,5 @@ IFS='/:' read -ra args <<< "$1"

 version="${args[1]}"

-retry.py pip install tox --disable-pip-version-check
-
 # shellcheck disable=SC2086
-ansible-test units --color -v --tox --python "${version}" --coverage ${CHANGED:+"$CHANGED"} \
+ansible-test units --color -v --docker default --python "${version}" --coverage ${CHANGED:+"$CHANGED"} \
@@ -40,12 +40,7 @@ else
     )
 fi

-retry.py pip install tox --disable-pip-version-check
-
 for version in "${python_versions[@]}"; do
-    # clean up between test runs until we switch from --tox to --docker
-    rm -rf ~/.ansible/{cp,pc,tmp}/
-
     changed_all_target="all"

     if [ "${version}" == "2.7" ]; then
@@ -73,6 +68,6 @@ for version in "${python_versions[@]}"; do
     fi

     # shellcheck disable=SC2086
-    ansible-test windows-integration --color -v --retry-on-error "${ci}" --tox --python "${version}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
+    ansible-test windows-integration --color -v --retry-on-error "${ci}" --docker default --python "${version}" ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
         "${platforms[@]}" --changed-all-target "${changed_all_target}"
 done