Update ansible-test sanity command. (#31958)

* Use correct pip version in ansible-test.
* Add git fallback for validate-modules.
* Run sanity tests in a docker container.
* Use correct python version for sanity tests.
* Pin docker completion images and add default.
* Split pylint execution into multiple contexts.
* Only test .py files in use-argspec-type-path test.
* Accept identical python interpreter name or binary.
* Switch cloud tests to default container.
* Remove unused extras from pip install.
* Filter out empty pip commands.
* Don't force running of pip list.
* Support delegation for windows and network tests.
* Fix ansible-test python version usage.
* Fix ansible-test python version skipping.
* Use absolute path for log in ansible-test.
* Run vyos_command test on python 3.
* Fix windows/network instance persistence.
* Add `test/cache` dir to classification.
* Enable more python versions for network tests.
* Fix cs_router test.
This commit is contained in:
Matt Clay 2017-10-26 00:21:46 -07:00 committed by GitHub
parent 602a618e60
commit cf1337ca9a
37 changed files with 788 additions and 456 deletions

View file

@ -49,6 +49,9 @@ def ansible_environment(args, color=True):
env.update(ansible)
if args.debug:
env.update(dict(ANSIBLE_DEBUG='true'))
env.update(dict(
ANSIBLE_DEBUG='true',
ANSIBLE_LOG_PATH=os.path.abspath('test/results/logs/debug.log'),
))
return env

View file

@ -367,6 +367,9 @@ class PathMapper(object):
return minimal
if path.startswith('test/cache/'):
return minimal
if path.startswith('test/compile/'):
return {
'compile': 'all',

View file

@ -43,6 +43,7 @@ class EnvironmentConfig(CommonConfig):
self.docker_privileged = args.docker_privileged if 'docker_privileged' in args else False # type: bool
self.docker_util = docker_qualify_image(args.docker_util if 'docker_util' in args else '') # type: str
self.docker_pull = args.docker_pull if 'docker_pull' in args else False # type: bool
self.docker_keep_git = args.docker_keep_git if 'docker_keep_git' in args else False # type: bool
self.tox_sitepackages = args.tox_sitepackages # type: bool
@ -53,7 +54,7 @@ class EnvironmentConfig(CommonConfig):
self.requirements = args.requirements # type: bool
if self.python == 'default':
self.python = '.'.join(str(i) for i in sys.version_info[:2])
self.python = None
self.python_version = self.python or '.'.join(str(i) for i in sys.version_info[:2])

View file

@ -8,6 +8,7 @@ import traceback
import uuid
import errno
import time
import shutil
from lib.http import (
HttpClient,
@ -35,13 +36,14 @@ AWS_ENDPOINTS = {
class AnsibleCoreCI(object):
"""Client for Ansible Core CI services."""
def __init__(self, args, platform, version, stage='prod', persist=True, name=None):
def __init__(self, args, platform, version, stage='prod', persist=True, load=True, name=None):
"""
:type args: EnvironmentConfig
:type platform: str
:type version: str
:type stage: str
:type persist: bool
:type load: bool
:type name: str
"""
self.args = args
@ -106,7 +108,7 @@ class AnsibleCoreCI(object):
self.path = os.path.expanduser('~/.ansible/test/instances/%s-%s' % (self.name, self.stage))
if persist and self._load():
if persist and load and self._load():
try:
display.info('Checking existing %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
verbosity=1)
@ -125,7 +127,7 @@ class AnsibleCoreCI(object):
self.instance_id = None
self.endpoint = None
else:
elif not persist:
self.instance_id = None
self.endpoint = None
self._clear()
@ -160,6 +162,11 @@ class AnsibleCoreCI(object):
def start(self):
"""Start instance."""
if self.started:
display.info('Skipping started %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
verbosity=1)
return
if is_shippable():
return self.start_shippable()
@ -289,11 +296,6 @@ class AnsibleCoreCI(object):
def _start(self, auth):
"""Start instance."""
if self.started:
display.info('Skipping started %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
verbosity=1)
return
display.info('Initializing new %s/%s instance %s.' % (self.platform, self.version, self.instance_id), verbosity=1)
if self.platform == 'windows':
@ -413,6 +415,13 @@ class AnsibleCoreCI(object):
config = json.loads(data)
return self.load(config)
def load(self, config):
"""
:type config: dict[str, str]
:rtype: bool
"""
self.instance_id = config['instance_id']
self.endpoint = config['endpoint']
self.started = True
@ -424,16 +433,23 @@ class AnsibleCoreCI(object):
if self.args.explain:
return
config = self.save()
make_dirs(os.path.dirname(self.path))
with open(self.path, 'w') as instance_fd:
config = dict(
instance_id=self.instance_id,
endpoint=self.endpoint,
)
instance_fd.write(json.dumps(config, indent=4, sort_keys=True))
def save(self):
"""
:rtype: dict[str, str]
"""
return dict(
platform_version='%s/%s' % (self.platform, self.version),
instance_id=self.instance_id,
endpoint=self.endpoint,
)
@staticmethod
def _create_http_error(response):
"""
@ -472,20 +488,33 @@ class CoreHttpError(HttpError):
class SshKey(object):
"""Container for SSH key used to connect to remote instances."""
KEY_NAME = 'id_rsa'
PUB_NAME = 'id_rsa.pub'
def __init__(self, args):
"""
:type args: EnvironmentConfig
"""
tmp = os.path.expanduser('~/.ansible/test/')
cache_dir = 'test/cache'
self.key = os.path.join(tmp, 'id_rsa')
self.pub = os.path.join(tmp, 'id_rsa.pub')
self.key = os.path.join(cache_dir, self.KEY_NAME)
self.pub = os.path.join(cache_dir, self.PUB_NAME)
if not os.path.isfile(self.key) or not os.path.isfile(self.pub):
base_dir = os.path.expanduser('~/.ansible/test/')
key = os.path.join(base_dir, self.KEY_NAME)
pub = os.path.join(base_dir, self.PUB_NAME)
if not os.path.isfile(self.pub):
if not args.explain:
make_dirs(tmp)
make_dirs(base_dir)
run_command(args, ['ssh-keygen', '-q', '-t', 'rsa', '-N', '', '-f', self.key])
if not os.path.isfile(key) or not os.path.isfile(pub):
run_command(args, ['ssh-keygen', '-q', '-t', 'rsa', '-N', '', '-f', key])
if not args.explain:
shutil.copy2(key, self.key)
shutil.copy2(pub, self.pub)
if args.explain:
self.pub_contents = None

View file

@ -102,10 +102,10 @@ def delegate_tox(args, exclude, require):
:type require: list[str]
"""
if args.python:
versions = args.python,
versions = args.python_version,
if args.python not in SUPPORTED_PYTHON_VERSIONS:
raise ApplicationError('tox does not support Python version %s' % args.python)
if args.python_version not in SUPPORTED_PYTHON_VERSIONS:
raise ApplicationError('tox does not support Python version %s' % args.python_version)
else:
versions = SUPPORTED_PYTHON_VERSIONS
@ -189,7 +189,12 @@ def delegate_docker(args, exclude, require):
with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as local_source_fd:
try:
if not args.explain:
lib.pytar.create_tarfile(local_source_fd.name, '.', lib.pytar.ignore)
if args.docker_keep_git:
tar_filter = lib.pytar.AllowGitTarFilter()
else:
tar_filter = lib.pytar.DefaultTarFilter()
lib.pytar.create_tarfile(local_source_fd.name, '.', tar_filter)
if util_image:
util_options = [

View file

@ -11,12 +11,7 @@ import tempfile
import time
import textwrap
import functools
import shutil
import stat
import pipes
import random
import string
import atexit
import hashlib
import lib.pytar
@ -45,11 +40,12 @@ from lib.util import (
SubprocessError,
display,
run_command,
common_environment,
intercept_command,
remove_tree,
make_dirs,
is_shippable,
is_binary_file,
find_pip,
find_executable,
raw_command,
)
@ -110,8 +106,6 @@ SUPPORTED_PYTHON_VERSIONS = (
COMPILE_PYTHON_VERSIONS = SUPPORTED_PYTHON_VERSIONS
coverage_path = '' # pylint: disable=locally-disabled, invalid-name
def check_startup():
"""Checks to perform at startup before running commands."""
@ -163,23 +157,27 @@ def install_command_requirements(args):
if args.junit:
packages.append('junit-xml')
commands = [generate_pip_install(args.command, packages=packages)]
pip = find_pip(version=args.python_version)
commands = [generate_pip_install(pip, args.command, packages=packages)]
if isinstance(args, IntegrationConfig):
for cloud_platform in get_cloud_platforms(args):
commands.append(generate_pip_install('%s.cloud.%s' % (args.command, cloud_platform)))
commands.append(generate_pip_install(pip, '%s.cloud.%s' % (args.command, cloud_platform)))
commands = [cmd for cmd in commands if cmd]
# only look for changes when more than one requirements file is needed
detect_pip_changes = len(commands) > 1
# first pass to install requirements, changes expected unless environment is already set up
changes = run_pip_commands(args, commands, detect_pip_changes)
changes = run_pip_commands(args, pip, commands, detect_pip_changes)
if not changes:
return # no changes means we can stop early
# second pass to check for conflicts in requirements, changes are not expected here
changes = run_pip_commands(args, commands, detect_pip_changes)
changes = run_pip_commands(args, pip, commands, detect_pip_changes)
if not changes:
return # no changes means no conflicts
@ -188,16 +186,17 @@ def install_command_requirements(args):
'\n'.join((' '.join(pipes.quote(c) for c in cmd) for cmd in changes)))
def run_pip_commands(args, commands, detect_pip_changes=False):
def run_pip_commands(args, pip, commands, detect_pip_changes=False):
"""
:type args: EnvironmentConfig
:type pip: str
:type commands: list[list[str]]
:type detect_pip_changes: bool
:rtype: list[list[str]]
"""
changes = []
after_list = pip_list(args) if detect_pip_changes else None
after_list = pip_list(args, pip) if detect_pip_changes else None
for cmd in commands:
if not cmd:
@ -217,10 +216,10 @@ def run_pip_commands(args, commands, detect_pip_changes=False):
# AttributeError: 'Requirement' object has no attribute 'project_name'
# See: https://bugs.launchpad.net/ubuntu/xenial/+source/python-pip/+bug/1626258
# Upgrading pip works around the issue.
run_command(args, ['pip', 'install', '--upgrade', 'pip'])
run_command(args, [pip, 'install', '--upgrade', 'pip'])
run_command(args, cmd)
after_list = pip_list(args) if detect_pip_changes else None
after_list = pip_list(args, pip) if detect_pip_changes else None
if before_list != after_list:
changes.append(cmd)
@ -228,12 +227,13 @@ def run_pip_commands(args, commands, detect_pip_changes=False):
return changes
def pip_list(args):
def pip_list(args, pip):
"""
:type args: EnvironmentConfig
:type pip: str
:rtype: str
"""
stdout, _ = run_command(args, ['pip', 'list'], capture=True, always=True)
stdout, _ = run_command(args, [pip, 'list'], capture=True)
return stdout
@ -244,14 +244,14 @@ def generate_egg_info(args):
if os.path.isdir('lib/ansible.egg-info'):
return
run_command(args, ['python', 'setup.py', 'egg_info'], capture=args.verbosity < 3)
run_command(args, ['python%s' % args.python_version, 'setup.py', 'egg_info'], capture=args.verbosity < 3)
def generate_pip_install(command, packages=None, extras=None):
def generate_pip_install(pip, command, packages=None):
"""
:type pip: str
:type command: str
:type packages: list[str] | None
:type extras: list[str] | None
:rtype: list[str] | None
"""
constraints = 'test/runner/requirements/constraints.txt'
@ -259,15 +259,8 @@ def generate_pip_install(command, packages=None, extras=None):
options = []
requirements_list = [requirements]
if extras:
for extra in extras:
requirements_list.append('test/runner/requirements/%s.%s.txt' % (command, extra))
for requirements in requirements_list:
if os.path.exists(requirements) and os.path.getsize(requirements):
options += ['-r', requirements]
if os.path.exists(requirements) and os.path.getsize(requirements):
options += ['-r', requirements]
if packages:
options += packages
@ -275,7 +268,7 @@ def generate_pip_install(command, packages=None, extras=None):
if not options:
return None
return ['pip', 'install', '--disable-pip-version-check', '-c', constraints] + options
return [pip, 'install', '--disable-pip-version-check', '-c', constraints] + options
def command_shell(args):
@ -323,31 +316,24 @@ def command_network_integration(args):
)
all_targets = tuple(walk_network_integration_targets(include_hidden=True))
internal_targets = command_integration_filter(args, all_targets)
platform_targets = set(a for t in internal_targets for a in t.aliases if a.startswith('network/'))
internal_targets = command_integration_filter(args, all_targets, init_callback=network_init)
if args.platform:
configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)
instances = [] # type: list [lib.thread.WrappedThread]
# generate an ssh key (if needed) up front once, instead of for each instance
SshKey(args)
for platform_version in args.platform:
platform, version = platform_version.split('/', 1)
platform_target = 'network/%s/' % platform
config = configs.get(platform_version)
if platform_target not in platform_targets and 'network/basics/' not in platform_targets:
display.warning('Skipping "%s" because selected tests do not target the "%s" platform.' % (
platform_version, platform))
if not config:
continue
instance = lib.thread.WrappedThread(functools.partial(network_run, args, platform, version))
instance = lib.thread.WrappedThread(functools.partial(network_run, args, platform, version, config))
instance.daemon = True
instance.start()
instances.append(instance)
install_command_requirements(args)
while any(instance.is_alive() for instance in instances):
time.sleep(1)
@ -359,22 +345,71 @@ def command_network_integration(args):
if not args.explain:
with open(filename, 'w') as inventory_fd:
inventory_fd.write(inventory)
else:
install_command_requirements(args)
command_integration_filtered(args, internal_targets, all_targets)
def network_run(args, platform, version):
def network_init(args, internal_targets):
    """Start remote instances for the requested network platforms and record them in metadata.

    No-op when no platforms were requested or when instance configs already
    exist in metadata (so provisioning is not repeated).

    :type args: NetworkIntegrationConfig
    :type internal_targets: tuple[IntegrationTarget]
    """
    if not args.platform:
        return

    if args.metadata.instance_config is not None:
        return  # instances were already provisioned; reuse the recorded configs

    platform_targets = set(a for t in internal_targets for a in t.aliases if a.startswith('network/'))

    instances = []  # type: list [lib.thread.WrappedThread]

    # generate an ssh key (if needed) up front once, instead of for each instance
    SshKey(args)

    for platform_version in args.platform:
        platform, version = platform_version.split('/', 1)
        platform_target = 'network/%s/' % platform

        if platform_target not in platform_targets and 'network/basics/' not in platform_targets:
            # no selected test targets this platform, so skip provisioning an instance for it
            display.warning('Skipping "%s" because selected tests do not target the "%s" platform.' % (
                platform_version, platform))
            continue

        # start each platform instance on its own daemon thread so they provision in parallel
        instance = lib.thread.WrappedThread(functools.partial(network_start, args, platform, version))
        instance.daemon = True
        instance.start()
        instances.append(instance)

    while any(instance.is_alive() for instance in instances):
        time.sleep(1)

    # collect each thread's saved instance config into metadata
    args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
def network_start(args, platform, version):
    """Start a remote instance for the given network platform/version and return its saved config.

    :type args: NetworkIntegrationConfig
    :type platform: str
    :type version: str
    :rtype: dict[str, str]
    """
    core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage)
    core_ci.start()

    # save() returns a serializable config dict, not the AnsibleCoreCI instance
    return core_ci.save()
def network_run(args, platform, version, config):
"""
:type args: NetworkIntegrationConfig
:type platform: str
:type version: str
:type config: dict[str, str]
:rtype: AnsibleCoreCI
"""
core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage, load=False)
core_ci.load(config)
core_ci.wait()
manage = ManageNetworkCI(core_ci)
@ -431,19 +466,20 @@ def command_windows_integration(args):
raise ApplicationError('Use the --windows option or provide an inventory file (see %s.template).' % filename)
all_targets = tuple(walk_windows_integration_targets(include_hidden=True))
internal_targets = command_integration_filter(args, all_targets)
internal_targets = command_integration_filter(args, all_targets, init_callback=windows_init)
if args.windows:
configs = dict((config['platform_version'], config) for config in args.metadata.instance_config)
instances = [] # type: list [lib.thread.WrappedThread]
for version in args.windows:
instance = lib.thread.WrappedThread(functools.partial(windows_run, args, version))
config = configs['windows/%s' % version]
instance = lib.thread.WrappedThread(functools.partial(windows_run, args, version, config))
instance.daemon = True
instance.start()
instances.append(instance)
install_command_requirements(args)
while any(instance.is_alive() for instance in instances):
time.sleep(1)
@ -455,16 +491,36 @@ def command_windows_integration(args):
if not args.explain:
with open(filename, 'w') as inventory_fd:
inventory_fd.write(inventory)
else:
install_command_requirements(args)
try:
command_integration_filtered(args, internal_targets, all_targets)
finally:
pass
command_integration_filtered(args, internal_targets, all_targets)
def windows_run(args, version):
def windows_init(args, internal_targets):  # pylint: disable=locally-disabled, unused-argument
    """Start remote instances for the requested Windows versions and record them in metadata.

    No-op when no Windows versions were requested or when instance configs
    already exist in metadata (so provisioning is not repeated).

    :type args: WindowsIntegrationConfig
    :type internal_targets: tuple[IntegrationTarget]
    """
    if not args.windows:
        return

    if args.metadata.instance_config is not None:
        return  # instances were already provisioned; reuse the recorded configs

    instances = []  # type: list [lib.thread.WrappedThread]

    # start each version's instance on its own daemon thread so they provision in parallel
    for version in args.windows:
        instance = lib.thread.WrappedThread(functools.partial(windows_start, args, version))
        instance.daemon = True
        instance.start()
        instances.append(instance)

    while any(instance.is_alive() for instance in instances):
        time.sleep(1)

    # collect each thread's saved instance config into metadata
    args.metadata.instance_config = [instance.wait_for_result() for instance in instances]
def windows_start(args, version):
"""
:type args: WindowsIntegrationConfig
:type version: str
@ -472,6 +528,19 @@ def windows_run(args, version):
"""
core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage)
core_ci.start()
return core_ci.save()
def windows_run(args, version, config):
"""
:type args: WindowsIntegrationConfig
:type version: str
:type config: dict[str, str]
:rtype: AnsibleCoreCI
"""
core_ci = AnsibleCoreCI(args, 'windows', version, stage=args.remote_stage, load=False)
core_ci.load(config)
core_ci.wait()
manage = ManageWindowsCI(core_ci)
@ -525,10 +594,11 @@ def windows_inventory(remotes):
return inventory
def command_integration_filter(args, targets):
def command_integration_filter(args, targets, init_callback=None):
"""
:type args: IntegrationConfig
:type targets: collections.Iterable[IntegrationTarget]
:type init_callback: (IntegrationConfig, tuple[IntegrationTarget]) -> None
:rtype: tuple[IntegrationTarget]
"""
targets = tuple(target for target in targets if 'hidden/' not in target.aliases)
@ -551,6 +621,9 @@ def command_integration_filter(args, targets):
if args.start_at and not any(t.name == args.start_at for t in internal_targets):
raise ApplicationError('Start at target matches nothing: %s' % args.start_at)
if init_callback:
init_callback(args, internal_targets)
cloud_init(args, internal_targets)
if args.delegate:
@ -880,7 +953,7 @@ def command_units(args):
for version in SUPPORTED_PYTHON_VERSIONS:
# run all versions unless version given, in which case run only that version
if args.python and version != args.python:
if args.python and version != args.python_version:
continue
env = ansible_environment(args)
@ -940,7 +1013,7 @@ def command_compile(args):
for version in COMPILE_PYTHON_VERSIONS:
# run all versions unless version given, in which case run only that version
if args.python and version != args.python:
if args.python and version != args.python_version:
continue
display.info('Compile with Python %s' % version)
@ -1027,104 +1100,6 @@ def compile_version(args, python_version, include, exclude):
return TestSuccess(command, test, python_version=python_version)
def intercept_command(args, cmd, target_name, capture=False, env=None, data=None, cwd=None, python_version=None, path=None):
    """Run a command with the coverage injector directory prepended to PATH.

    Writes injector.json (selected python interpreter and optional coverage
    output file) so the injector shims can redirect interpreter invocations.

    :type args: TestConfig
    :type cmd: collections.Iterable[str]
    :type target_name: str
    :type capture: bool
    :type env: dict[str, str] | None
    :type data: str | None
    :type cwd: str | None
    :type python_version: str | None
    :type path: str | None
    :rtype: str | None, str | None
    """
    if not env:
        env = common_environment()

    cmd = list(cmd)
    inject_path = get_coverage_path(args)
    config_path = os.path.join(inject_path, 'injector.json')
    # explicit python_version argument overrides the configured version
    version = python_version or args.python_version
    interpreter = find_executable('python%s' % version, path=path)
    # coverage file name encodes command/target/label/version so results can be merged later
    coverage_file = os.path.abspath(os.path.join(inject_path, '..', 'output', '%s=%s=%s=%s=coverage' % (
        args.command, target_name, args.coverage_label or 'local-%s' % version, 'python-%s' % version)))

    # injector shims must be found before the real interpreters
    env['PATH'] = inject_path + os.pathsep + env['PATH']
    env['ANSIBLE_TEST_PYTHON_VERSION'] = version
    env['ANSIBLE_TEST_PYTHON_INTERPRETER'] = interpreter

    config = dict(
        python_interpreter=interpreter,
        coverage_file=coverage_file if args.coverage else None,
    )

    if not args.explain:
        with open(config_path, 'w') as config_fd:
            json.dump(config, config_fd, indent=4, sort_keys=True)

    return run_command(args, cmd, capture=capture, env=env, data=data, cwd=cwd)
def get_coverage_path(args):
    """Create (once per process) and return the coverage injector directory.

    The injector scripts are copied into a world-readable temporary directory
    so they remain usable by other users (e.g. inside containers or via sudo).
    Cleanup is registered with atexit.

    :type args: TestConfig
    :rtype: str
    """
    global coverage_path  # pylint: disable=locally-disabled, global-statement, invalid-name

    if coverage_path:
        return os.path.join(coverage_path, 'coverage')  # already initialized this process

    prefix = 'ansible-test-coverage-'
    tmp_dir = '/tmp'

    if args.explain:
        # dry run: report a representative path without creating anything
        return os.path.join(tmp_dir, '%stmp' % prefix, 'coverage')

    src = os.path.abspath(os.path.join(os.getcwd(), 'test/runner/injector/'))

    coverage_path = tempfile.mkdtemp('', prefix, dir=tmp_dir)
    os.chmod(coverage_path, stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)

    shutil.copytree(src, os.path.join(coverage_path, 'coverage'))
    shutil.copy('.coveragerc', os.path.join(coverage_path, 'coverage', '.coveragerc'))

    # make every copied entry readable/executable by all users
    for root, dir_names, file_names in os.walk(coverage_path):
        for name in dir_names + file_names:
            os.chmod(os.path.join(root, name), stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)

    # output/logs must be world-writable so any user running tests can write results
    for directory in 'output', 'logs':
        os.mkdir(os.path.join(coverage_path, directory))
        os.chmod(os.path.join(coverage_path, directory), stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)

    atexit.register(cleanup_coverage_dir)

    return os.path.join(coverage_path, 'coverage')
def cleanup_coverage_dir():
    """Copy over coverage data from temporary directory and purge temporary directory."""
    work_dir = os.getcwd()

    # preserve collected coverage output files
    output_dir = os.path.join(coverage_path, 'output')
    coverage_dst = os.path.join(work_dir, 'test', 'results', 'coverage')

    for name in os.listdir(output_dir):
        shutil.copy(os.path.join(output_dir, name), coverage_dst)

    # preserve log files, adding a random suffix so names from repeated runs cannot collide
    logs_dir = os.path.join(coverage_path, 'logs')

    for name in os.listdir(logs_dir):
        suffix = ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(8))
        renamed = '%s.%s.log' % (os.path.splitext(os.path.basename(name))[0], suffix)
        shutil.copy(os.path.join(logs_dir, name), os.path.join(work_dir, 'test', 'results', 'logs', renamed))

    shutil.rmtree(coverage_path)
def get_changes_filter(args):
"""
:type args: TestConfig
@ -1306,12 +1281,16 @@ def get_integration_local_filter(args, targets):
% (skip.rstrip('/'), ', '.join(skipped)))
if args.python_version.startswith('3'):
skip = 'skip/python3/'
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which are not yet supported on python 3: %s'
% (skip.rstrip('/'), ', '.join(skipped)))
python_version = 3
else:
python_version = 2
skip = 'skip/python%d/' % python_version
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which are not supported on python %d: %s'
% (skip.rstrip('/'), python_version, ', '.join(skipped)))
return exclude
@ -1332,13 +1311,26 @@ def get_integration_docker_filter(args, targets):
display.warning('Excluding tests marked "%s" which require --docker-privileged to run under docker: %s'
% (skip.rstrip('/'), ', '.join(skipped)))
python_version = 2 # images are expected to default to python 2 unless otherwise specified
if args.docker.endswith('py3'):
skip = 'skip/python3/'
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which are not yet supported on python 3: %s'
% (skip.rstrip('/'), ', '.join(skipped)))
python_version = 3 # docker images ending in 'py3' are expected to default to python 3
if args.docker.endswith(':default'):
python_version = 3 # docker images tagged 'default' are expected to default to python 3
if args.python: # specifying a numeric --python option overrides the default python
if args.python.startswith('3'):
python_version = 3
elif args.python.startswith('2'):
python_version = 2
skip = 'skip/python%d/' % python_version
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which are not supported on python %d: %s'
% (skip.rstrip('/'), python_version, ', '.join(skipped)))
return exclude
@ -1359,9 +1351,18 @@ def get_integration_remote_filter(args, targets):
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which are not yet supported on %s: %s'
display.warning('Excluding tests marked "%s" which are not supported on %s: %s'
% (skip.rstrip('/'), platform, ', '.join(skipped)))
python_version = 2 # remotes are expected to default to python 2
skip = 'skip/python%d/' % python_version
skipped = [target.name for target in targets if skip in target.aliases]
if skipped:
exclude.append(skip)
display.warning('Excluding tests marked "%s" which are not supported on python %d: %s'
% (skip.rstrip('/'), python_version, ', '.join(skipped)))
return exclude

View file

@ -14,6 +14,7 @@ from lib.util import (
SubprocessError,
ApplicationError,
run_command,
intercept_command,
)
from lib.core_ci import (
@ -51,7 +52,7 @@ class ManageWindowsCI(object):
for _ in range(1, 120):
try:
run_command(self.core_ci.args, cmd, env=env)
intercept_command(self.core_ci.args, cmd, 'ping', env=env)
return
except SubprocessError:
sleep(10)
@ -93,7 +94,7 @@ class ManageNetworkCI(object):
for _ in range(1, 90):
try:
run_command(self.core_ci.args, cmd, env=env)
intercept_command(self.core_ci.args, cmd, 'ping', env=env)
return
except SubprocessError:
sleep(10)
@ -161,7 +162,7 @@ class ManagePosixCI(object):
remote_source_path = os.path.join(remote_source_dir, os.path.basename(local_source_fd.name))
if not self.core_ci.args.explain:
lib.pytar.create_tarfile(local_source_fd.name, '.', lib.pytar.ignore)
lib.pytar.create_tarfile(local_source_fd.name, '.', lib.pytar.DefaultTarFilter())
self.upload(local_source_fd.name, remote_source_dir)
self.ssh('rm -rf ~/ansible && mkdir ~/ansible && cd ~/ansible && tar oxzf %s' % remote_source_path)

View file

@ -20,6 +20,7 @@ class Metadata(object):
"""Initialize metadata."""
self.changes = {} # type: dict [str, tuple[tuple[int, int]]
self.cloud_config = None # type: dict [str, str]
self.instance_config = None # type: list[dict[str, str]]
if is_shippable():
self.ci_provider = 'shippable'
@ -54,6 +55,7 @@ class Metadata(object):
return dict(
changes=self.changes,
cloud_config=self.cloud_config,
instance_config=self.instance_config,
ci_provider=self.ci_provider,
)
@ -88,6 +90,7 @@ class Metadata(object):
metadata = Metadata()
metadata.changes = data['changes']
metadata.cloud_config = data['cloud_config']
metadata.instance_config = data['instance_config']
metadata.ci_provider = data['ci_provider']
return metadata

View file

@ -2,76 +2,103 @@
from __future__ import absolute_import, print_function
import abc
import tarfile
import os
from lib.util import (
display,
ABC,
)
# improve performance by disabling uid/gid lookups
tarfile.pwd = None
tarfile.grp = None
# To reduce archive time and size, ignore non-versioned files which are large or numerous.
# Also ignore miscellaneous git related files since the .git directory is ignored.
IGNORE_DIRS = (
'.tox',
'.git',
'.idea',
'__pycache__',
'ansible.egg-info',
)
IGNORE_FILES = (
'.gitignore',
'.gitdir',
)
IGNORE_EXTENSIONS = (
'.pyc',
'.retry',
)
class TarFilter(ABC):
    """Filter to use when creating a tar file."""
    @abc.abstractmethod
    def ignore(self, item):
        """Return the item to include it in the archive, or None to exclude it.

        :type item: tarfile.TarInfo
        :rtype: tarfile.TarInfo | None
        """
        pass
class DefaultTarFilter(TarFilter):
    """
    To reduce archive time and size, ignore non-versioned files which are large or numerous.
    Also ignore miscellaneous git related files since the .git directory is ignored.
    """
    def __init__(self):
        # directories excluded when any path component matches one of these names
        self.ignore_dirs = (
            '.tox',
            '.git',
            '.idea',
            '__pycache__',
            'ansible.egg-info',
        )

        # file names excluded regardless of location
        self.ignore_files = (
            '.gitignore',
            '.gitdir',
        )

        # file extensions excluded regardless of location
        self.ignore_extensions = (
            '.pyc',
            '.retry',
        )

    def ignore(self, item):
        """Return the item to include it in the archive, or None to exclude it.

        :type item: tarfile.TarInfo
        :rtype: tarfile.TarInfo | None
        """
        filename = os.path.basename(item.path)
        name, ext = os.path.splitext(filename)
        dirs = os.path.split(item.path)

        if not item.isdir():
            # generated outputs are never archived
            if item.path.startswith('./test/results/'):
                return None

            if item.path.startswith('./docs/docsite/_build/'):
                return None

            if name in self.ignore_files:
                return None

            if ext in self.ignore_extensions:
                return None

        if any(d in self.ignore_dirs for d in dirs):
            return None

        return item
class AllowGitTarFilter(DefaultTarFilter):
    """
    Filter that allows git related files normally excluded by the default tar filter.
    """
    def __init__(self):
        super(AllowGitTarFilter, self).__init__()

        # drop every git-related entry from the inherited ignore lists
        self.ignore_dirs = tuple(entry for entry in self.ignore_dirs if not entry.startswith('.git'))
        self.ignore_files = tuple(entry for entry in self.ignore_files if not entry.startswith('.git'))
def create_tarfile(dst_path, src_path, tar_filter):
    """Create a gzip compressed tar archive of src_path at dst_path using the given filter.

    :type dst_path: str
    :type src_path: str
    :type tar_filter: TarFilter
    """
    display.info('Creating a compressed tar archive of path: %s' % src_path, verbosity=1)

    with tarfile.TarFile.gzopen(dst_path, mode='w', compresslevel=4) as tar:
        # the filter object's bound ignore method serves as the tarfile filter callback
        tar.add(src_path, filter=tar_filter.ignore)

    display.info('Resulting archive is %d bytes.' % os.path.getsize(dst_path), verbosity=1)

View file

@ -86,7 +86,7 @@ def command_sanity(args):
versions = (None,)
for version in versions:
if args.python and version and version != args.python:
if args.python and version and version != args.python_version:
continue
display.info('Sanity check using %s%s' % (test.name, ' with Python %s' % version if version else ''))

View file

@ -11,16 +11,13 @@ from lib.sanity import (
from lib.util import (
SubprocessError,
display,
intercept_command,
)
from lib.ansible_util import (
ansible_environment,
)
from lib.executor import (
intercept_command,
)
from lib.config import (
SanityConfig,
)

View file

@ -15,6 +15,7 @@ from lib.sanity import (
from lib.util import (
SubprocessError,
run_command,
intercept_command,
remove_tree,
)
@ -23,7 +24,6 @@ from lib.ansible_util import (
)
from lib.executor import (
intercept_command,
generate_pip_install,
)
@ -83,7 +83,7 @@ class ImportTest(SanityMultipleVersion):
# make sure coverage is available in the virtual environment if needed
if args.coverage:
run_command(args, generate_pip_install('sanity.import', packages=['coverage']), env=env)
run_command(args, generate_pip_install('pip', 'sanity.import', packages=['coverage']), env=env)
run_command(args, ['pip', 'uninstall', '--disable-pip-version-check', '-y', 'pip'], env=env)
cmd = ['importer.py'] + paths

View file

@ -15,6 +15,7 @@ from lib.util import (
SubprocessError,
display,
run_command,
find_executable,
)
from lib.config import (
@ -55,7 +56,8 @@ class Pep8Test(SanitySingleVersion):
paths = sorted(i.path for i in targets.include if (os.path.splitext(i.path)[1] == '.py' or i.path.startswith('bin/')) and i.path not in skip_paths_set)
cmd = [
'pycodestyle',
'python%s' % args.python_version,
find_executable('pycodestyle'),
'--max-line-length', '160',
'--config', '/dev/null',
'--ignore', ','.join(sorted(current_ignore)),

View file

@ -3,6 +3,7 @@ from __future__ import absolute_import, print_function
import json
import os
import datetime
from lib.sanity import (
SanitySingleVersion,
@ -16,6 +17,7 @@ from lib.util import (
SubprocessError,
run_command,
display,
find_executable,
)
from lib.ansible_util import (
@ -52,51 +54,54 @@ class PylintTest(SanitySingleVersion):
with open(PYLINT_SKIP_PATH, 'r') as skip_fd:
skip_paths = skip_fd.read().splitlines()
with open('test/sanity/pylint/disable.txt', 'r') as disable_fd:
disable = set(c for c in disable_fd.read().splitlines() if not c.strip().startswith('#'))
with open('test/sanity/pylint/enable.txt', 'r') as enable_fd:
enable = set(c for c in enable_fd.read().splitlines() if not c.strip().startswith('#'))
skip_paths_set = set(skip_paths)
paths = sorted(i.path for i in targets.include if (os.path.splitext(i.path)[1] == '.py' or i.path.startswith('bin/')) and i.path not in skip_paths_set)
cmd = [
'pylint',
'--jobs', '0',
'--reports', 'n',
'--max-line-length', '160',
'--rcfile', '/dev/null',
'--ignored-modules', '_MovedItems',
'--output-format', 'json',
'--disable', ','.join(sorted(disable)),
'--enable', ','.join(sorted(enable)),
] + paths
contexts = {}
remaining_paths = set(paths)
env = ansible_environment(args)
def add_context(available_paths, context_name, context_filter):
"""
:type available_paths: set[str]
:type context_name: str
:type context_filter: (str) -> bool
"""
filtered_paths = set(p for p in available_paths if context_filter(p))
contexts[context_name] = sorted(filtered_paths)
available_paths -= filtered_paths
if paths:
try:
stdout, stderr = run_command(args, cmd, env=env, capture=True)
status = 0
except SubprocessError as ex:
stdout = ex.stdout
stderr = ex.stderr
status = ex.status
add_context(remaining_paths, 'ansible-test', lambda p: p.startswith('test/runner/'))
add_context(remaining_paths, 'units', lambda p: p.startswith('test/units/'))
add_context(remaining_paths, 'test', lambda p: p.startswith('test/'))
add_context(remaining_paths, 'hacking', lambda p: p.startswith('hacking/'))
add_context(remaining_paths, 'modules', lambda p: p.startswith('lib/ansible/modules/'))
add_context(remaining_paths, 'module_utils', lambda p: p.startswith('lib/ansible/module_utils/'))
add_context(remaining_paths, 'ansible', lambda p: True)
if stderr or status >= 32:
raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
else:
stdout = None
messages = []
context_times = []
if args.explain:
return SanitySuccess(self.name)
test_start = datetime.datetime.utcnow()
if stdout:
messages = json.loads(stdout)
else:
messages = []
for context in sorted(contexts):
context_paths = contexts[context]
if not context_paths:
continue
context_start = datetime.datetime.utcnow()
messages += self.pylint(args, context, context_paths)
context_end = datetime.datetime.utcnow()
context_times.append('%s: %d (%s)' % (context, len(context_paths), context_end - context_start))
test_end = datetime.datetime.utcnow()
for context_time in context_times:
display.info(context_time, verbosity=4)
display.info('total: %d (%s)' % (len(paths), test_end - test_start), verbosity=4)
errors = [SanityMessage(
message=m['message'].replace('\n', ' '),
@ -127,3 +132,48 @@ class PylintTest(SanitySingleVersion):
return SanityFailure(self.name, messages=errors)
return SanitySuccess(self.name)
def pylint(self, args, context, paths):
"""
:type args: SanityConfig
:param context: str
:param paths: list[str]
:return: list[dict[str, str]]
"""
rcfile = 'test/sanity/pylint/config/%s' % context
if not os.path.exists(rcfile):
rcfile = 'test/sanity/pylint/config/default'
cmd = [
'python%s' % args.python_version,
find_executable('pylint'),
'--jobs', '0',
'--reports', 'n',
'--max-line-length', '160',
'--rcfile', rcfile,
'--output-format', 'json',
] + paths
env = ansible_environment(args)
if paths:
try:
stdout, stderr = run_command(args, cmd, env=env, capture=True)
status = 0
except SubprocessError as ex:
stdout = ex.stdout
stderr = ex.stderr
status = ex.status
if stderr or status >= 32:
raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
else:
stdout = None
if not args.explain and stdout:
messages = json.loads(stdout)
else:
messages = []
return messages

View file

@ -15,6 +15,7 @@ from lib.util import (
SubprocessError,
run_command,
parse_to_dict,
find_executable,
)
from lib.config import (
@ -39,7 +40,8 @@ class RstcheckTest(SanitySingleVersion):
return SanitySkipped(self.name)
cmd = [
'rstcheck',
'python%s' % args.python_version,
find_executable('rstcheck'),
'--report', 'warning',
'--ignore-substitutions', ','.join(ignore_substitutions),
] + paths

View file

@ -44,6 +44,7 @@ class ValidateModulesTest(SanitySingleVersion):
return SanitySkipped(self.name)
cmd = [
'python%s' % args.python_version,
'test/sanity/validate-modules/validate-modules',
'--format', 'json',
] + paths

View file

@ -15,6 +15,7 @@ from lib.sanity import (
from lib.util import (
SubprocessError,
run_command,
find_executable,
)
from lib.config import (
@ -36,7 +37,8 @@ class YamllintTest(SanitySingleVersion):
return SanitySkipped(self.name)
cmd = [
'yamllint',
'python%s' % args.python_version,
find_executable('yamllint'),
'--format', 'parsable',
] + paths

View file

@ -2,15 +2,22 @@
from __future__ import absolute_import, print_function
import atexit
import errno
import filecmp
import inspect
import json
import os
import pipes
import pkgutil
import shutil
import subprocess
import random
import re
import shutil
import stat
import string
import subprocess
import sys
import tempfile
import time
try:
@ -19,6 +26,23 @@ except ImportError:
from abc import ABCMeta
ABC = ABCMeta('ABC', (), {})
DOCKER_COMPLETION = {}

coverage_path = ''  # pylint: disable=locally-disabled, invalid-name


def get_docker_completion():
    """Return the docker completion table, mapping image name to qualified entry.

    Lazily populated on first call from test/runner/completion/docker.txt,
    where each line has the form ``name@qualified-image``.

    :rtype: dict[str, str]
    """
    if not DOCKER_COMPLETION:
        with open('test/runner/completion/docker.txt', 'r') as completion_fd:
            entries = completion_fd.read().splitlines()

        # key each line on the portion before the '@' separator
        DOCKER_COMPLETION.update((entry.split('@')[0], entry) for entry in entries)

    return DOCKER_COMPLETION
def is_shippable():
"""
@ -35,6 +59,51 @@ def remove_file(path):
os.remove(path)
def find_pip(path=None, version=None):
    """Locate a pip executable that matches the requested python version.

    Candidate names are tried from most to least specific (e.g. ``pip2.7``,
    ``pip2``, ``pip``). The located script's shebang is then checked to verify
    it runs the same interpreter we intend to use; a shebang containing a
    space (an indirect launcher) is rejected since it cannot be compared.

    :type path: str | None
    :type version: str | None
    :rtype: str
    """
    if version:
        version_info = version.split('.')
        python_bin = find_executable('python%s' % version, path=path)
    else:
        version_info = sys.version_info
        python_bin = sys.executable

    choices = (
        'pip%s' % '.'.join(str(i) for i in version_info[:2]),
        'pip%s' % version_info[0],
        'pip',
    )

    # take the first candidate present on the search path
    pip = next((found for found in (find_executable(choice, required=False, path=path) for choice in choices) if found), None)

    if not pip:
        raise ApplicationError('Required program not found: %s' % ', '.join(choices))

    with open(pip) as pip_fd:
        shebang = pip_fd.readline().strip()

    if not shebang.startswith('#!') or ' ' in shebang:
        raise ApplicationError('Unexpected shebang in "%s": %s' % (pip, shebang))

    our_python = os.path.realpath(python_bin)
    pip_python = os.path.realpath(shebang[2:])

    # identical path, or identical binary contents, both count as a match
    if our_python != pip_python and not filecmp.cmp(our_python, pip_python, False):
        raise ApplicationError('Current interpreter "%s" does not match "%s" interpreter "%s".' % (our_python, pip, pip_python))

    return pip
def find_executable(executable, cwd=None, path=None, required=True):
"""
:type executable: str
@ -87,6 +156,104 @@ def find_executable(executable, cwd=None, path=None, required=True):
return match
def intercept_command(args, cmd, target_name, capture=False, env=None, data=None, cwd=None, python_version=None, path=None):
    """Run a command with the code-coverage injector placed first on PATH.

    Writes injector.json describing the interpreter to use (and the coverage
    output file when coverage collection is enabled), then delegates to
    run_command with the augmented environment.

    :type args: TestConfig
    :type cmd: collections.Iterable[str]
    :type target_name: str
    :type capture: bool
    :type env: dict[str, str] | None
    :type data: str | None
    :type cwd: str | None
    :type python_version: str | None
    :type path: str | None
    :rtype: str | None, str | None
    """
    if not env:
        env = common_environment()

    cmd = list(cmd)

    version = python_version or args.python_version
    interpreter = find_executable('python%s' % version, path=path)

    inject_path = get_coverage_path(args)
    config_path = os.path.join(inject_path, 'injector.json')

    # coverage output lands next to the injector dir, named by command/target/label/version
    coverage_file = os.path.abspath(os.path.join(inject_path, '..', 'output', '%s=%s=%s=%s=coverage' % (
        args.command, target_name, args.coverage_label or 'local-%s' % version, 'python-%s' % version)))

    env['PATH'] = inject_path + os.pathsep + env['PATH']
    env['ANSIBLE_TEST_PYTHON_VERSION'] = version
    env['ANSIBLE_TEST_PYTHON_INTERPRETER'] = interpreter

    config = dict(
        python_interpreter=interpreter,
        coverage_file=coverage_file if args.coverage else None,
    )

    if not args.explain:
        with open(config_path, 'w') as config_fd:
            json.dump(config, config_fd, indent=4, sort_keys=True)

    return run_command(args, cmd, capture=capture, env=env, data=data, cwd=cwd)
def get_coverage_path(args):
    """Create (once per process) and return the coverage injector directory.

    The injector tree is copied into a world-readable temporary directory so
    that commands run as other users (or in containers) can execute it; a
    cleanup handler is registered to harvest results on exit.

    :type args: TestConfig
    :rtype: str
    """
    global coverage_path  # pylint: disable=locally-disabled, global-statement, invalid-name

    # already created earlier in this process
    if coverage_path:
        return os.path.join(coverage_path, 'coverage')

    prefix = 'ansible-test-coverage-'
    tmp_dir = '/tmp'

    if args.explain:
        # dry run: report the path that would be used without creating anything
        return os.path.join(tmp_dir, '%stmp' % prefix, 'coverage')

    src = os.path.abspath(os.path.join(os.getcwd(), 'test/runner/injector/'))

    coverage_path = tempfile.mkdtemp('', prefix, dir=tmp_dir)

    # read/execute for everyone, write only for the owner
    readable_mode = stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH
    os.chmod(coverage_path, readable_mode)

    shutil.copytree(src, os.path.join(coverage_path, 'coverage'))
    shutil.copy('.coveragerc', os.path.join(coverage_path, 'coverage', '.coveragerc'))

    for root, dir_names, file_names in os.walk(coverage_path):
        for name in dir_names + file_names:
            os.chmod(os.path.join(root, name), readable_mode)

    # output/logs must be writable by any user running intercepted commands
    for directory in 'output', 'logs':
        os.mkdir(os.path.join(coverage_path, directory))
        os.chmod(os.path.join(coverage_path, directory), stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)

    atexit.register(cleanup_coverage_dir)

    return os.path.join(coverage_path, 'coverage')
def cleanup_coverage_dir():
    """Copy over coverage data from temporary directory and purge temporary directory."""
    output_dir = os.path.join(coverage_path, 'output')
    results_coverage = os.path.join(os.getcwd(), 'test', 'results', 'coverage')

    for filename in os.listdir(output_dir):
        shutil.copy(os.path.join(output_dir, filename), results_coverage)

    logs_dir = os.path.join(coverage_path, 'logs')
    results_logs = os.path.join(os.getcwd(), 'test', 'results', 'logs')

    for filename in os.listdir(logs_dir):
        # add a random suffix so logs from multiple runs cannot collide
        random_suffix = ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(8))
        new_name = '%s.%s.log' % (os.path.splitext(os.path.basename(filename))[0], random_suffix)
        shutil.copy(os.path.join(logs_dir, filename), os.path.join(results_logs, new_name))

    shutil.rmtree(coverage_path)
def run_command(args, cmd, capture=False, env=None, data=None, cwd=None, always=False, stdin=None, stdout=None,
cmd_verbosity=1, str_errors='strict'):
"""
@ -459,6 +626,8 @@ def docker_qualify_image(name):
if not name or any((c in name) for c in ('/', ':')):
return name
name = get_docker_completion().get(name, name)
return 'ansible/ansible:%s' % name