Initial ansible-test implementation. (#18556)

This commit is contained in:
Matt Clay 2016-11-29 21:21:53 -08:00 committed by GitHub
commit 6bbd92e422
191 changed files with 5483 additions and 48 deletions

1
test/runner/ansible-test Symbolic link
View file

@ -0,0 +1 @@
test.py

View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

1
test/runner/injector/cover Symbolic link
View file

@ -0,0 +1 @@
injector.py

1
test/runner/injector/cover2 Symbolic link
View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

1
test/runner/injector/cover3 Symbolic link
View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

184
test/runner/injector/injector.py Executable file
View file

@@ -0,0 +1,184 @@
#!/usr/bin/env python
"""Code coverage wrapper."""
from __future__ import absolute_import, print_function
import errno
import os
import sys
import pipes
import logging
import getpass
logger = logging.getLogger('injector') # pylint: disable=locally-disabled, invalid-name
def main():
    """Main entry point: configure logging, build the real command, then replace this process with it."""
    formatter = logging.Formatter('%(asctime)s ' + str(os.getpid()) + ' %(levelname)s %(message)s')
    log_name = 'ansible-test-coverage.%s.log' % getpass.getuser()
    self_dir = os.path.dirname(os.path.abspath(__file__))
    # log to /tmp as well as the runner's logs directory
    handler = logging.FileHandler(os.path.join('/tmp', log_name))
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    handler = logging.FileHandler(os.path.abspath(os.path.join(self_dir, '..', 'logs', log_name)))
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    try:
        logger.debug('Self: %s', __file__)
        logger.debug('Arguments: %s', ' '.join(pipes.quote(c) for c in sys.argv))
        # dispatch on the name this script was invoked as (it is symlinked under several names)
        if os.path.basename(__file__).startswith('runner'):
            args, env = runner()
        elif os.path.basename(__file__).startswith('cover'):
            args, env = cover()
        else:
            args, env = injector()
        logger.debug('Run command: %s', ' '.join(pipes.quote(c) for c in args))
        try:
            cwd = os.getcwd()
        except OSError as ex:
            # the current directory may be inaccessible (EACCES); anything else is fatal
            if ex.errno != errno.EACCES:
                raise
            cwd = None
        logger.debug('Working directory: %s', cwd or '?')
        for key in sorted(env.keys()):
            logger.debug('%s=%s', key, env[key])
        # replace this process with the real command; nothing after this runs on success
        os.execvpe(args[0], args, env)
    except Exception as ex:
        # record the failure in the log files before propagating it
        logger.fatal(ex)
        raise
def injector():
    """Build the command and environment for the program this wrapper was invoked as.

    :rtype: list[str], dict[str, str]
    """
    self_dir = os.path.dirname(os.path.abspath(__file__))
    command = os.path.basename(__file__)  # the wrapper is symlinked under the real command's name
    mode = os.environ.get('ANSIBLE_TEST_COVERAGE')
    version = os.environ.get('ANSIBLE_TEST_PYTHON_VERSION', '')
    executable = find_executable(command)  # the real command on PATH (find_executable skips this wrapper)
    if mode in ('coverage', 'version'):
        if mode == 'coverage':
            # run the real command under the coverage tool
            args, env = coverage_command(self_dir, version)
            args += [executable]
            tool = 'cover'
        else:
            # run the real command under the requested python version
            interpreter = find_executable('python' + version)
            args, env = [interpreter, executable], os.environ.copy()
            tool = 'runner'
        if command in ('ansible', 'ansible-playbook', 'ansible-pull'):
            # point ansible_python_interpreter at the matching wrapper ('cover'/'runner' + version),
            # presumably so module execution also goes through the wrapper -- confirm against callers
            interpreter = find_executable(tool + version)
            args += ['--extra-vars', 'ansible_python_interpreter=' + interpreter]
    else:
        # no special mode: execute the real command unchanged
        args, env = [executable], os.environ.copy()
    args += sys.argv[1:]
    return args, env
def runner():
    """Build the command and environment for running a python interpreter selected by this wrapper's name.

    :rtype: list[str], dict[str, str]
    """
    # The wrapper is named "runner<version>"; the suffix picks the interpreter.
    suffix = os.path.basename(__file__).replace('runner', '')
    interpreter = find_executable('python' + suffix)
    return [interpreter] + sys.argv[1:], os.environ.copy()
def cover():
    """Build the command and environment for a 'cover' wrapper invocation.

    Runs the target under coverage when it looks like an ansible module,
    otherwise runs it with a plain python interpreter.

    :rtype: list[str], dict[str, str]
    """
    self_dir = os.path.dirname(os.path.abspath(__file__))
    suffix = os.path.basename(__file__).replace('cover', '')
    target = sys.argv[1] if len(sys.argv) > 1 else ''
    if os.path.basename(target).startswith('ansible_module_'):
        cmd, env = coverage_command(self_dir, suffix)
    else:
        cmd, env = [find_executable('python' + suffix)], os.environ.copy()
    return cmd + sys.argv[1:], env
def coverage_command(self_dir, version):
    """Build a 'coverage run' command prefix and the environment for it.

    :type self_dir: str
    :type version: str
    :rtype: list[str], dict[str, str]
    """
    # a version suffix selects a versioned coverage executable, e.g. "coverage-2.7"
    name = 'coverage' if not version else 'coverage-%s' % version
    cmd = [
        find_executable(name),
        'run',
        '--append',
        '--rcfile',
        os.path.join(self_dir, '.coveragerc'),
    ]
    environment = os.environ.copy()
    environment['COVERAGE_FILE'] = os.path.abspath(os.path.join(self_dir, '..', 'output', 'coverage'))
    return cmd, environment
def find_executable(executable):
    """Locate an executable on PATH, never returning this script itself.

    :type executable: str
    :rtype: str
    :raises Exception: if no matching executable is found on PATH
    """
    self = os.path.abspath(__file__)
    path = os.environ.get('PATH', os.defpath)
    seen_dirs = set()
    for path_dir in path.split(os.pathsep):
        if path_dir in seen_dirs:
            continue  # skip duplicate PATH entries
        seen_dirs.add(path_dir)
        candidate = os.path.abspath(os.path.join(path_dir, executable))
        if candidate == self:
            continue  # this wrapper is symlinked under command names; never return it
        # require a regular file: a directory can satisfy os.path.exists and an
        # os.access X_OK check, which would return an unrunnable path
        if os.path.isfile(candidate) and os.access(candidate, os.F_OK | os.X_OK):
            return candidate
    raise Exception('Executable "%s" not found in path: %s' % (executable, path))
# execute only when run as a script, not when imported
if __name__ == '__main__':
    main()

1
test/runner/injector/pytest Symbolic link
View file

@ -0,0 +1 @@
injector.py

1
test/runner/injector/runner Symbolic link
View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

View file

@ -0,0 +1 @@
injector.py

View file

View file

@@ -0,0 +1,34 @@
"""Miscellaneous utility functions and classes specific to ansible cli tools."""
from __future__ import absolute_import, print_function
import os
from lib.util import common_environment
def ansible_environment(args):
    """Return an environment for running the in-tree ansible CLI tools.

    :type args: CommonConfig
    :rtype: dict[str, str]
    """
    env = common_environment()
    path = env['PATH']
    ansible_path = os.path.join(os.getcwd(), 'bin')
    # make sure the source tree's bin directory takes precedence on PATH
    if not path.startswith(ansible_path + os.pathsep):
        path = ansible_path + os.pathsep + path
    ansible = dict(
        # previously "'%s' % 'true' if args.color else 'false'", which relied on % binding
        # tighter than the conditional; this explicit form produces the same values
        ANSIBLE_FORCE_COLOR='true' if args.color else 'false',
        ANSIBLE_DEPRECATION_WARNINGS='false',
        ANSIBLE_CONFIG='/dev/null',  # keep user/system ansible.cfg from affecting tests
        PYTHONPATH=os.path.abspath('lib'),
        PAGER='/bin/cat',
        PATH=path,
    )
    env.update(ansible)
    return env

165
test/runner/lib/changes.py Normal file
View file

@@ -0,0 +1,165 @@
"""Detect changes in Ansible code."""
from __future__ import absolute_import, print_function
import re
import os
from lib.util import (
ApplicationError,
SubprocessError,
MissingEnvironmentVariable,
CommonConfig,
)
from lib.http import (
HttpClient,
urlencode,
)
from lib.git import (
Git,
)
class InvalidBranch(ApplicationError):
    """Exception for invalid branch specification."""
    def __init__(self, branch, reason):
        """
        :type branch: str
        :type reason: str
        """
        # compose the message inline; keep the offending branch for callers to inspect
        super(InvalidBranch, self).__init__('Invalid branch: %s\n%s' % (branch, reason))
        self.branch = branch
class ChangeDetectionNotSupported(ApplicationError):
    """Exception for cases where change detection is not supported."""
    def __init__(self, message):
        """
        :type message: str
        """
        # no extra state; the subclass exists so callers can catch this case specifically
        super(ChangeDetectionNotSupported, self).__init__(message)
class ShippableChanges(object):
    """Change information for Shippable build."""
    def __init__(self, args, git):
        """
        :type args: CommonConfig
        :type git: Git
        :raises MissingEnvironmentVariable: if a required Shippable variable is unset
        :raises ChangeDetectionNotSupported: for tag builds
        """
        self.args = args
        try:
            # Shippable exposes build metadata through environment variables
            self.branch = os.environ['BRANCH']
            self.is_pr = os.environ['IS_PULL_REQUEST'] == 'true'
            self.is_tag = os.environ['IS_GIT_TAG'] == 'true'
            self.commit = os.environ['COMMIT']
            self.project_id = os.environ['PROJECT_ID']
        except KeyError as ex:
            # report which variable was missing instead of a bare KeyError
            raise MissingEnvironmentVariable(name=ex.args[0])
        if self.is_tag:
            raise ChangeDetectionNotSupported('Change detection is not supported for tags.')
        if self.is_pr:
            # pull request: diff against the target branch
            self.paths = sorted(git.get_diff_names([self.branch]))
        else:
            # branch build: diff against the last commit with a successful merge run
            merge_runs = self.get_merge_runs(self.project_id, self.branch)
            last_successful_commit = self.get_last_successful_commit(merge_runs)
            self.paths = sorted(git.get_diff_names([last_successful_commit, self.commit]))
    def get_merge_runs(self, project_id, branch):
        """Query the Shippable API for merge (non-PR) runs of the given project and branch.

        :type project_id: str
        :type branch: str
        :rtype: list[dict]
        """
        params = dict(
            isPullRequest='false',
            projectIds=project_id,
            branch=branch,
        )
        client = HttpClient(self.args, always=True)
        response = client.get('https://api.shippable.com/runs?%s' % urlencode(params))
        return response.json()
    @staticmethod
    def get_last_successful_commit(merge_runs):
        """Return the sha of the most recent distinct commit whose first run succeeded, or None.

        :type merge_runs: list[dict]
        :rtype: str | None
        """
        merge_runs = sorted(merge_runs, key=lambda r: r['createdAt'])
        known_commits = set()
        last_successful_commit = None
        for merge_run in merge_runs:
            commit_sha = merge_run['commitSha']
            if commit_sha not in known_commits:
                known_commits.add(commit_sha)
                # statusCode 30 -- presumably Shippable's "success" status; confirm against API docs
                if merge_run['statusCode'] == 30:
                    last_successful_commit = commit_sha
        return last_successful_commit
class LocalChanges(object):
    """Change information for local work."""
    def __init__(self, args, git):
        """
        :type args: CommonConfig
        :type git: Git
        :raises InvalidBranch: if the current branch is an official (non-feature) branch
        :raises ApplicationError: if no fork point can be determined
        """
        self.args = args
        self.current_branch = git.get_branch()
        if self.is_official_branch(self.current_branch):
            raise InvalidBranch(branch=self.current_branch,
                                reason='Current branch is not a feature branch.')
        self.fork_branch = None
        self.fork_point = None
        self.local_branches = sorted(git.get_branches())
        self.official_branches = sorted([b for b in self.local_branches if self.is_official_branch(b)])
        # try each official branch in sorted order until one yields a fork point
        for self.fork_branch in self.official_branches:
            try:
                self.fork_point = git.get_branch_fork_point(self.fork_branch)
                break
            except SubprocessError:
                pass
        if self.fork_point is None:
            raise ApplicationError('Unable to auto-detect fork branch and fork point.')
        # tracked files (including unchanged)
        self.tracked = sorted(git.get_file_names(['--cached']))
        # untracked files (except ignored)
        self.untracked = sorted(git.get_file_names(['--others', '--exclude-standard']))
        # tracked changes (including deletions) committed since the branch was forked
        self.committed = sorted(git.get_diff_names([self.fork_point, 'HEAD']))
        # tracked changes (including deletions) which are staged
        self.staged = sorted(git.get_diff_names(['--cached']))
        # tracked changes (including deletions) which are not staged
        self.unstaged = sorted(git.get_diff_names([]))
    @staticmethod
    def is_official_branch(name):
        """Return True for 'devel' and 'stable-X.Y' branch names.

        :type name: str
        :rtype: bool
        """
        if name == 'devel':
            return True
        if re.match(r'^stable-[0-9]+\.[0-9]+$', name):
            return True
        return False

View file

@@ -0,0 +1,326 @@
"""Classify changes in Ansible code."""
from __future__ import absolute_import, print_function
import os
from lib.target import (
walk_module_targets,
walk_integration_targets,
walk_units_targets,
walk_compile_targets,
)
from lib.util import (
display,
)
def categorize_changes(paths, verbose_command=None):
    """Map changed file paths to the test commands and targets that should run.

    :type paths: list[str]
    :type verbose_command: str
    :rtype: dict[str, list[str]]
    """
    mapper = PathMapper()
    commands = {
        'sanity': set(),
        'compile': set(),
        'units': set(),
        'integration': set(),
        'windows-integration': set(),
        'network-integration': set(),
    }
    display.info('Mapping %d changed file(s) to tests.' % len(paths))
    for path in paths:
        tests = mapper.classify(path)
        if tests is None:
            display.info('%s -> all' % path, verbosity=1)
            tests = all_tests()  # not categorized, run all tests
            display.warning('Path not categorized: %s' % path)
        else:
            # drop commands whose target is empty/None
            tests = dict((key, value) for key, value in tests.items() if value)
        if verbose_command:
            result = '%s: %s' % (verbose_command, tests.get(verbose_command) or 'none')
            # identify targeted integration tests (those which only target a single integration command)
            if 'integration' in verbose_command and tests.get(verbose_command):
                if not any('integration' in command for command in tests.keys() if command != verbose_command):
                    result += ' (targeted)'
        else:
            result = '%s' % tests
        display.info('%s -> %s' % (path, result), verbosity=1)
        for command, target in tests.items():
            commands[command].add(target)
    for command in commands:
        # an 'all' target supersedes any individual targets for that command
        if any(t == 'all' for t in commands[command]):
            commands[command] = set(['all'])
    # drop empty commands; return sorted target lists
    commands = dict((c, sorted(commands[c])) for c in commands.keys() if commands[c])
    return commands
class PathMapper(object):
    """Map file paths to test commands and targets."""
    def __init__(self):
        """Build lookup tables from the test target walkers."""
        self.integration_targets = list(walk_integration_targets())
        self.module_targets = list(walk_module_targets())
        self.compile_targets = list(walk_compile_targets())
        self.units_targets = list(walk_units_targets())
        # fast membership/lookup structures used by _classify
        self.compile_paths = set(t.path for t in self.compile_targets)
        self.units_modules = set(t.module for t in self.units_targets if t.module)
        self.units_paths = set(t.path for t in self.units_targets)
        self.module_names_by_path = dict((t.path, t.module) for t in self.module_targets)
        self.integration_targets_by_name = dict((t.name, t) for t in self.integration_targets)
        # module name -> integration target name, split by target platform alias
        self.posix_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                                if 'posix/' in t.aliases for m in t.modules)
        self.windows_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                                  if 'windows/' in t.aliases for m in t.modules)
        self.network_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                                  if 'network/' in t.aliases for m in t.modules)
    def classify(self, path):
        """Classify a path, adding the implicit compile and sanity entries.

        :type path: str
        :rtype: dict[str, str] | None
        """
        result = self._classify(path)
        # run all tests when no result given
        if result is None:
            return None
        # compile path if eligible
        if path in self.compile_paths:
            result['compile'] = path
        # run sanity on path unless result specified otherwise
        if 'sanity' not in result:
            result['sanity'] = path
        return result
    def _classify(self, path):
        """Map a single path to tests; None means unknown (callers run everything).

        :type path: str
        :rtype: dict[str, str] | None
        """
        filename = os.path.basename(path)
        name, ext = os.path.splitext(filename)
        minimal = {}  # no tests beyond what classify() adds (compile/sanity)
        if path.startswith('.github/'):
            return minimal
        if path.startswith('bin/'):
            return minimal
        if path.startswith('contrib/'):
            return {
                'units': 'test/units/contrib/'
            }
        if path.startswith('docs/'):
            return minimal
        if path.startswith('docs-api/'):
            return minimal
        if path.startswith('docsite/'):
            return minimal
        if path.startswith('examples/'):
            return minimal
        if path.startswith('hacking/'):
            return minimal
        if path.startswith('lib/ansible/modules/'):
            module = self.module_names_by_path.get(path)
            if module:
                # map the module to its units and platform-appropriate integration targets
                return {
                    'units': module if module in self.units_modules else None,
                    'integration': self.posix_integration_by_module.get(module) if ext == '.py' else None,
                    'windows-integration': self.windows_integration_by_module.get(module) if ext == '.ps1' else None,
                    'network-integration': self.network_integration_by_module.get(module),
                }
            return minimal
        if path.startswith('lib/ansible/module_utils/'):
            if ext == '.ps1':
                return {
                    'windows-integration': 'all',
                }
            if ext == '.py':
                return {
                    'integration': 'all',
                    'network-integration': 'all',
                    'units': 'all',
                }
            # other extensions fall through to the broader lib/ansible/ rule below
        if path.startswith('lib/ansible/plugins/connection/'):
            if name == '__init__':
                return {
                    'integration': 'all',
                    'windows-integration': 'all',
                    'network-integration': 'all',
                    'units': 'test/units/plugins/connection/',
                }
            if name == 'winrm':
                return {
                    'windows-integration': 'all',
                    'units': 'test/units/plugins/connection/',
                }
            if name == 'local':
                return {
                    'integration': 'all',
                    'network-integration': 'all',
                    # NOTE(review): 'connections/' (plural) differs from the 'connection/' paths above -- confirm intended
                    'units': 'test/units/plugins/connections/',
                }
            if 'connection_%s' % name in self.integration_targets_by_name:
                return {
                    'integration': 'connection_%s' % name,
                }
            return minimal
        if path.startswith('lib/ansible/utils/module_docs_fragments/'):
            return {
                'sanity': 'all',
            }
        if path.startswith('lib/ansible/'):
            return all_tests()  # broad impact, run all tests
        if path.startswith('packaging/'):
            return minimal
        if path.startswith('test/compile/'):
            return {
                'compile': 'all',
            }
        if path.startswith('test/results/'):
            return minimal
        if path.startswith('test/integration/roles/'):
            return minimal
        if path.startswith('test/integration/targets/'):
            # NOTE(review): raises KeyError if the target directory is unknown -- confirm acceptable
            target = self.integration_targets_by_name[path.split('/')[3]]
            if 'hidden/' in target.aliases:
                return {
                    'integration': 'all',
                    'windows-integration': 'all',
                    'network-integration': 'all',
                }
            return {
                'integration': target.name if 'posix/' in target.aliases else None,
                'windows-integration': target.name if 'windows/' in target.aliases else None,
                'network-integration': target.name if 'network/' in target.aliases else None,
            }
        if path.startswith('test/integration/'):
            return {
                'integration': 'all',
                'windows-integration': 'all',
                'network-integration': 'all',
            }
        if path.startswith('test/samples/'):
            return minimal
        if path.startswith('test/sanity/'):
            return {
                'sanity': 'all',  # test infrastructure, run all sanity checks
            }
        if path.startswith('test/units/'):
            if path in self.units_paths:
                return {
                    'units': path,
                }
            # changed file is not itself a test; run the tests in its directory
            return {
                'units': os.path.dirname(path),
            }
        if path.startswith('test/runner/'):
            return all_tests()  # test infrastructure, run all tests
        if path.startswith('test/utils/shippable/'):
            return all_tests()  # test infrastructure, run all tests
        if path.startswith('test/utils/'):
            return minimal
        if path == 'test/README.md':
            return minimal
        if path.startswith('ticket_stubs/'):
            return minimal
        if '/' not in path:
            # repository root files
            if path in (
                    '.gitattributes',
                    '.gitignore',
                    '.gitmodules',
                    '.mailmap',
                    'tox.ini',  # obsolete
                    'COPYING',
                    'VERSION',
                    'Makefile',
                    'setup.py',
            ):
                return minimal
            if path in (
                    'shippable.yml',
                    '.coveragerc',
            ):
                return all_tests()  # test infrastructure, run all tests
            if path == '.yamllint':
                return {
                    'sanity': 'all',
                }
            if ext in ('.md', '.rst', '.txt', '.xml', '.in'):
                return minimal
        return None  # unknown, will result in fall-back to run all tests
def all_tests():
    """Return a mapping selecting every test command with the special 'all' target.

    :rtype: dict[str, str]
    """
    commands = (
        'sanity',
        'compile',
        'units',
        'integration',
        'windows-integration',
        'network-integration',
    )
    return dict((command, 'all') for command in commands)

340
test/runner/lib/core_ci.py Normal file
View file

@@ -0,0 +1,340 @@
"""Access Ansible Core CI remote services."""
from __future__ import absolute_import, print_function
import json
import os
import traceback
import uuid
import errno
import time
from lib.http import (
HttpClient,
HttpResponse,
HttpError,
)
from lib.util import (
ApplicationError,
run_command,
make_dirs,
CommonConfig,
display,
is_shippable,
)
class AnsibleCoreCI(object):
    """Client for Ansible Core CI services."""
    def __init__(self, args, platform, version, stage='prod', persist=True, name=None):
        """
        :type args: CommonConfig
        :type platform: str
        :type version: str
        :type stage: str
        :type persist: bool
        :type name: str
        :raises ApplicationError: for unsupported platforms
        """
        self.args = args
        self.platform = platform
        self.version = version
        self.stage = stage
        self.client = HttpClient(args)
        self.connection = None
        self.instance_id = None
        self.name = name if name else '%s-%s' % (self.platform, self.version)
        # each supported platform has its own endpoint, port and SSH key needs
        if self.platform == 'windows':
            self.ssh_key = None  # windows connections use a password (see get()) rather than SSH
            self.endpoint = 'https://14blg63h2i.execute-api.us-east-1.amazonaws.com'
            self.port = 5986
        elif self.platform == 'freebsd':
            self.ssh_key = SshKey(args)
            self.endpoint = 'https://14blg63h2i.execute-api.us-east-1.amazonaws.com'
            self.port = 22
        elif self.platform == 'osx':
            self.ssh_key = SshKey(args)
            self.endpoint = 'https://osx.testing.ansible.com'
            self.port = None  # port is taken from the API response for this platform
        else:
            raise ApplicationError('Unsupported platform: %s' % platform)
        # file used to persist the instance id between invocations
        self.path = os.path.expanduser('~/.ansible/test/instances/%s-%s' % (self.name, self.stage))
        if persist and self._load():
            try:
                display.info('Checking existing %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                             verbosity=1)
                self.connection = self.get()
                display.info('Loaded existing %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                             verbosity=1)
            except HttpError as ex:
                if ex.status != 404:
                    raise
                # a 404 means the saved instance no longer exists; discard it
                self._clear()
                display.info('Cleared stale %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                             verbosity=1)
                self.instance_id = None
        else:
            self.instance_id = None
            self._clear()
        if self.instance_id:
            self.started = True
        else:
            # no usable saved instance; generate a new id (the instance starts on demand)
            self.started = False
            self.instance_id = str(uuid.uuid4())
            display.info('Initializing new %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                         verbosity=1)
    def start(self):
        """Start instance."""
        if is_shippable():
            self.start_shippable()
        else:
            self.start_remote()
    def start_remote(self):
        """Start instance for remote development/testing, authenticated with a local key file."""
        with open(os.path.expanduser('~/.ansible-core-ci.key'), 'r') as key_fd:
            auth_key = key_fd.read().strip()
        self._start(dict(
            remote=dict(
                key=auth_key,
                nonce=None,
            ),
        ))
    def start_shippable(self):
        """Start instance on Shippable, authenticated with build metadata."""
        self._start(dict(
            shippable=dict(
                run_id=os.environ['SHIPPABLE_BUILD_ID'],
                job_number=int(os.environ['SHIPPABLE_JOB_NUMBER']),
            ),
        ))
    def stop(self):
        """Stop instance and clear the saved instance information."""
        if not self.started:
            display.info('Skipping invalid %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                         verbosity=1)
            return
        response = self.client.delete(self._uri)
        if response.status_code == 404:
            # instance already gone; just clear the local record
            self._clear()
            display.info('Cleared invalid %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                         verbosity=1)
            return
        if response.status_code == 200:
            self._clear()
            display.info('Stopped running %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                         verbosity=1)
            return
        raise self._create_http_error(response)
    def get(self):
        """
        Get instance connection information.
        :rtype: InstanceConnection
        """
        if not self.started:
            display.info('Skipping invalid %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                         verbosity=1)
            return None
        # reuse the cached connection once the instance is known to be running
        if self.connection and self.connection.running:
            return self.connection
        response = self.client.get(self._uri)
        if response.status_code != 200:
            raise self._create_http_error(response)
        if self.args.explain:
            # explain mode: fabricate placeholder connection details
            self.connection = InstanceConnection(
                running=True,
                hostname='cloud.example.com',
                port=self.port or 12345,
                username='username',
                password='password' if self.platform == 'windows' else None,
            )
        else:
            response_json = response.json()
            status = response_json['status']
            con = response_json['connection']
            self.connection = InstanceConnection(
                running=status == 'running',
                hostname=con['hostname'],
                port=int(con.get('port', self.port)),
                username=con['username'],
                password=con.get('password'),
            )
        status = 'running' if self.connection.running else 'starting'
        display.info('Retrieved %s %s/%s instance %s.' % (status, self.platform, self.version, self.instance_id),
                     verbosity=1)
        return self.connection
    def wait(self):
        """Wait for the instance to become ready (polls every 10s, ~15 minutes total).

        :raises ApplicationError: on timeout
        """
        for _ in range(1, 90):
            if self.get().running:
                return
            time.sleep(10)
        raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
                               (self.platform, self.version, self.instance_id))
    @property
    def _uri(self):
        """str: API URI for this instance."""
        return '%s/%s/jobs/%s' % (self.endpoint, self.stage, self.instance_id)
    def _start(self, auth):
        """Start instance using the given auth payload; no-op if already started."""
        if self.started:
            display.info('Skipping started %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                         verbosity=1)
            return
        data = dict(
            config=dict(
                platform=self.platform,
                version=self.version,
                public_key=self.ssh_key.pub_contents if self.ssh_key else None,
                query=False,
            )
        )
        data.update(dict(auth=auth))
        headers = {
            'Content-Type': 'application/json',
        }
        response = self.client.put(self._uri, data=json.dumps(data), headers=headers)
        if response.status_code != 200:
            raise self._create_http_error(response)
        self.started = True
        self._save()
        display.info('Started %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                     verbosity=1)
    def _clear(self):
        """Clear instance information, ignoring a missing state file."""
        try:
            self.connection = None
            os.remove(self.path)
        except OSError as ex:
            if ex.errno != errno.ENOENT:
                raise
    def _load(self):
        """Load instance information; returns the loaded instance id or None."""
        try:
            with open(self.path, 'r') as instance_fd:
                self.instance_id = instance_fd.read()
                self.started = True
        except IOError as ex:
            if ex.errno != errno.ENOENT:
                raise
            self.instance_id = None
        return self.instance_id
    def _save(self):
        """Save instance information (skipped in explain mode)."""
        if self.args.explain:
            return
        make_dirs(os.path.dirname(self.path))
        with open(self.path, 'w') as instance_fd:
            instance_fd.write(self.instance_id)
    @staticmethod
    def _create_http_error(response):
        """Build an HttpError from an API error response, including any remote stack trace.

        :type response: HttpResponse
        :rtype: ApplicationError
        """
        response_json = response.json()
        stack_trace = ''
        if 'message' in response_json:
            message = response_json['message']
        elif 'errorMessage' in response_json:
            message = response_json['errorMessage'].strip()
            if 'stackTrace' in response_json:
                trace = '\n'.join([x.rstrip() for x in traceback.format_list(response_json['stackTrace'])])
                stack_trace = ('\nTraceback (from remote server):\n%s' % trace)
        else:
            message = str(response_json)
        return HttpError(response.status_code, '%s%s' % (message, stack_trace))
class SshKey(object):
    """Container for SSH key used to connect to remote instances."""
    def __init__(self, args):
        """
        :type args: CommonConfig
        """
        tmp = os.path.expanduser('~/.ansible/test/')
        self.key = os.path.join(tmp, 'id_rsa')
        self.pub = os.path.join(tmp, 'id_rsa.pub')
        if not os.path.isfile(self.pub):
            if not args.explain:
                make_dirs(tmp)
            # generate a key pair with an empty passphrase
            # (run_command is presumably a no-op in explain mode -- confirm)
            run_command(args, ['ssh-keygen', '-q', '-t', 'rsa', '-N', '', '-f', self.key])
        if args.explain:
            self.pub_contents = None
        else:
            with open(self.pub, 'r') as pub_fd:
                self.pub_contents = pub_fd.read().strip()
class InstanceConnection(object):
    """Container for remote instance status and connection details."""
    def __init__(self, running, hostname, port, username, password):
        """
        :type running: bool
        :type hostname: str
        :type port: int
        :type username: str
        :type password: str | None
        """
        self.running = running
        self.hostname = hostname
        self.port = port
        self.username = username
        self.password = password
    def __str__(self):
        # append the password to the credentials only when one is set
        credentials = '%s:%s' % (self.username, self.password) if self.password else self.username
        return '%s:%s [%s]' % (self.hostname, self.port, credentials)

148
test/runner/lib/cover.py Normal file
View file

@@ -0,0 +1,148 @@
"""Code coverage utilities."""
from __future__ import absolute_import, print_function
import os
import re
from lib.target import walk_module_targets
from lib.util import display, ApplicationError, run_command
from lib.executor import EnvironmentConfig, Delegate, install_command_requirements
# directory holding per-process coverage files and the path of the merged output file
COVERAGE_DIR = 'test/results/coverage'
COVERAGE_FILE = os.path.join(COVERAGE_DIR, 'coverage')
def command_coverage_combine(args):
    """Patch paths in coverage files and merge into a single file.
    :type args: CoverageConfig
    """
    coverage = initialize_coverage(args)
    modules = dict((t.module, t.path) for t in list(walk_module_targets()))
    # every file named coverage* except the merged output file itself
    coverage_files = [os.path.join(COVERAGE_DIR, f) for f in os.listdir(COVERAGE_DIR)
                      if f.startswith('coverage') and f != 'coverage']
    arc_data = {}
    ansible_path = os.path.abspath('lib/ansible/') + '/'
    root_path = os.getcwd() + '/'
    for coverage_file in coverage_files:
        original = coverage.CoverageData()
        if os.path.getsize(coverage_file) == 0:
            display.warning('Empty coverage file: %s' % coverage_file)
            continue
        try:
            original.read_file(coverage_file)
        except Exception as ex:  # pylint: disable=locally-disabled, broad-except
            display.error(str(ex))
            continue
        for filename in original.measured_files():
            arcs = original.arcs(filename)
            # rewrite paths recorded on remote/ephemeral environments back into this source tree
            if '/ansible_modlib.zip/ansible/' in filename:
                new_name = re.sub('^.*/ansible_modlib.zip/ansible/', ansible_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif '/ansible_module_' in filename:
                # NOTE(review): the '.' before 'py' is unescaped in this pattern -- confirm harmless
                module = re.sub('^.*/ansible_module_(?P<module>.*).py$', '\\g<module>', filename)
                new_name = os.path.abspath(modules[module])
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif filename.startswith('/root/ansible/'):
                new_name = re.sub('^/.*?/ansible/', root_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            if filename not in arc_data:
                arc_data[filename] = []
            arc_data[filename] += arcs
    # write the merged data, skipping paths that don't exist locally
    updated = coverage.CoverageData()
    for filename in arc_data:
        if not os.path.isfile(filename):
            display.warning('Invalid coverage path: %s' % filename)
            continue
        updated.add_arcs({filename: arc_data[filename]})
    if not args.explain:
        updated.write_file(COVERAGE_FILE)
def command_coverage_report(args):
    """Combine coverage data, then display a console coverage report.

    :type args: CoverageConfig
    """
    command_coverage_combine(args)
    report_cmd = ['coverage', 'report']
    run_command(args, report_cmd)
def command_coverage_html(args):
    """Combine coverage data, then generate an HTML coverage report.

    :type args: CoverageConfig
    """
    command_coverage_combine(args)
    html_cmd = ['coverage', 'html', '-d', 'test/results/reports/coverage']
    run_command(args, html_cmd)
def command_coverage_xml(args):
    """Combine coverage data, then generate an XML coverage report.

    :type args: CoverageConfig
    """
    command_coverage_combine(args)
    xml_cmd = ['coverage', 'xml', '-o', 'test/results/reports/coverage.xml']
    run_command(args, xml_cmd)
def command_coverage_erase(args):
    """Delete all stored coverage result files.

    :type args: CoverageConfig
    """
    initialize_coverage(args)
    stale = [name for name in os.listdir(COVERAGE_DIR) if name.startswith('coverage')]
    for name in stale:
        if not args.explain:
            os.remove(os.path.join(COVERAGE_DIR, name))
def initialize_coverage(args):
    """Delegate or install requirements if requested, then import and return the coverage module.

    :type args: CoverageConfig
    :rtype: coverage
    :raises ApplicationError: if the coverage module is not installed
    """
    if args.delegate:
        raise Delegate()  # run in the delegated environment instead
    if args.requirements:
        install_command_requirements(args)
    try:
        import coverage
    except ImportError:
        coverage = None
    if not coverage:
        raise ApplicationError('You must install the "coverage" python module to use this command.')
    return coverage
class CoverageConfig(EnvironmentConfig):
    """Configuration for the coverage command."""
    def __init__(self, args):
        """
        :type args: any
        """
        # identify this configuration as belonging to the 'coverage' command
        super(CoverageConfig, self).__init__(args, 'coverage')

View file

@@ -0,0 +1,331 @@
"""Delegate test execution to another environment."""
from __future__ import absolute_import, print_function
import os
import sys
import lib.pytar
import lib.thread
from lib.executor import (
SUPPORTED_PYTHON_VERSIONS,
EnvironmentConfig,
IntegrationConfig,
ShellConfig,
TestConfig,
create_shell_command,
)
from lib.core_ci import (
AnsibleCoreCI,
)
from lib.manage_ci import (
ManagePosixCI,
)
from lib.util import (
ApplicationError,
run_command,
)
BUFFER_SIZE = 256 * 256  # dd block size (bytes) used when streaming files in/out of containers
def delegate(args, exclude, require):
    """Dispatch test execution to tox, docker, or a remote instance when requested.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :rtype: bool
    """
    # the first matching delegation mode wins; False means run locally
    if args.tox:
        delegate_tox(args, exclude, require)
    elif args.docker:
        delegate_docker(args, exclude, require)
    elif args.remote:
        delegate_remote(args, exclude, require)
    else:
        return False
    return True
def delegate_tox(args, exclude, require):
    """Delegate execution to tox, once per selected python version.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :raises ApplicationError: for python versions tox does not support
    """
    if args.python:
        versions = args.python,  # single requested version (note: one-element tuple)
        if args.python not in SUPPORTED_PYTHON_VERSIONS:
            raise ApplicationError('tox does not support Python version %s' % args.python)
    else:
        versions = SUPPORTED_PYTHON_VERSIONS
    options = {
        '--tox': args.tox_args,
    }
    for version in versions:
        # select the matching tox environment, e.g. py27 for 2.7
        tox = ['tox', '-c', 'test/runner/tox.ini', '-e', 'py' + version.replace('.', ''), '--']
        cmd = generate_command(args, os.path.abspath('test/runner/test.py'), options, exclude, require)
        if not args.python:
            cmd += ['--python', version]  # pin the delegated run to this loop's version
        run_command(args, tox + cmd)
def delegate_docker(args, exclude, require):
    """Delegate execution to a docker container, optionally linked to a helper container.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    """
    util_image = args.docker_util
    test_image = args.docker
    privileged = args.docker_privileged
    util_id = None
    test_id = None
    # options consumed here and stripped from the delegated command line
    options = {
        '--docker': 1,
        '--docker-privileged': 0,
        '--docker-util': 1,
    }
    cmd = generate_command(args, '/root/ansible/test/runner/test.py', options, exclude, require)
    if isinstance(args, IntegrationConfig):
        if not args.allow_destructive:
            # flag added when not already present -- presumably safe in a disposable container; confirm intent
            cmd.append('--allow-destructive')
    if not args.explain:
        # snapshot the source tree to copy into the container
        lib.pytar.create_tarfile('/tmp/ansible.tgz', '.', lib.pytar.ignore)
    try:
        if util_image:
            util_id, _ = run_command(args, [
                'docker', 'run', '--detach',
                util_image,
            ], capture=True)
            if args.explain:
                util_id = 'util_id'  # placeholder, no command was actually run
            else:
                util_id = util_id.strip()
        else:
            util_id = None
        test_cmd = [
            'docker', 'run', '--detach',
            '--volume', '/sys/fs/cgroup:/sys/fs/cgroup:ro',
            '--privileged=%s' % str(privileged).lower(),
        ]
        if util_id:
            # expose the helper container under the hostnames the http tests expect
            test_cmd += [
                '--link', '%s:ansible.http.tests' % util_id,
                '--link', '%s:sni1.ansible.http.tests' % util_id,
                '--link', '%s:sni2.ansible.http.tests' % util_id,
                '--link', '%s:fail.ansible.http.tests' % util_id,
                '--env', 'HTTPTESTER=1',
            ]
        test_id, _ = run_command(args, test_cmd + [test_image], capture=True)
        if args.explain:
            test_id = 'test_id'  # placeholder, no command was actually run
        else:
            test_id = test_id.strip()
        # write temporary files to /root since /tmp isn't ready immediately on container start
        docker_put(args, test_id, 'test/runner/setup/docker.sh', '/root/docker.sh')
        run_command(args,
                    ['docker', 'exec', test_id, '/bin/bash', '/root/docker.sh'])
        docker_put(args, test_id, '/tmp/ansible.tgz', '/root/ansible.tgz')
        run_command(args,
                    ['docker', 'exec', test_id, 'mkdir', '/root/ansible'])
        run_command(args,
                    ['docker', 'exec', test_id, 'tar', 'oxzf', '/root/ansible.tgz', '--directory', '/root/ansible'])
        try:
            command = ['docker', 'exec']
            if isinstance(args, ShellConfig):
                command.append('-it')  # interactive shell needs a tty
            run_command(args, command + [test_id] + cmd)
        finally:
            # always retrieve test results, even when the run fails
            run_command(args,
                        ['docker', 'exec', test_id,
                         'tar', 'czf', '/root/results.tgz', '--directory', '/root/ansible/test', 'results'])
            docker_get(args, test_id, '/root/results.tgz', '/tmp/results.tgz')
            run_command(args,
                        ['tar', 'oxzf', '/tmp/results.tgz', '-C', 'test'])
    finally:
        # remove whichever containers were started
        if util_id:
            run_command(args,
                        ['docker', 'rm', '-f', util_id],
                        capture=True)
        if test_id:
            run_command(args,
                        ['docker', 'rm', '-f', test_id],
                        capture=True)
def docker_put(args, container_id, src, dst):
    """Copy a local file into a running container by piping it through 'docker exec dd'.

    :type args: EnvironmentConfig
    :type container_id: str
    :type src: str
    :type dst: str
    """
    # avoid 'docker cp' due to a bug which causes 'docker rm' to fail
    command = [
        'docker', 'exec', '-i', container_id,
        'dd', 'of=%s' % dst, 'bs=%s' % BUFFER_SIZE,
    ]

    with open(src, 'rb') as source_fd:
        run_command(args, command, stdin=source_fd, capture=True)
def docker_get(args, container_id, src, dst):
    """Copy a file out of a running container by piping it through 'docker exec dd'.

    :type args: EnvironmentConfig
    :type container_id: str
    :type src: str
    :type dst: str
    """
    # avoid 'docker cp' due to a bug which causes 'docker rm' to fail
    command = [
        'docker', 'exec', '-i', container_id,
        'dd', 'if=%s' % src, 'bs=%s' % BUFFER_SIZE,
    ]

    with open(dst, 'wb') as destination_fd:
        run_command(args, command, stdout=destination_fd, capture=True)
def delegate_remote(args, exclude, require):
    """Delegate execution of the current ansible-test command to a remote Core CI instance.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    """
    parts = args.remote.split('/', 1)

    platform = parts[0]
    version = parts[1]

    core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage)

    try:
        core_ci.start()
        core_ci.wait()

        cmd = generate_command(args, 'ansible/test/runner/test.py', {'--remote': 1}, exclude, require)

        if isinstance(args, IntegrationConfig) and not args.allow_destructive:
            cmd.append('--allow-destructive')

        manage = ManagePosixCI(core_ci)
        manage.setup()

        try:
            manage.ssh(cmd)
        finally:
            # always retrieve test results, even when the run fails
            manage.ssh('rm -rf /tmp/results && cp -a ansible/test/results /tmp/results')
            manage.download('/tmp/results', 'test')
    finally:
        pass  # placeholder for future instance cleanup
def generate_command(args, path, options, exclude, require):
    """Build the command line used to re-invoke ansible-test on a delegate.

    :type args: EnvironmentConfig
    :type path: str
    :type options: dict[str, int]
    :type exclude: list[str]
    :type require: list[str]
    :return: list[str]
    """
    options['--color'] = 1

    cmd = [path]
    cmd.extend(filter_options(args, sys.argv[1:], options, exclude, require))
    cmd.extend(['--color', 'yes' if args.color else 'no'])

    if args.requirements:
        cmd.append('--requirements')

    if isinstance(args, ShellConfig):
        return create_shell_command(cmd)

    return cmd
def filter_options(args, argv, options, exclude, require):
    """Yield the arguments from argv suitable for a delegated ansible-test process,
    dropping options (and their values) which are consumed locally, then appending
    --exclude/--require entries.

    :type args: EnvironmentConfig
    :type argv: list[str]
    :type options: dict[str, int]  # maps option name to the number of values it consumes
    :type exclude: list[str]
    :type require: list[str]
    :rtype: collections.Iterable[str]
    """
    options = options.copy()

    # requirements are handled locally; the delegated command gets --requirements appended separately
    options['--requirements'] = 0

    if isinstance(args, TestConfig):
        # change detection options are resolved locally before delegation
        options.update({
            '--changed': 0,
            '--tracked': 0,
            '--untracked': 0,
            '--ignore-committed': 0,
            '--ignore-staged': 0,
            '--ignore-unstaged': 0,
            '--changed-from': 1,
            '--changed-path': 1,
        })

    remaining = 0  # number of upcoming non-option values still to drop

    for arg in argv:
        if not arg.startswith('-') and remaining:
            # value belonging to a filtered option; drop it
            remaining -= 1
            continue

        remaining = 0

        parts = arg.split('=', 1)
        key = parts[0]

        if key in options:
            # drop the option; a '--key=value' form has already consumed one value
            remaining = options[key] - len(parts) + 1
            continue

        yield arg

    for target in exclude:
        yield '--exclude'
        yield target

    for target in require:
        yield '--require'
        yield target

1253
test/runner/lib/executor.py Normal file

File diff suppressed because it is too large Load diff

76
test/runner/lib/git.py Normal file
View file

@ -0,0 +1,76 @@
"""Wrapper around git command-line tools."""
from __future__ import absolute_import, print_function
from lib.util import (
CommonConfig,
run_command,
)
class Git(object):
    """Thin wrapper around the git command-line client."""
    def __init__(self, args):
        """
        :type args: CommonConfig
        """
        self.args = args
        self.git = 'git'

    def get_diff_names(self, args):
        """Return the names of files changed according to 'git diff'.

        :type args: list[str]
        :rtype: list[str]
        """
        return self.run_git_split(['diff', '--name-only', '--no-renames', '-z'] + args, '\0')

    def get_file_names(self, args):
        """Return the names of files tracked by git.

        :type args: list[str]
        :rtype: list[str]
        """
        return self.run_git_split(['ls-files', '-z'] + args, '\0')

    def get_branches(self):
        """Return the names of all local branches.

        :rtype: list[str]
        """
        return self.run_git_split(['for-each-ref', 'refs/heads/', '--format', '%(refname:strip=2)'])

    def get_branch(self):
        """Return the name of the currently checked out branch.

        :rtype: str
        """
        return self.run_git(['symbolic-ref', '--short', 'HEAD']).strip()

    def get_branch_fork_point(self, branch):
        """Return the commit at which the current branch forked from the given branch.

        :type branch: str
        :rtype: str
        """
        return self.run_git(['merge-base', '--fork-point', branch]).strip()

    def run_git_split(self, cmd, separator=None):
        """Run a git command and split its output on the given separator.

        :type cmd: list[str]
        :param separator: str | None
        :rtype: list[str]
        """
        output = self.run_git(cmd).strip(separator)

        return output.split(separator) if output else []

    def run_git(self, cmd):
        """Run a git command and return its captured standard output.

        :type cmd: list[str]
        :rtype: str
        """
        return run_command(self.args, [self.git] + cmd, capture=True, always=True)[0]

122
test/runner/lib/http.py Normal file
View file

@ -0,0 +1,122 @@
"""
Primitive replacement for requests to avoid extra dependency.
Avoids use of urllib2 due to lack of SNI support.
"""
from __future__ import absolute_import, print_function
import json
try:
from urllib import urlencode
except ImportError:
# noinspection PyCompatibility,PyUnresolvedReferences,PyUnresolvedReferences
from urllib.parse import urlencode # pylint: disable=locally-disabled, import-error, no-name-in-module
from lib.util import (
CommonConfig,
ApplicationError,
run_command,
)
class HttpClient(object):
    """Make HTTP requests by shelling out to curl."""
    def __init__(self, args, always=False):
        """
        :type args: CommonConfig
        :type always: bool
        """
        self.args = args
        self.always = always

    def get(self, url):
        """Issue a GET request.

        :type url: str
        :rtype: HttpResponse
        """
        return self.request('GET', url)

    def delete(self, url):
        """Issue a DELETE request.

        :type url: str
        :rtype: HttpResponse
        """
        return self.request('DELETE', url)

    def put(self, url, data=None, headers=None):
        """Issue a PUT request.

        :type url: str
        :type data: str | None
        :type headers: dict[str, str] | None
        :rtype: HttpResponse
        """
        return self.request('PUT', url, data, headers)

    def request(self, method, url, data=None, headers=None):
        """Issue an HTTP request via curl and parse the status line and body.

        :type method: str
        :type url: str
        :type data: str | None
        :type headers: dict[str, str] | None
        :rtype: HttpResponse
        """
        cmd = ['curl', '-s', '-S', '-i', '-X', method]

        if headers is None:
            headers = {}

        headers['Expect'] = ''  # don't send expect continue header

        for name in headers.keys():
            cmd.extend(['-H', '%s: %s' % (name, headers[name])])

        if data is not None:
            cmd.extend(['-d', data])

        cmd.append(url)

        stdout = run_command(self.args, cmd, capture=True, always=self.always)[0]

        if self.args.explain and not self.always:
            # no request was actually made; fabricate a successful empty response
            return HttpResponse(200, '')

        header, body = stdout.split('\r\n\r\n', 1)

        status_line = header.split('\r\n')[0]
        status_code = int(status_line.split(' ')[1])

        return HttpResponse(status_code, body)
class HttpResponse(object):
    """Parsed HTTP response produced by HttpClient."""
    def __init__(self, status_code, response):
        """
        :type status_code: int
        :type response: str
        """
        self.status_code = status_code
        self.response = response

    def json(self):
        """Parse the response body as JSON.

        :rtype: any
        :raises HttpError: when the body is not valid JSON
        """
        try:
            parsed = json.loads(self.response)
        except ValueError:
            raise HttpError(self.status_code, 'Cannot parse response as JSON:\n%s' % self.response)

        return parsed
class HttpError(ApplicationError):
    """HTTP response treated as an error (unexpected status or unparsable body)."""
    def __init__(self, status, message):
        """
        :type status: int
        :type message: str
        """
        # include the status code in the displayed message
        super(HttpError, self).__init__('%s: %s' % (status, message))
        self.status = status

View file

@ -0,0 +1,142 @@
"""Access Ansible Core CI remote services."""
from __future__ import absolute_import, print_function
import pipes
from time import sleep
import lib.pytar
from lib.util import (
SubprocessError,
ApplicationError,
run_command,
)
from lib.core_ci import (
AnsibleCoreCI,
)
from lib.ansible_util import (
ansible_environment,
)
class ManageWindowsCI(object):
    """Manage access to a Windows instance provided by Ansible Core CI."""
    def __init__(self, core_ci):
        """
        :type core_ci: AnsibleCoreCI
        """
        self.core_ci = core_ci

    def wait(self):
        """Wait for the instance to respond to an ansible win_ping, retrying for up to ~15 minutes."""
        name = 'windows_%s' % self.core_ci.version

        extra_vars = ' '.join([
            'ansible_connection=winrm',
            'ansible_host=%s' % self.core_ci.connection.hostname,
            'ansible_user=%s' % self.core_ci.connection.username,
            'ansible_password=%s' % self.core_ci.connection.password,
            'ansible_port=%s' % self.core_ci.connection.port,
            'ansible_winrm_server_cert_validation=ignore',
        ])

        cmd = ['ansible', '-m', 'win_ping', '-i', '%s,' % name, name, '-e', extra_vars]

        env = ansible_environment(self.core_ci.args)

        for _ in range(1, 90):
            try:
                run_command(self.core_ci.args, cmd, env=env)
                return
            except SubprocessError:
                sleep(10)

        raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
                               (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))
class ManagePosixCI(object):
    """Manage access to a POSIX instance provided by Ansible Core CI."""
    def __init__(self, core_ci):
        """
        :type core_ci: AnsibleCoreCI
        """
        self.core_ci = core_ci
        self.ssh_args = ['-o', 'BatchMode=yes', '-o', 'StrictHostKeyChecking=no', '-i', self.core_ci.ssh_key.key]

        # Command prefix used to gain root on the remote host. Previously this
        # attribute was only assigned for freebsd/osx, so any other platform
        # raised AttributeError in ssh(). Platforms not listed below are now
        # assumed to need no privilege escalation.
        self.become = []

        if self.core_ci.platform == 'freebsd':
            self.become = ['su', '-l', 'root', '-c']
        elif self.core_ci.platform == 'osx':
            self.become = ['sudo', '-in', 'PATH=/usr/local/bin:$PATH']

    def setup(self):
        """Start instance and wait for it to become ready and respond to an ansible ping."""
        self.wait()
        self.configure()
        self.upload_source()

    def wait(self):
        """Wait for the instance to respond to SSH, retrying for up to ~15 minutes."""
        for _ in range(1, 90):
            try:
                self.ssh('id')
                return
            except SubprocessError:
                sleep(10)
                continue

        raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
                               (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))

    def configure(self):
        """Configure remote host for testing by running the platform setup script."""
        self.upload('test/runner/setup/remote.sh', '/tmp')
        self.ssh('chmod +x /tmp/remote.sh && /tmp/remote.sh %s' % self.core_ci.platform)

    def upload_source(self):
        """Upload and extract the source tree on the remote host."""
        if not self.core_ci.args.explain:
            lib.pytar.create_tarfile('/tmp/ansible.tgz', '.', lib.pytar.ignore)

        self.upload('/tmp/ansible.tgz', '/tmp')
        self.ssh('rm -rf ~/ansible && mkdir ~/ansible && cd ~/ansible && tar oxzf /tmp/ansible.tgz')

    def download(self, remote, local):
        """Download a file or directory from the remote host.

        :type remote: str
        :type local: str
        """
        self.scp('%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote), local)

    def upload(self, local, remote):
        """Upload a file or directory to the remote host.

        :type local: str
        :type remote: str
        """
        self.scp(local, '%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote))

    def ssh(self, command):
        """Run a command on the remote host via ssh, using the become prefix if any.

        :type command: str | list[str]
        """
        if isinstance(command, list):
            command = ' '.join(pipes.quote(c) for c in command)

        run_command(self.core_ci.args,
                    ['ssh', '-tt', '-q'] + self.ssh_args +
                    ['-p', str(self.core_ci.connection.port),
                     '%s@%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname)] +
                    self.become + [pipes.quote(command)])

    def scp(self, src, dst):
        """Copy a file or directory between the local and remote hosts via scp.

        :type src: str
        :type dst: str
        """
        run_command(self.core_ci.args,
                    ['scp'] + self.ssh_args +
                    ['-P', str(self.core_ci.connection.port), '-q', '-r', src, dst])

69
test/runner/lib/pytar.py Normal file
View file

@ -0,0 +1,69 @@
"""Python native TGZ creation."""
from __future__ import absolute_import, print_function
import tarfile
import os
# improve performance by disabling uid/gid lookups
tarfile.pwd = None
tarfile.grp = None

# To reduce archive time and size, ignore non-versioned files which are large or numerous.
# Also ignore miscellaneous git related files since the .git directory is ignored.
IGNORE_DIRS = (
    '.tox',
    '.git',
    '.idea',
    '__pycache__',
    'ansible.egg-info',
)

IGNORE_FILES = (
    '.gitignore',
    '.gitdir',
)

IGNORE_EXTENSIONS = (
    '.pyc',
    '.retry',
)


def ignore(item):
    """Tar filter which returns None for items to exclude from the archive.

    :type item: tarfile.TarInfo
    :rtype: tarfile.TarInfo | None
    """
    filename = os.path.basename(item.path)
    name, ext = os.path.splitext(filename)

    if not item.isdir():
        if item.path.startswith('./test/results/'):
            return None

        if item.path.startswith('./docsite/') and filename.endswith('_module.rst'):
            return None

        if name in IGNORE_FILES:
            return None

        if ext in IGNORE_EXTENSIONS:
            return None

    # Check every path component, not just the (head, tail) pair produced by
    # os.path.split, so ignored directories are matched at any depth even when
    # the filter sees a path whose ancestors were not pruned. Tar member paths
    # always use '/' as the separator.
    if any(part in IGNORE_DIRS for part in item.path.split('/')):
        return None

    return item
def create_tarfile(dst_path, src_path, tar_filter):
    """Create a gzip compressed tar archive of src_path at dst_path.

    :type dst_path: str
    :type src_path: str
    :type tar_filter: (tarfile.TarInfo) -> tarfile.TarInfo | None
    """
    # compresslevel 4 trades a slightly larger archive for faster creation
    with tarfile.open(dst_path, mode='w:gz', compresslevel=4) as archive:
        archive.add(src_path, filter=tar_filter)

530
test/runner/lib/target.py Normal file
View file

@ -0,0 +1,530 @@
"""Test target identification, iteration and inclusion/exclusion."""
from __future__ import absolute_import, print_function
import os
import re
import errno
import itertools
import abc
from lib.util import ApplicationError
# file extensions which identify ansible modules
MODULE_EXTENSIONS = '.py', '.ps1'
def find_target_completion(target_func, prefix):
    """Return completion target names matching the given prefix.

    Any exception is converted into a single-element result so completion
    failures are visible instead of silently producing nothing.

    :type target_func: () -> collections.Iterable[CompletionTarget]
    :type prefix: unicode
    :rtype: list[str]
    """
    try:
        targets = target_func()
        if not isinstance(prefix, str):
            # Python 2 passes the prefix as unicode; encode it so it compares
            # equal against the str aliases. Under Python 3 the prefix is
            # already str and encoding it to bytes would break every
            # startswith comparison against the str aliases.
            prefix = prefix.encode()
        short = os.environ.get('COMP_TYPE') == '63'  # double tab completion from bash
        matches = walk_completion_targets(targets, prefix, short)
        return matches
    except Exception as ex:  # pylint: disable=locally-disabled, broad-except
        return [str(ex)]
def walk_completion_targets(targets, prefix, short=False):
    """Return the sorted completion matches for prefix among all target aliases.

    :type targets: collections.Iterable[CompletionTarget]
    :type prefix: str
    :type short: bool
    :rtype: tuple[str]
    """
    aliases = set()

    for target in targets:
        aliases.update(target.aliases)

    # a directory prefix which is itself an alias should not match itself
    if prefix.endswith('/'):
        aliases.discard(prefix)

    matches = []

    for alias in aliases:
        # only match aliases at the current directory depth
        if alias.startswith(prefix) and '/' not in alias[len(prefix):-1]:
            matches.append(alias)

    if short:
        offset = len(os.path.dirname(prefix))

        if offset:
            offset += 1  # account for the path separator

        trimmed = [match[offset:] for match in matches if len(match) > offset]

        if len(trimmed) > 1:
            matches = trimmed

    return tuple(sorted(matches))
def walk_internal_targets(targets, includes=None, excludes=None, requires=None):
    """Return the sorted targets selected by the include/exclude/require patterns.

    :type targets: collections.Iterable[T <= CompletionTarget]
    :type includes: list[str]
    :type excludes: list[str]
    :type requires: list[str]
    :rtype: tuple[T <= CompletionTarget]
    """
    def by_name(target):
        return target.name

    targets = tuple(targets)

    selected = sorted(filter_targets(targets, includes, errors=True, directories=False), key=by_name)

    if requires:
        required = set(filter_targets(targets, requires, errors=True, directories=False))
        selected = [target for target in selected if target in required]

    if excludes:
        # validate the exclude patterns against all targets; result intentionally unused
        list(filter_targets(targets, excludes, errors=True, include=False, directories=False))

    remaining = set(filter_targets(selected, excludes, errors=False, include=False, directories=False))

    return tuple(sorted(remaining, key=by_name))
def walk_external_targets(targets, includes=None, excludes=None, requires=None):
    """Return the (include, exclude) target tuples selected by the given patterns,
    merging duplicate directory targets by unioning their modules.

    :type targets: collections.Iterable[CompletionTarget]
    :type includes: list[str]
    :type excludes: list[str]
    :type requires: list[str]
    :rtype: tuple[CompletionTarget], tuple[CompletionTarget]
    """
    targets = tuple(targets)

    if requires:
        include_targets = list(filter_targets(targets, includes, errors=True, directories=False))
        require_targets = set(filter_targets(targets, requires, errors=True, directories=False))
        # narrow the includes to the names which also satisfy the requires
        includes = [target.name for target in include_targets if target in require_targets]

        if includes:
            include_targets = sorted(filter_targets(targets, includes, errors=True), key=lambda t: t.name)
        else:
            include_targets = []
    else:
        include_targets = sorted(filter_targets(targets, includes, errors=True), key=lambda t: t.name)

    if excludes:
        exclude_targets = sorted(filter_targets(targets, excludes, errors=True), key=lambda t: t.name)
    else:
        exclude_targets = []

    # collapse consecutive directory targets with the same name into one,
    # merging their module sets in place
    previous = None
    include = []
    for target in include_targets:
        if isinstance(previous, DirectoryTarget) and isinstance(target, DirectoryTarget) \
                and previous.name == target.name:
            previous.modules = tuple(set(previous.modules) | set(target.modules))
        else:
            include.append(target)
            previous = target

    previous = None
    exclude = []
    for target in exclude_targets:
        if isinstance(previous, DirectoryTarget) and isinstance(target, DirectoryTarget) \
                and previous.name == target.name:
            previous.modules = tuple(set(previous.modules) | set(target.modules))
        else:
            exclude.append(target)
            previous = target

    return tuple(include), tuple(exclude)
def filter_targets(targets, patterns, include=True, directories=True, errors=True):
    """Yield the targets selected (or deselected, when include=False) by the patterns.

    Patterns are anchored regular expressions matched against each target alias.
    When a directory alias matches and directories is True, a DirectoryTarget for
    the shallowest matched directory is yielded in place of the target itself.

    :type targets: collections.Iterable[CompletionTarget]
    :type patterns: list[str]
    :type include: bool
    :type directories: bool
    :type errors: bool
    :rtype: collections.Iterable[CompletionTarget]
    """
    unmatched = set(patterns or ())

    for target in targets:
        matched_directories = set()
        match = False

        if patterns:
            for alias in target.aliases:
                for pattern in patterns:
                    # anchor the pattern so it must match the entire alias
                    if re.match('^%s$' % pattern, alias):
                        match = True

                        try:
                            unmatched.remove(pattern)
                        except KeyError:
                            pass

                        if alias.endswith('/'):
                            # prefer the target's own base path when it is deeper than the alias
                            if target.base_path and len(target.base_path) > len(alias):
                                matched_directories.add(target.base_path)
                            else:
                                matched_directories.add(alias)
        elif include:
            # no patterns given: everything is selected
            match = True
            if not target.base_path:
                matched_directories.add('.')
            for alias in target.aliases:
                if alias.endswith('/'):
                    if target.base_path and len(target.base_path) > len(alias):
                        matched_directories.add(target.base_path)
                    else:
                        matched_directories.add(alias)

        if match != include:
            continue

        if directories and matched_directories:
            # yield the shallowest matched directory instead of the target itself
            yield DirectoryTarget(sorted(matched_directories, key=len)[0], target.modules)
        else:
            yield target

    if errors:
        if unmatched:
            raise TargetPatternsNotMatched(unmatched)
def walk_module_targets():
    """Yield the test targets which correspond to ansible modules.

    :rtype: collections.Iterable[TestTarget]
    """
    path = 'lib/ansible/modules'

    return (target for target in walk_test_targets(path, path + '/', extensions=MODULE_EXTENSIONS) if target.module)
def walk_units_targets():
    """Yield the unit test targets.

    :rtype: collections.Iterable[TestTarget]
    """
    return walk_test_targets(
        path='test/units',
        module_path='test/units/modules/',
        extensions=('.py',),
        prefix='test_',
    )
def walk_compile_targets():
    """Yield the compile test targets (all python files).

    :rtype: collections.Iterable[TestTarget]
    """
    return walk_test_targets(
        module_path='lib/ansible/modules/',
        extensions=('.py',),
    )
def walk_sanity_targets():
    """Yield the sanity test targets (all files).

    :rtype: collections.Iterable[TestTarget]
    """
    return walk_test_targets(
        module_path='lib/ansible/modules/',
    )
def walk_posix_integration_targets():
    """Yield the integration targets aliased into the posix group.

    :rtype: collections.Iterable[IntegrationTarget]
    """
    return (target for target in walk_integration_targets() if 'posix/' in target.aliases)
def walk_network_integration_targets():
    """Yield the integration targets aliased into the network group.

    :rtype: collections.Iterable[IntegrationTarget]
    """
    return (target for target in walk_integration_targets() if 'network/' in target.aliases)
def walk_windows_integration_targets():
    """Yield the integration targets aliased into the windows group.

    :rtype: collections.Iterable[IntegrationTarget]
    """
    return (target for target in walk_integration_targets() if 'windows/' in target.aliases)
def walk_integration_targets():
    """Yield an IntegrationTarget for each directory under test/integration/targets.

    :rtype: collections.Iterable[IntegrationTarget]
    """
    base = 'test/integration/targets'

    modules = frozenset(target.module for target in walk_module_targets())
    prefixes = load_integration_prefixes()

    for name in sorted(os.listdir(base)):
        yield IntegrationTarget(os.path.join(base, name), modules, prefixes)
def load_integration_prefixes():
    """Read the target-prefixes.* files and map each listed prefix to its group.

    :rtype: dict[str, str]
    """
    path = 'test/integration'
    prefixes = {}

    for name in sorted(os.listdir(path)):
        if os.path.splitext(name)[0] != 'target-prefixes':
            continue

        # the file extension names the group, e.g. target-prefixes.network -> network
        group = os.path.splitext(name)[1][1:]

        with open(os.path.join(path, name), 'r') as prefix_fd:
            for line in prefix_fd.read().splitlines():
                prefixes[line] = group

    return prefixes
def walk_test_targets(path=None, module_path=None, extensions=None, prefix=None):
    """Walk the tree at path (the current directory when None) yielding test targets.

    :type path: str | None
    :type module_path: str | None
    :type extensions: tuple[str] | None
    :type prefix: str | None
    :rtype: collections.Iterable[TestTarget]
    """
    for root, _, file_names in os.walk(path or '.', topdown=False):
        if root.endswith('/__pycache__'):
            continue

        if path is None:
            root = root[2:]  # strip the leading './'

            if root.startswith('.'):
                continue  # skip hidden directories

        for file_name in file_names:
            name, ext = os.path.splitext(os.path.basename(file_name))

            if name.startswith('.'):
                continue

            if extensions and ext not in extensions:
                continue

            if prefix and not name.startswith(prefix):
                continue

            yield TestTarget(os.path.join(root, file_name), module_path, prefix, path)
class CompletionTarget(object):
    """Command-line argument completion target base class.

    Equality, ordering and hashing are based on the repr, which combines the
    target name with its module list.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self):
        self.name = None
        self.path = None
        self.base_path = None
        self.modules = tuple()
        self.aliases = tuple()

    def __eq__(self, other):
        return isinstance(other, CompletionTarget) and repr(self) == repr(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        return self.name < other.name

    def __gt__(self, other):
        return self.name > other.name

    def __hash__(self):
        return hash(repr(self))

    def __repr__(self):
        if self.modules:
            return '%s (%s)' % (self.name, ', '.join(self.modules))

        return self.name
class DirectoryTarget(CompletionTarget):
    """Directory target."""
    def __init__(self, path, modules):
        """
        :type path: str
        :type modules: tuple[str]
        """
        super(DirectoryTarget, self).__init__()

        # for directories the name and path are the same
        self.name = path
        self.path = path
        self.modules = modules
class TestTarget(CompletionTarget):
    """Generic test target."""
    def __init__(self, path, module_path, module_prefix, base_path):
        """
        :type path: str
        :type module_path: str | None
        :type module_prefix: str | None
        :type base_path: str
        """
        super(TestTarget, self).__init__()

        self.name = path
        self.path = path
        self.base_path = base_path + '/' if base_path else None

        name, ext = os.path.splitext(os.path.basename(self.path))

        if module_path and path.startswith(module_path) and name != '__init__' and ext in MODULE_EXTENSIONS:
            # strip the module prefix and any leading underscores (deprecated modules)
            self.module = name[len(module_prefix or ''):].lstrip('_')
            self.modules = (self.module,)
        else:
            self.module = None
            self.modules = tuple()

        # aliases are the path, the module name (if any) and every ancestor directory
        candidates = [self.path, self.module]
        parts = self.path.split('/')
        candidates.extend('%s/' % '/'.join(parts[:i]) for i in range(1, len(parts)))

        self.aliases = tuple(sorted(alias for alias in candidates if alias))
class IntegrationTarget(CompletionTarget):
    """Integration test target."""
    # groups which imply the target is not a posix test
    non_posix = frozenset((
        'network',
        'windows',
    ))

    # group names which are categories rather than user-selectable groups
    categories = frozenset(non_posix | frozenset((
        'posix',
        'module',
        'needs',
        'skip',
    )))

    def __init__(self, path, modules, prefixes):
        """
        :type path: str
        :type modules: frozenset[str]
        :type prefixes: dict[str, str]
        """
        super(IntegrationTarget, self).__init__()

        self.name = os.path.basename(path)
        self.path = path

        # script_path and type

        contents = sorted(os.listdir(path))

        runme_files = tuple(c for c in contents if os.path.splitext(c)[0] == 'runme')
        test_files = tuple(c for c in contents if os.path.splitext(c)[0] == 'test')

        self.script_path = None

        if runme_files:
            self.type = 'script'
            self.script_path = os.path.join(path, runme_files[0])
        elif test_files:
            self.type = 'special'
        elif os.path.isdir(os.path.join(path, 'tasks')):
            self.type = 'role'
        else:
            self.type = 'unknown'

        # static_aliases (one alias per line in an optional 'aliases' file)

        try:
            with open(os.path.join(path, 'aliases'), 'r') as aliases_file:
                static_aliases = tuple(aliases_file.read().splitlines())
        except IOError as ex:
            if ex.errno != errno.ENOENT:
                raise
            static_aliases = tuple()

        # modules covered by this target: the target name itself (optionally
        # without a 'win_' prefix) plus any static aliases which name modules

        if self.name in modules:
            module = self.name
        elif self.name.startswith('win_') and self.name[4:] in modules:
            module = self.name[4:]
        else:
            module = None

        self.modules = tuple(sorted(a for a in static_aliases + tuple([module]) if a in modules))

        # groups

        groups = [self.type]
        groups += [a for a in static_aliases if a not in modules]
        groups += ['module/%s' % m for m in self.modules]

        if not self.modules:
            groups.append('non_module')

        if 'destructive' not in groups:
            groups.append('non_destructive')

        if '_' in self.name:
            prefix = self.name[:self.name.find('_')]
        else:
            prefix = None

        if prefix in prefixes:
            group = prefixes[prefix]

            if group != prefix:
                group = '%s/%s' % (group, prefix)

            groups.append(group)

        if self.name.startswith('win_'):
            groups.append('windows')

        if self.name.startswith('connection_'):
            groups.append('connection')

        if self.name.startswith('setup_') or self.name.startswith('prepare_'):
            groups.append('hidden')

        if self.type not in ('script', 'role'):
            groups.append('hidden')

        # expand each 'a/b/c' group with its ancestors 'a' and 'a/b'; islice is
        # bounded by the length captured here, so groups appended during this
        # loop are not themselves expanded again
        for group in itertools.islice(groups, 0, len(groups)):
            if '/' in group:
                parts = group.split('/')

                for i in range(1, len(parts)):
                    groups.append('/'.join(parts[:i]))

        if not any(g in self.non_posix for g in groups):
            groups.append('posix')

        # aliases

        aliases = [self.name] + \
                  ['%s/' % g for g in groups] + \
                  ['%s/%s' % (g, self.name) for g in groups if g not in self.categories]

        if 'hidden/' in aliases:
            # hidden targets are only selectable through hidden/ prefixed aliases
            aliases = ['hidden/'] + ['hidden/%s' % a for a in aliases if not a.startswith('hidden/')]

        self.aliases = tuple(sorted(set(aliases)))
class TargetPatternsNotMatched(ApplicationError):
    """One or more targets were not matched when a match was required."""
    def __init__(self, patterns):
        """
        :type patterns: set[str]
        """
        self.patterns = sorted(patterns)

        if len(self.patterns) == 1:
            message = 'Target pattern not matched: %s' % self.patterns[0]
        else:
            message = 'Target patterns not matched:\n%s' % '\n'.join(self.patterns)

        super(TargetPatternsNotMatched, self).__init__(message)

48
test/runner/lib/thread.py Normal file
View file

@ -0,0 +1,48 @@
"""Python threading tools."""
from __future__ import absolute_import, print_function
import threading
import sys
try:
# noinspection PyPep8Naming
import Queue as queue
except ImportError:
# noinspection PyUnresolvedReferences
import queue # pylint: disable=locally-disabled, import-error
class WrappedThread(threading.Thread):
    """Wrapper around Thread which captures results and exceptions."""
    def __init__(self, action):
        """
        :type action: () -> any
        """
        super(WrappedThread, self).__init__()
        self._result = queue.Queue()
        self.action = action

    def run(self):
        """
        Run action and capture results or exception.
        Do not override. Do not call directly. Executed by the start() method.
        """
        # noinspection PyBroadException
        try:
            result = self.action()
            self._result.put((result, None))
        except:  # pylint: disable=locally-disabled, bare-except
            self._result.put((None, sys.exc_info()))

    def wait_for_result(self):
        """
        Wait for thread to exit and return the result or raise an exception.
        :rtype: any
        """
        result, exception = self._result.get()

        if not exception:
            return result

        if sys.version_info[0] > 2:
            raise exception[0](exception[1]).with_traceback(exception[2])

        # re-raise with the original traceback using python 2 only syntax
        # noinspection PyRedundantParentheses
        exec('raise exception[0], exception[1], exception[2]')  # pylint: disable=locally-disabled, exec-used

415
test/runner/lib/util.py Normal file
View file

@ -0,0 +1,415 @@
"""Miscellaneous utility functions and classes."""
from __future__ import absolute_import, print_function
import errno
import os
import pipes
import shutil
import subprocess
import sys
import time
def is_shippable():
    """True when running under Shippable CI, based on the SHIPPABLE environment variable.

    :rtype: bool
    """
    shippable = os.environ.get('SHIPPABLE')

    return shippable == 'true'
def remove_file(path):
    """Remove the given file if it exists; anything else (missing path, directory) is left alone.

    :type path: str
    """
    if not os.path.isfile(path):
        return

    os.remove(path)
def find_executable(executable, cwd=None, path=None, required=True):
    """Locate an executable, either relative to cwd or on the search path.

    :type executable: str
    :type cwd: str
    :type path: str
    :type required: bool | str  # pass 'warning' to warn instead of raising when not found
    :rtype: str | None
    """
    match = None
    real_cwd = os.getcwd()

    if not cwd:
        cwd = real_cwd

    if os.path.dirname(executable):
        # executable contains a path component; resolve it relative to cwd only
        target = os.path.join(cwd, executable)
        if os.path.exists(target) and os.access(target, os.F_OK | os.X_OK):
            match = executable
    else:
        if path is None:
            path = os.environ.get('PATH', os.defpath)

        if path:
            path_dirs = path.split(os.pathsep)
            seen_dirs = set()

            for path_dir in path_dirs:
                if path_dir in seen_dirs:
                    continue

                seen_dirs.add(path_dir)

                # treat the actual working directory entry as the requested cwd
                if os.path.abspath(path_dir) == real_cwd:
                    path_dir = cwd

                candidate = os.path.join(path_dir, executable)

                if os.path.exists(candidate) and os.access(candidate, os.F_OK | os.X_OK):
                    match = candidate
                    break

    if not match and required:
        message = 'Required program "%s" not found.' % executable

        if required != 'warning':
            raise ApplicationError(message)

        display.warning(message)

    return match
def run_command(args, cmd, capture=False, env=None, data=None, cwd=None, always=False, stdin=None, stdout=None):
    """Run a command, honoring the --explain option from args unless always is True.

    :type args: CommonConfig
    :type cmd: collections.Iterable[str]
    :type capture: bool
    :type env: dict[str, str] | None
    :type data: str | None
    :type cwd: str | None
    :type always: bool
    :type stdin: file | None
    :type stdout: file | None
    :rtype: str | None, str | None
    """
    return raw_command(cmd, capture=capture, env=env, data=data, cwd=cwd,
                       explain=args.explain and not always, stdin=stdin, stdout=stdout)
def raw_command(cmd, capture=False, env=None, data=None, cwd=None, explain=False, stdin=None, stdout=None):
    """Run a command, optionally capturing output, and raise SubprocessError on failure.

    :type cmd: collections.Iterable[str]
    :type capture: bool
    :type env: dict[str, str] | None
    :type data: str | None
    :type cwd: str | None
    :type explain: bool  # when True, log the command but do not run it
    :type stdin: file | None
    :type stdout: file | None
    :rtype: str | None, str | None
    """
    if not cwd:
        cwd = os.getcwd()

    if not env:
        env = common_environment()

    cmd = list(cmd)

    escaped_cmd = ' '.join(pipes.quote(c) for c in cmd)

    display.info('Run command: %s' % escaped_cmd, verbosity=1)
    display.info('Working directory: %s' % cwd, verbosity=2)

    # warn (rather than fail) here; a missing program is reported below when actually run
    program = find_executable(cmd[0], cwd=cwd, path=env['PATH'], required='warning')

    if program:
        display.info('Program found: %s' % program, verbosity=2)

    for key in sorted(env.keys()):
        display.info('%s=%s' % (key, env[key]), verbosity=2)

    if explain:
        return None, None

    # determine whether communicate() must be used instead of wait()
    communicate = False

    if stdin is not None:
        # an explicit stdin stream takes precedence over data
        data = None
        communicate = True
    elif data is not None:
        stdin = subprocess.PIPE
        communicate = True

    if stdout:
        communicate = True

    if capture:
        stdout = stdout or subprocess.PIPE
        stderr = subprocess.PIPE
        communicate = True
    else:
        stderr = None

    start = time.time()

    try:
        process = subprocess.Popen(cmd, env=env, stdin=stdin, stdout=stdout, stderr=stderr, cwd=cwd)
    except OSError as ex:
        if ex.errno == errno.ENOENT:
            raise ApplicationError('Required program "%s" not found.' % cmd[0])
        raise

    if communicate:
        stdout, stderr = process.communicate(data)
    else:
        process.wait()
        stdout, stderr = None, None

    status = process.returncode
    runtime = time.time() - start

    display.info('Command exited with status %s after %s seconds.' % (status, runtime), verbosity=4)

    if status == 0:
        return stdout, stderr

    raise SubprocessError(cmd, status, stdout, stderr, runtime)
def common_environment():
    """Return the common environment used for executing all programs.

    :rtype: dict[str, str]
    """
    env = {
        'LC_ALL': 'en_US.UTF-8',
        'PATH': os.environ.get('PATH', os.defpath),
    }

    env.update(pass_vars(
        required=('HOME',),
        optional=('HTTPTESTER',),
    ))

    return env
def pass_vars(required=None, optional=None):
    """Collect environment variables to pass through, raising when a required one is missing.

    :type required: collections.Iterable[str]
    :type optional: collections.Iterable[str]
    :rtype: dict[str, str]
    """
    env = {}

    for name in required:
        try:
            env[name] = os.environ[name]
        except KeyError:
            raise MissingEnvironmentVariable(name)

    for name in optional:
        if name in os.environ:
            env[name] = os.environ[name]

    return env
def deepest_path(path_a, path_b):
    """Return the deepest of two paths, or None if the paths are unrelated.

    Comparison is done on whole path components, so 'foo/barbaz' is not
    considered a sub-path of 'foo/bar' (the previous raw string-prefix check
    got this wrong).

    :type path_a: str
    :type path_b: str
    :return: str | None
    """
    if path_a == '.':
        path_a = ''

    if path_b == '.':
        path_b = ''

    def is_sub_path(child, parent):
        """True when child equals parent or lies beneath it ('' is the root)."""
        return not parent or child == parent or child.startswith(parent + '/')

    if is_sub_path(path_a, path_b):
        return path_a or '.'

    if is_sub_path(path_b, path_a):
        return path_b or '.'

    return None
def remove_tree(path):
    """Recursively remove the given directory tree; a missing path is not an error.

    :type path: str
    """
    try:
        shutil.rmtree(path)
    except OSError as ex:
        if ex.errno == errno.ENOENT:
            return  # already gone

        raise
def make_dirs(path):
    """Create a directory and any missing parents; an existing path is not an error.

    :type path: str
    """
    try:
        os.makedirs(path)
    except OSError as ex:
        if ex.errno == errno.EEXIST:
            return  # already present

        raise
class Display(object):
    """Manages color console output."""
    clear = '\033[0m'
    red = '\033[31m'
    green = '\033[32m'
    yellow = '\033[33m'
    blue = '\033[34m'
    purple = '\033[35m'
    cyan = '\033[36m'

    verbosity_colors = {
        0: None,
        1: green,
        2: blue,
        3: cyan,
    }

    def __init__(self):
        self.verbosity = 0   # messages above this verbosity are suppressed
        self.color = True    # set False to disable ANSI color output
        self.warnings = []   # history of warnings for review_warnings()

    def __warning(self, message):
        """Print a warning to stderr without recording it.

        :type message: str
        """
        self.print_message('WARNING: %s' % message, color=self.purple, fd=sys.stderr)

    def review_warnings(self):
        """Review all warnings which previously occurred."""
        if not self.warnings:
            return

        self.__warning('Reviewing previous %d warning(s):' % len(self.warnings))

        for warning in self.warnings:
            self.__warning(warning)

    def warning(self, message):
        """Print a warning to stderr and record it for later review.

        :type message: str
        """
        self.__warning(message)
        self.warnings.append(message)

    def notice(self, message):
        """Print a notice to stderr.

        :type message: str
        """
        self.print_message('NOTICE: %s' % message, color=self.purple, fd=sys.stderr)

    def error(self, message):
        """Print an error to stderr.

        :type message: str
        """
        self.print_message('ERROR: %s' % message, color=self.red, fd=sys.stderr)

    def info(self, message, verbosity=0):
        """Print an informational message when the configured verbosity is high enough.

        :type message: str
        :type verbosity: int
        """
        if self.verbosity < verbosity:
            return

        self.print_message(message, color=self.verbosity_colors.get(verbosity, self.yellow))

    def print_message(self, message, color=None, fd=sys.stdout):  # pylint: disable=locally-disabled, invalid-name
        """Write a message to fd, optionally colorized.

        :type message: str
        :type color: str | None
        :type fd: file
        """
        if color and self.color:
            # convert color resets in message to desired color
            message = '%s%s%s' % (color, message.replace(self.clear, color), self.clear)

        print(message, file=fd)
        fd.flush()
class ApplicationError(Exception):
    """General application error.

    Base class for errors raised by ansible-test; caught at the top level
    where the message is reported and the program exits with status 1.
    """
    def __init__(self, message=None):
        """
        :type message: str | None
        """
        super(ApplicationError, self).__init__(message)
class ApplicationWarning(Exception):
    """General application warning which interrupts normal program flow.

    Caught at the top level where the message is reported as a warning and
    the program exits successfully (status 0) rather than with an error.
    """
    def __init__(self, message=None):
        """
        :type message: str | None
        """
        super(ApplicationWarning, self).__init__(message)
class SubprocessError(ApplicationError):
    """Error resulting from failed subprocess execution."""
    def __init__(self, cmd, status=0, stdout=None, stderr=None, runtime=None):
        """Build an error message from the command line, status and output.

        :type cmd: list[str]
        :type status: int
        :type stdout: str | None
        :type stderr: str | None
        :type runtime: float | None
        """
        quoted = ' '.join(pipes.quote(c) for c in cmd)
        parts = ['Command "%s" returned exit status %s.\n' % (quoted, status)]

        if stderr:
            parts.append('>>> Standard Error\n')
            parts.append('%s%s\n' % (stderr.strip(), Display.clear))

        if stdout:
            parts.append('>>> Standard Output\n')
            parts.append('%s%s\n' % (stdout.strip(), Display.clear))

        super(SubprocessError, self).__init__(''.join(parts).strip())

        # retain the raw details for callers that want to inspect the failure
        self.cmd = cmd
        self.status = status
        self.stdout = stdout
        self.stderr = stderr
        self.runtime = runtime
class MissingEnvironmentVariable(ApplicationError):
    """Error caused by missing environment variable.

    Raised by pass_vars() when a variable listed as required is not set.
    """
    def __init__(self, name):
        """
        :type name: str
        """
        super(MissingEnvironmentVariable, self).__init__('Missing environment variable: %s' % name)

        # name of the missing environment variable, for programmatic access
        self.name = name
class CommonConfig(object):
    """Configuration shared by every ansible-test command."""
    def __init__(self, args):
        """Copy the common command line options from the parsed arguments.

        :type args: any
        """
        self.color = args.color  # type: bool
        self.explain = args.explain  # type: bool
        self.verbosity = args.verbosity  # type: int
display = Display() # pylint: disable=locally-disabled, invalid-name

235
test/runner/reorganize-tests.sh Executable file
View file

@ -0,0 +1,235 @@
#!/usr/bin/env bash
# One-time migration script: reorganize the legacy test layout into the
# unified ansible-test layout by generating compile/sanity skip lists and
# per-target alias files derived from the existing integration playbooks.

set -eu

# Absolute path of the repository root (two levels up from this script).
source_root=$(python -c "from os import path; print(path.abspath(path.join(path.dirname('$0'), '..', '..')))")

cd "${source_root}"

# Convert existing compile skip files to match the unified repository layout.
mkdir -p test/compile
rm -f test/compile/*.txt

for type in core extras; do
    sed "s|^|/lib/ansible/modules/${type}|" \
        < "lib/ansible/modules/${type}/test/utils/shippable/sanity-skip-python24.txt" \
        >> "test/compile/python2.4-skip.txt"
done

# Existing skip files are only for modules.
# Add missing skip entries for core code.

cat << EOF >> test/compile/python2.4-skip.txt
/lib/ansible/modules/__init__.py
/lib/ansible/module_utils/a10.py
/lib/ansible/module_utils/rax.py
/lib/ansible/module_utils/openstack.py
/lib/ansible/module_utils/cloud.py
/lib/ansible/module_utils/ec2.py
/lib/ansible/module_utils/gce.py
/lib/ansible/module_utils/lxd.py
/lib/ansible/module_utils/docker_common.py
/lib/ansible/module_utils/azure_rm_common.py
/lib/ansible/module_utils/vca.py
/lib/ansible/module_utils/vmware.py
/lib/ansible/module_utils/gcp.py
/lib/ansible/module_utils/gcdns.py
/lib/ansible/vars/
/lib/ansible/utils/
/lib/ansible/template/
/lib/ansible/plugins/
/lib/ansible/playbook/
/lib/ansible/parsing/
/lib/ansible/inventory/
/lib/ansible/galaxy/
/lib/ansible/executor/
/lib/ansible/errors/
/lib/ansible/compat/
/lib/ansible/config/
/lib/ansible/cli/
/lib/ansible/constants.py
/lib/ansible/release.py
/lib/ansible/__init__.py
/hacking/
/contrib/
/docsite/
/test/
EOF

cat << EOF >> test/compile/python2.6-skip.txt
/contrib/inventory/vagrant.py
/hacking/dump_playbook_attributes.py
EOF

cat << EOF >> test/compile/python3.5-skip.txt
/test/samples/multi.py
/examples/scripts/uptime.py
EOF

# Keep skip lists sorted so later edits produce clean diffs.
for path in test/compile/*.txt; do
    sort -o "${path}" "${path}"
done

# Not all scripts pass shellcheck yet.
mkdir -p test/sanity/shellcheck

cat << EOF > test/sanity/shellcheck/skip.txt
test/sanity/code-smell/boilerplate.sh
EOF

sort -o test/sanity/shellcheck/skip.txt test/sanity/shellcheck/skip.txt

# Add skip list for code-smell scripts.
# These scripts don't pass, so we can't run them in CI.

cat << EOF > test/sanity/code-smell/skip.txt
inappropriately-private.sh
EOF

# Add skip list for validate-modules.
# Some of these exclusions are temporary, others belong in validate-modules.

cat << EOF > test/sanity/validate-modules/skip.txt
lib/ansible/modules/core/utilities/logic/async_status.py
lib/ansible/modules/core/utilities/helper/_fireball.py
lib/ansible/modules/core/utilities/helper/_accelerate.py
lib/ansible/modules/core/test
lib/ansible/modules/core/.github
lib/ansible/modules/extras/test
lib/ansible/modules/extras/.github
EOF

# Remove existing aliases from previous script runs.
rm -f test/integration/targets/*/aliases

# Map destructive/ targets to integration tests.
targets=$(grep 'role:' "test/integration/destructive.yml" \
    | sed 's/^.* role: //; s/[ ,].*$//;')

for target in ${targets}; do
    alias='destructive'
    echo "target: ${target}, alias: ${alias}"
    echo "${alias}" >> "test/integration/targets/${target}/aliases"
done

# Map destructive/non_destructive targets to posix groups for integration tests.
# This will allow re-balancing of posix tests on Shippable independently of destructive/non_destructive targets.
for type in destructive non_destructive; do
    targets=$(grep 'role:' "test/integration/${type}.yml" \
        | sed 's/^.* role: //; s/[ ,].*$//;')

    if [ "${type}" = "destructive" ]; then
        group="posix/ci/group1"
    else
        group="posix/ci/group2"
    fi

    for target in ${targets}; do
        echo "target: ${target}, group: ${group}"
        echo "${group}" >> "test/integration/targets/${target}/aliases"
    done
done

# Add aliases to integration tests.
targets=$(grep 'role:' test/integration/{destructive,non_destructive}.yml \
    | sed 's/^.* role: //; s/[ ,].*$//;')

for target in ${targets}; do
    # extract the playbook tags for this role and normalize them into aliases
    aliases=$(grep -h "role: *${target}[ ,]" test/integration/{destructive,non_destructive}.yml \
        | sed 's/when:[^,]*//;' \
        | sed 's/^.*tags:[ []*//g; s/[]}].*$//g; s/ //g; s/,/ /g; s/test_//g;')

    for alias in ${aliases}; do
        if [ "${target}" != "${alias}" ]; then
            # convert needs_ prefixed aliases to groups
            alias="${alias//needs_/needs\/}"
            echo "target: ${target}, alias: ${alias}"
            echo "${alias}" >> "test/integration/targets/${target}/aliases"
        fi
    done
done

# Map test_win_group* targets to windows groups for integration tests.
for type in test_win_group1 test_win_group2 test_win_group3; do
    targets=$(grep 'role:' "test/integration/${type}.yml" \
        | sed 's/^.* role: //; s/[ ,].*$//;')

    group=$(echo "${type}" | sed 's/^test_win_/windows_/; s/_/\/ci\//;')

    for target in ${targets}; do
        echo "target: ${target}, group: ${group}"
        echo "${group}" >> "test/integration/targets/${target}/aliases"
    done
done

# Add additional windows tests to appropriate groups.
echo 'windows/ci/group2' >> test/integration/targets/binary_modules_winrm/aliases
echo 'windows/ci/group3' >> test/integration/targets/connection_winrm/aliases

# Add posix/ci/group3 for posix tests which are not already grouped for ci.
group="posix/ci/group3"

for target in test/integration/targets/*; do
    target=$(basename "${target}")

    # setup_/prepare_ targets are support roles, not tests
    if [[ "${target}" =~ (setup|prepare)_ ]]; then
        continue
    fi

    if [ -f "test/integration/targets/${target}/test.sh" ]; then
        continue
    fi

    if [ -f "test/integration/targets/${target}/aliases" ]; then
        if grep -q -P "^(windows|posix)/" "test/integration/targets/${target}/aliases"; then
            continue
        fi
    fi

    if [[ "${target}" =~ _ ]]; then
        # skip targets whose prefix marks them as platform-specific
        prefix="${target//_*/}"

        if grep -q --line-regex "${prefix}" test/integration/target-prefixes.*; then
            continue
        fi
    fi

    echo "target: ${target}, group: ${group}"
    echo "${group}" >> "test/integration/targets/${target}/aliases"
done

# Add skip aliases for python3.
sed 's/^test_//' test/utils/shippable/python3-test-tag-blacklist.txt | while IFS= read -r target; do
    echo "skip/python3" >> "test/integration/targets/${target}/aliases"
done

# Add skip aliases for tests which don't pass yet on osx/freebsd.
for target in service postgresql mysql_db mysql_user mysql_variables uri get_url async_extra_data; do
    echo "skip/osx" >> "test/integration/targets/${target}/aliases"
    echo "skip/freebsd" >> "test/integration/targets/${target}/aliases"
done

# Add skip aliases for tests which don't pass yet on osx.
for target in gathering_facts iterators git; do
    echo "skip/osx" >> "test/integration/targets/${target}/aliases"
done

# Add needs/root entries as required.
for target in connection_chroot authorized_key copy template unarchive; do
    echo "needs/root" >> "test/integration/targets/${target}/aliases"
done

# Add needs/ssh entries as required.
for target in async_extra_data connection_ssh connection_paramiko_ssh; do
    echo "needs/ssh" >> "test/integration/targets/${target}/aliases"
done

# Add missing alias for windows async_status.
echo "async_status" >> test/integration/targets/win_async_wrapper/aliases

# Remove connection tests from CI groups which aren't supported yet.
for connection in docker jail libvirt_lxc lxc lxd; do
    target="connection_${connection}"
    sed -i '/^posix\/ci\/.*$/d' "test/integration/targets/${target}/aliases"
done

# Sort aliases.
for file in test/integration/targets/*/aliases; do
    sort -o "${file}" "${file}"
done

View file

@ -0,0 +1 @@
argparse ; python_version < '2.7'

View file

@ -0,0 +1,2 @@
coverage >= 4.2
pywinrm >= 0.2.1 # 0.1.1 required, but 0.2.1 provides better performance

View file

@ -0,0 +1 @@
coverage

View file

@ -0,0 +1,8 @@
jinja2
jmespath
junit-xml
ordereddict ; python_version < '2.7'
paramiko
passlib
pycrypto
pyyaml

View file

@ -0,0 +1,5 @@
jinja2
mock
pylint
voluptuous
yamllint

View file

@ -0,0 +1,11 @@
boto3
jinja2
mock
nose
passlib
pycrypto
pytest
python-memcached
pyyaml
redis
unittest2 ; python_version < '2.7'

View file

@ -0,0 +1,4 @@
jinja2
junit-xml
pywinrm
pyyaml

View file

@ -0,0 +1,18 @@
#!/bin/sh
# Prepare a test image: ensure python/pip are reachable under their
# unversioned names and configure a friendlier interactive shell.
# NOTE(review): appears intended for container/remote images used by
# ansible-test — confirm against the caller.

set -eu

# Support images with only python3 installed.
if [ ! -f /usr/bin/python ] && [ -f /usr/bin/python3 ]; then
    ln -s /usr/bin/python3 /usr/bin/python
fi

if [ ! -f /usr/bin/pip ] && [ -f /usr/bin/pip3 ]; then
    ln -s /usr/bin/pip3 /usr/bin/pip
fi

# Improve prompts on remote host for interactive use.
cat << EOF > ~/.bashrc
alias ls='ls --color=auto'
export PS1='\[\e]0;\u@\h: \w\a\]\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '
cd ~/ansible/
EOF

View file

@ -0,0 +1,69 @@
#!/bin/sh
# Bootstrap a remote POSIX test instance for ansible-test.
# Takes the platform name as its only argument; currently only the
# "freebsd" platform gets package/bootstrap handling.

set -eu

platform="$1"

# Dump the environment for debugging the provisioning run.
env

cd ~/

if [ "${platform}" = "freebsd" ]; then
    pkg install -y curl

    if [ ! -f bootstrap.sh ]; then
        curl "https://raw.githubusercontent.com/mattclay/ansible-hacking/master/bootstrap.sh" -o bootstrap.sh -#
    fi

    chmod +x bootstrap.sh
    ./bootstrap.sh pip -y -q

    pkg install -y \
        bash \
        devel/ruby-gems \
        gtar \
        mercurial \
        rsync \
        ruby \
        subversion \
        sudo \
        zip
fi

pip install virtualenv

# Tests assume loopback addresses other than 127.0.0.1 will work.
# Add aliases for loopback addresses used by tests.
for i in 3 4 254; do
    ifconfig lo0 alias "127.0.0.${i}" up
done

ifconfig lo0

# Since tests run as root, we also need to be able to ssh to localhost as root.
sed -i '' 's/^# *PermitRootLogin.*$/PermitRootLogin yes/;' /etc/ssh/sshd_config

if [ "${platform}" = "freebsd" ]; then
    # Restart sshd for configuration changes and loopback aliases to work.
    service sshd restart
fi

# Generate our ssh key and add it to our authorized_keys file.
# We also need to add localhost's server keys to known_hosts.
if [ ! -f "${HOME}/.ssh/id_rsa.pub" ]; then
    ssh-keygen -q -t rsa -N '' -f "${HOME}/.ssh/id_rsa"
    cp "${HOME}/.ssh/id_rsa.pub" "${HOME}/.ssh/authorized_keys"

    for key in /etc/ssh/ssh_host_*_key.pub; do
        pk=$(cat "${key}")
        echo "localhost ${pk}" >> "${HOME}/.ssh/known_hosts"
    done
fi

# Improve prompts on remote host for interactive use.
cat << EOF > ~/.bashrc
alias ls='ls -G'
export PS1='\[\e]0;\u@\h: \w\a\]\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '
cd ~/ansible/
EOF

446
test/runner/test.py Executable file
View file

@ -0,0 +1,446 @@
#!/usr/bin/env python
# PYTHON_ARGCOMPLETE_OK
"""Test runner for all Ansible tests."""
from __future__ import absolute_import, print_function
import errno
import os
import sys
from lib.util import (
ApplicationError,
display,
raw_command,
)
from lib.delegation import (
delegate,
)
from lib.executor import (
command_posix_integration,
command_network_integration,
command_windows_integration,
command_units,
command_compile,
command_sanity,
command_shell,
SANITY_TESTS,
SUPPORTED_PYTHON_VERSIONS,
COMPILE_PYTHON_VERSIONS,
PosixIntegrationConfig,
WindowsIntegrationConfig,
NetworkIntegrationConfig,
SanityConfig,
UnitsConfig,
CompileConfig,
ShellConfig,
ApplicationWarning,
Delegate,
generate_pip_install,
)
from lib.target import (
find_target_completion,
walk_posix_integration_targets,
walk_network_integration_targets,
walk_windows_integration_targets,
walk_units_targets,
walk_compile_targets,
walk_sanity_targets,
)
import lib.cover
def main():
    """Main program function.

    Exit codes: 0 on success or ApplicationWarning, 1 on ApplicationError,
    2 on keyboard interrupt, 3 on a broken output pipe.
    """
    try:
        # Run from the repository root so relative paths used by tests resolve.
        git_root = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
        os.chdir(git_root)
        args = parse_args()
        config = args.config(args)
        display.verbosity = config.verbosity
        display.color = config.color

        try:
            args.func(config)
        except Delegate as ex:
            # The command asked to re-run itself in another environment
            # (tox/docker/remote); hand control to the delegation layer.
            delegate(config, ex.exclude, ex.require)

        display.review_warnings()
    except ApplicationWarning as ex:
        display.warning(str(ex))
        exit(0)
    except ApplicationError as ex:
        display.error(str(ex))
        exit(1)
    except KeyboardInterrupt:
        exit(2)
    except IOError as ex:
        # Broken pipe (e.g. output piped to `head`); exit quietly.
        if ex.errno == errno.EPIPE:
            exit(3)
        raise
def parse_args():
    """Parse command line arguments.

    :rtype: argparse.Namespace
    """
    try:
        import argparse
    except ImportError:
        # Python 2.6 needs the argparse backport; install it when the user
        # explicitly asked for requirements to be installed.
        if '--requirements' not in sys.argv:
            raise

        raw_command(generate_pip_install('ansible-test'))

        import argparse

    try:
        import argcomplete
    except ImportError:
        argcomplete = None

    if argcomplete:
        epilog = 'Tab completion available using the "argcomplete" python package.'
    else:
        epilog = 'Install the "argcomplete" python package to enable tab completion.'

    parser = argparse.ArgumentParser(epilog=epilog)

    # options shared by every command
    common = argparse.ArgumentParser(add_help=False)

    common.add_argument('-e', '--explain',
                        action='store_true',
                        help='explain commands that would be executed')

    common.add_argument('-v', '--verbose',
                        dest='verbosity',
                        action='count',
                        default=0,
                        help='display more output')

    common.add_argument('--color',
                        metavar='COLOR',
                        nargs='?',
                        help='generate color output: %(choices)s',
                        choices=('yes', 'no', 'auto'),
                        const='yes',
                        default='auto')

    # options shared by every test command
    test = argparse.ArgumentParser(add_help=False, parents=[common])

    test.add_argument('include',
                      metavar='TARGET',
                      nargs='*',
                      help='test the specified target').completer = complete_target

    test.add_argument('--exclude',
                      metavar='TARGET',
                      action='append',
                      help='exclude the specified target').completer = complete_target

    test.add_argument('--require',
                      metavar='TARGET',
                      action='append',
                      help='require the specified target').completer = complete_target

    test.add_argument('--coverage',
                      action='store_true',
                      help='analyze code coverage when running tests')

    add_changes(test, argparse)
    add_environments(test)

    # options shared by all integration test commands
    integration = argparse.ArgumentParser(add_help=False, parents=[test])

    integration.add_argument('--python',
                             metavar='VERSION',
                             choices=SUPPORTED_PYTHON_VERSIONS,
                             help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))

    integration.add_argument('--start-at',
                             metavar='TARGET',
                             help='start at the specified target').completer = complete_target

    integration.add_argument('--start-at-task',
                             metavar='TASK',
                             help='start at the specified task')

    integration.add_argument('--allow-destructive',
                             action='store_true',
                             help='allow destructive tests (--local and --tox only)')

    integration.add_argument('--retry-on-error',
                             action='store_true',
                             help='retry failed test with increased verbosity')

    subparsers = parser.add_subparsers(metavar='COMMAND')
    subparsers.required = True  # work-around for python 3 bug which makes subparsers optional

    posix_integration = subparsers.add_parser('integration',
                                              parents=[integration],
                                              help='posix integration tests')

    posix_integration.set_defaults(func=command_posix_integration,
                                   targets=walk_posix_integration_targets,
                                   config=PosixIntegrationConfig)

    add_extra_docker_options(posix_integration)

    network_integration = subparsers.add_parser('network-integration',
                                                parents=[integration],
                                                help='network integration tests')

    network_integration.set_defaults(func=command_network_integration,
                                     targets=walk_network_integration_targets,
                                     config=NetworkIntegrationConfig)

    windows_integration = subparsers.add_parser('windows-integration',
                                                parents=[integration],
                                                help='windows integration tests')

    windows_integration.set_defaults(func=command_windows_integration,
                                     targets=walk_windows_integration_targets,
                                     config=WindowsIntegrationConfig)

    windows_integration.add_argument('--windows',
                                     metavar='VERSION',
                                     action='append',
                                     help='windows version')

    units = subparsers.add_parser('units',
                                  parents=[test],
                                  help='unit tests')

    units.set_defaults(func=command_units,
                       targets=walk_units_targets,
                       config=UnitsConfig)

    units.add_argument('--python',
                       metavar='VERSION',
                       choices=SUPPORTED_PYTHON_VERSIONS,
                       help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))

    units.add_argument('--collect-only',
                       action='store_true',
                       help='collect tests but do not execute them')

    compiler = subparsers.add_parser('compile',
                                     parents=[test],
                                     help='compile tests')

    compiler.set_defaults(func=command_compile,
                          targets=walk_compile_targets,
                          config=CompileConfig)

    compiler.add_argument('--python',
                          metavar='VERSION',
                          choices=COMPILE_PYTHON_VERSIONS,
                          help='python version: %s' % ', '.join(COMPILE_PYTHON_VERSIONS))

    sanity = subparsers.add_parser('sanity',
                                   parents=[test],
                                   help='sanity tests')

    sanity.set_defaults(func=command_sanity,
                        targets=walk_sanity_targets,
                        config=SanityConfig)

    sanity.add_argument('--test',
                        metavar='TEST',
                        action='append',
                        choices=[t.name for t in SANITY_TESTS],
                        help='tests to run')

    sanity.add_argument('--skip-test',
                        metavar='TEST',
                        action='append',
                        choices=[t.name for t in SANITY_TESTS],
                        help='tests to skip')

    sanity.add_argument('--list-tests',
                        action='store_true',
                        help='list available tests')

    sanity.add_argument('--python',
                        metavar='VERSION',
                        choices=SUPPORTED_PYTHON_VERSIONS,
                        help='python version: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))

    shell = subparsers.add_parser('shell',
                                  parents=[common],
                                  help='open an interactive shell')

    shell.set_defaults(func=command_shell,
                       config=ShellConfig)

    add_environments(shell, tox_version=True)
    add_extra_docker_options(shell)

    # coverage sub-commands run locally or under tox only
    coverage_common = argparse.ArgumentParser(add_help=False, parents=[common])

    add_environments(coverage_common, tox_version=True, tox_only=True)

    coverage = subparsers.add_parser('coverage',
                                     help='code coverage management and reporting')

    coverage_subparsers = coverage.add_subparsers(metavar='COMMAND')
    coverage_subparsers.required = True  # work-around for python 3 bug which makes subparsers optional

    coverage_combine = coverage_subparsers.add_parser('combine',
                                                      parents=[coverage_common],
                                                      help='combine coverage data and rewrite remote paths')

    coverage_combine.set_defaults(func=lib.cover.command_coverage_combine,
                                  config=lib.cover.CoverageConfig)

    coverage_erase = coverage_subparsers.add_parser('erase',
                                                    parents=[coverage_common],
                                                    help='erase coverage data files')

    coverage_erase.set_defaults(func=lib.cover.command_coverage_erase,
                                config=lib.cover.CoverageConfig)

    coverage_report = coverage_subparsers.add_parser('report',
                                                     parents=[coverage_common],
                                                     help='generate console coverage report')

    coverage_report.set_defaults(func=lib.cover.command_coverage_report,
                                 config=lib.cover.CoverageConfig)

    coverage_html = coverage_subparsers.add_parser('html',
                                                   parents=[coverage_common],
                                                   help='generate html coverage report')

    coverage_html.set_defaults(func=lib.cover.command_coverage_html,
                               config=lib.cover.CoverageConfig)

    coverage_xml = coverage_subparsers.add_parser('xml',
                                                  parents=[coverage_common],
                                                  help='generate xml coverage report')

    coverage_xml.set_defaults(func=lib.cover.command_coverage_xml,
                              config=lib.cover.CoverageConfig)

    if argcomplete:
        argcomplete.autocomplete(parser, always_complete_options=False, validator=lambda i, k: True)

    args = parser.parse_args()

    if args.explain and not args.verbosity:
        # --explain output is emitted via verbose logging; make it visible
        args.verbosity = 1

    # resolve 'auto' color selection based on whether stdout is a tty
    if args.color == 'yes':
        args.color = True
    elif args.color == 'no':
        args.color = False
    else:
        args.color = sys.stdout.isatty()

    return args
def add_changes(parser, argparse):
    """Add change detection arguments to the given parser.

    :type parser: argparse.ArgumentParser
    :type argparse: argparse
    """
    parser.add_argument('--changed', action='store_true', help='limit targets based on changes')

    detection = parser.add_argument_group(title='change detection arguments')

    detection.add_argument('--tracked', action='store_true', help=argparse.SUPPRESS)
    detection.add_argument('--untracked', action='store_true', help='include untracked files')
    detection.add_argument('--ignore-committed', dest='committed', action='store_false', help='exclude committed files')
    detection.add_argument('--ignore-staged', dest='staged', action='store_false', help='exclude staged files')
    detection.add_argument('--ignore-unstaged', dest='unstaged', action='store_false', help='exclude unstaged files')
    detection.add_argument('--changed-from', metavar='PATH', help=argparse.SUPPRESS)
    detection.add_argument('--changed-path', metavar='PATH', action='append', help=argparse.SUPPRESS)
def add_environments(parser, tox_version=False, tox_only=False):
    """Add environment selection arguments to the given parser.

    :type parser: argparse.ArgumentParser
    :type tox_version: bool
    :type tox_only: bool
    """
    parser.add_argument('--requirements',
                        action='store_true',
                        help='install command requirements')

    env_group = parser.add_mutually_exclusive_group()

    env_group.add_argument('--local',
                           action='store_true',
                           help='run from the local environment')

    if tox_version:
        # accept an optional python version, defaulting to the running interpreter
        env_group.add_argument('--tox',
                               metavar='VERSION',
                               nargs='?',
                               default=None,
                               const='.'.join(str(i) for i in sys.version_info[:2]),
                               choices=SUPPORTED_PYTHON_VERSIONS,
                               help='run from a tox virtualenv: %s' % ', '.join(SUPPORTED_PYTHON_VERSIONS))
    else:
        env_group.add_argument('--tox',
                               action='store_true',
                               help='run from a tox virtualenv')

    if tox_only:
        # commands restricted to tox still need these attributes defined
        env_group.set_defaults(
            docker=None,
            remote=None,
            remote_stage=None,
        )

        return

    env_group.add_argument('--docker',
                           metavar='IMAGE',
                           nargs='?',
                           default=None,
                           const='ubuntu1604',
                           help='run from a docker container')

    env_group.add_argument('--remote',
                           metavar='PLATFORM',
                           default=None,
                           help='run from a remote instance')

    remote_group = parser.add_argument_group(title='remote arguments')

    remote_group.add_argument('--remote-stage',
                              metavar='STAGE',
                              help='remote stage to use: %(choices)s',
                              choices=['prod', 'dev'],
                              default='prod')
def add_extra_docker_options(parser):
    """Add docker-specific arguments to the given parser.

    :type parser: argparse.ArgumentParser
    """
    docker_group = parser.add_argument_group(title='docker arguments')

    docker_group.add_argument('--docker-util',
                              metavar='IMAGE',
                              default='httptester',
                              help='docker utility image to provide test services')

    docker_group.add_argument('--docker-privileged',
                              action='store_true',
                              help='run docker container in privileged mode')
def complete_target(prefix, parsed_args, **_):
    """Tab completion callback for target arguments (invoked by argcomplete).

    :type prefix: unicode
    :type parsed_args: any
    :rtype: list[str]
    """
    return find_target_completion(parsed_args.targets, prefix)
# Script entry point.
if __name__ == '__main__':
    main()

9
test/runner/tox.ini Normal file
View file

@ -0,0 +1,9 @@
[tox]
skipsdist = True
minversion = 2.5.0
[testenv]
changedir = {toxinidir}/../../
commands = {posargs}
passenv = HOME
args_are_paths = False