mirror of
https://github.com/ansible-collections/community.general.git
synced 2025-07-22 12:50:22 -07:00
Initial ansible-test implementation. (#18556)
This commit is contained in:
parent
d95eac16eb
commit
6bbd92e422
191 changed files with 5483 additions and 48 deletions
0
test/runner/lib/__init__.py
Normal file
0
test/runner/lib/__init__.py
Normal file
34
test/runner/lib/ansible_util.py
Normal file
34
test/runner/lib/ansible_util.py
Normal file
|
@ -0,0 +1,34 @@
|
|||
"""Miscellaneous utility functions and classes specific to ansible cli tools."""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import os
|
||||
|
||||
from lib.util import common_environment
|
||||
|
||||
|
||||
def ansible_environment(args):
    """
    Create the environment variables needed to run ansible CLI tools from source.

    :type args: CommonConfig
    :rtype: dict[str, str]
    """
    env = common_environment()
    path = env['PATH']

    # prefer the local checkout's bin directory over any installed ansible
    ansible_path = os.path.join(os.getcwd(), 'bin')

    if not path.startswith(ansible_path + os.pathsep):
        path = ansible_path + os.pathsep + path

    ansible = dict(
        # fixed: original used "'%s' % 'true' if args.color else 'false'" where the
        # '%s' % wrapper was a redundant no-op due to operator precedence
        ANSIBLE_FORCE_COLOR='true' if args.color else 'false',
        ANSIBLE_DEPRECATION_WARNINGS='false',
        ANSIBLE_CONFIG='/dev/null',  # ignore any user/system ansible.cfg
        PYTHONPATH=os.path.abspath('lib'),  # import ansible from the source tree
        PAGER='/bin/cat',
        PATH=path,
    )

    env.update(ansible)

    return env
|
165
test/runner/lib/changes.py
Normal file
165
test/runner/lib/changes.py
Normal file
|
@ -0,0 +1,165 @@
|
|||
"""Detect changes in Ansible code."""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import re
|
||||
import os
|
||||
|
||||
from lib.util import (
|
||||
ApplicationError,
|
||||
SubprocessError,
|
||||
MissingEnvironmentVariable,
|
||||
CommonConfig,
|
||||
)
|
||||
|
||||
from lib.http import (
|
||||
HttpClient,
|
||||
urlencode,
|
||||
)
|
||||
|
||||
from lib.git import (
|
||||
Git,
|
||||
)
|
||||
|
||||
|
||||
class InvalidBranch(ApplicationError):
    """Exception for invalid branch specification."""
    def __init__(self, branch, reason):
        """
        :type branch: str
        :type reason: str
        """
        super(InvalidBranch, self).__init__('Invalid branch: %s\n%s' % (branch, reason))

        # keep the offending branch name available to callers
        self.branch = branch
|
||||
|
||||
|
||||
class ChangeDetectionNotSupported(ApplicationError):
    """Exception for cases where change detection is not supported."""
    def __init__(self, message):
        """
        :type message: str
        """
        # no extra state; this subclass exists only so callers can catch it specifically
        super(ChangeDetectionNotSupported, self).__init__(message)
|
||||
|
||||
|
||||
class ShippableChanges(object):
    """Change information for Shippable build."""
    def __init__(self, args, git):
        """
        :type args: CommonConfig
        :type git: Git
        """
        self.args = args

        try:
            # all of these are provided by the Shippable CI environment
            self.branch = os.environ['BRANCH']
            self.is_pr = os.environ['IS_PULL_REQUEST'] == 'true'
            self.is_tag = os.environ['IS_GIT_TAG'] == 'true'
            self.commit = os.environ['COMMIT']
            self.project_id = os.environ['PROJECT_ID']
        except KeyError as error:
            raise MissingEnvironmentVariable(name=error.args[0])

        if self.is_tag:
            raise ChangeDetectionNotSupported('Change detection is not supported for tags.')

        if self.is_pr:
            # pull request: diff against the target branch
            self.paths = sorted(git.get_diff_names([self.branch]))
        else:
            # merge run: diff against the last commit that passed CI
            runs = self.get_merge_runs(self.project_id, self.branch)
            baseline = self.get_last_successful_commit(runs)
            self.paths = sorted(git.get_diff_names([baseline, self.commit]))

    def get_merge_runs(self, project_id, branch):
        """
        :type project_id: str
        :type branch: str
        :rtype: list[dict]
        """
        params = {
            'isPullRequest': 'false',
            'projectIds': project_id,
            'branch': branch,
        }

        client = HttpClient(self.args, always=True)
        response = client.get('https://api.shippable.com/runs?%s' % urlencode(params))
        return response.json()

    @staticmethod
    def get_last_successful_commit(merge_runs):
        """
        :type merge_runs: list[dict]
        :rtype: str
        """
        ordered = sorted(merge_runs, key=lambda run: run['createdAt'])
        seen = set()
        last = None

        # scan runs oldest-to-newest, keeping the newest first-seen commit that succeeded
        for run in ordered:
            sha = run['commitSha']

            if sha in seen:
                continue

            seen.add(sha)

            if run['statusCode'] == 30:
                last = sha

        return last
|
||||
|
||||
|
||||
class LocalChanges(object):
    """Change information for local work."""
    def __init__(self, args, git):
        """
        :type args: CommonConfig
        :type git: Git
        """
        self.args = args
        self.current_branch = git.get_branch()

        # refuse to run change detection while sitting on devel or a stable branch
        if self.is_official_branch(self.current_branch):
            raise InvalidBranch(branch=self.current_branch,
                                reason='Current branch is not a feature branch.')

        self.fork_branch = None
        self.fork_point = None

        self.local_branches = sorted(git.get_branches())
        self.official_branches = sorted([b for b in self.local_branches if self.is_official_branch(b)])

        # try each official branch until one yields a usable fork point
        for candidate in self.official_branches:
            self.fork_branch = candidate
            try:
                self.fork_point = git.get_branch_fork_point(candidate)
                break
            except SubprocessError:
                pass

        if self.fork_point is None:
            raise ApplicationError('Unable to auto-detect fork branch and fork point.')

        # tracked files (including unchanged)
        self.tracked = sorted(git.get_file_names(['--cached']))
        # untracked files (except ignored)
        self.untracked = sorted(git.get_file_names(['--others', '--exclude-standard']))
        # tracked changes (including deletions) committed since the branch was forked
        self.committed = sorted(git.get_diff_names([self.fork_point, 'HEAD']))
        # tracked changes (including deletions) which are staged
        self.staged = sorted(git.get_diff_names(['--cached']))
        # tracked changes (including deletions) which are not staged
        self.unstaged = sorted(git.get_diff_names([]))

    @staticmethod
    def is_official_branch(name):
        """
        :type name: str
        :rtype: bool
        """
        # official branches are 'devel' and release branches such as 'stable-2.3'
        return name == 'devel' or re.match(r'^stable-[0-9]+\.[0-9]+$', name) is not None
|
326
test/runner/lib/classification.py
Normal file
326
test/runner/lib/classification.py
Normal file
|
@ -0,0 +1,326 @@
|
|||
"""Classify changes in Ansible code."""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import os
|
||||
|
||||
from lib.target import (
|
||||
walk_module_targets,
|
||||
walk_integration_targets,
|
||||
walk_units_targets,
|
||||
walk_compile_targets,
|
||||
)
|
||||
|
||||
from lib.util import (
|
||||
display,
|
||||
)
|
||||
|
||||
|
||||
def categorize_changes(paths, verbose_command=None):
    """
    Map changed file paths to the test commands and targets that should run.

    :type paths: list[str]
    :type verbose_command: str
    :rtype: dict[str, list[str]]
    """
    mapper = PathMapper()

    # accumulate targets per test command; keys are the full set of commands
    commands = {
        'sanity': set(),
        'compile': set(),
        'units': set(),
        'integration': set(),
        'windows-integration': set(),
        'network-integration': set(),
    }

    display.info('Mapping %d changed file(s) to tests.' % len(paths))

    for path in paths:
        tests = mapper.classify(path)

        if tests is None:
            display.info('%s -> all' % path, verbosity=1)
            tests = all_tests()  # not categorized, run all tests
            display.warning('Path not categorized: %s' % path)
        else:
            # drop entries whose target is falsy (no test applies)
            tests = dict((key, value) for key, value in tests.items() if value)

        if verbose_command:
            result = '%s: %s' % (verbose_command, tests.get(verbose_command) or 'none')

            # identify targeted integration tests (those which only target a single integration command)
            if 'integration' in verbose_command and tests.get(verbose_command):
                if not any('integration' in command for command in tests.keys() if command != verbose_command):
                    result += ' (targeted)'
        else:
            result = '%s' % tests

        display.info('%s -> %s' % (path, result), verbosity=1)

        for command, target in tests.items():
            commands[command].add(target)

    # 'all' swallows any individual targets for the same command
    for command in commands:
        if any(t == 'all' for t in commands[command]):
            commands[command] = set(['all'])

    # drop commands with no targets and sort the rest for stable output
    commands = dict((c, sorted(commands[c])) for c in commands.keys() if commands[c])

    return commands
|
||||
|
||||
|
||||
class PathMapper(object):
    """Map file paths to test commands and targets."""
    def __init__(self):
        self.integration_targets = list(walk_integration_targets())
        self.module_targets = list(walk_module_targets())
        self.compile_targets = list(walk_compile_targets())
        self.units_targets = list(walk_units_targets())

        # fast-lookup structures derived from the walked targets
        self.compile_paths = set(t.path for t in self.compile_targets)
        self.units_modules = set(t.module for t in self.units_targets if t.module)
        self.units_paths = set(t.path for t in self.units_targets)

        self.module_names_by_path = dict((t.path, t.module) for t in self.module_targets)
        self.integration_targets_by_name = dict((t.name, t) for t in self.integration_targets)

        # module name -> integration target name, split by target platform alias
        self.posix_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                                if 'posix/' in t.aliases for m in t.modules)
        self.windows_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                                  if 'windows/' in t.aliases for m in t.modules)
        self.network_integration_by_module = dict((m, t.name) for t in self.integration_targets
                                                  if 'network/' in t.aliases for m in t.modules)

    def classify(self, path):
        """
        Classify a path and apply compile/sanity defaults.

        :type path: str
        :rtype: dict[str, str] | None
        """
        result = self._classify(path)

        # run all tests when no result given
        if result is None:
            return None

        # compile path if eligible
        if path in self.compile_paths:
            result['compile'] = path

        # run sanity on path unless result specified otherwise
        if 'sanity' not in result:
            result['sanity'] = path

        return result

    def _classify(self, path):
        """
        Map a path to the tests it affects; {} means minimal (sanity only), None means unknown.

        :type path: str
        :rtype: dict[str, str] | None
        """
        filename = os.path.basename(path)
        name, ext = os.path.splitext(filename)

        minimal = {}

        if path.startswith('.github/'):
            return minimal

        if path.startswith('bin/'):
            return minimal

        if path.startswith('contrib/'):
            return {
                'units': 'test/units/contrib/'
            }

        if path.startswith('docs/'):
            return minimal

        if path.startswith('docs-api/'):
            return minimal

        if path.startswith('docsite/'):
            return minimal

        if path.startswith('examples/'):
            return minimal

        if path.startswith('hacking/'):
            return minimal

        if path.startswith('lib/ansible/modules/'):
            module = self.module_names_by_path.get(path)

            if module:
                return {
                    'units': module if module in self.units_modules else None,
                    'integration': self.posix_integration_by_module.get(module) if ext == '.py' else None,
                    'windows-integration': self.windows_integration_by_module.get(module) if ext == '.ps1' else None,
                    'network-integration': self.network_integration_by_module.get(module),
                }

            return minimal

        if path.startswith('lib/ansible/module_utils/'):
            if ext == '.ps1':
                return {
                    'windows-integration': 'all',
                }

            if ext == '.py':
                return {
                    'integration': 'all',
                    'network-integration': 'all',
                    'units': 'all',
                }

        if path.startswith('lib/ansible/plugins/connection/'):
            if name == '__init__':
                return {
                    'integration': 'all',
                    'windows-integration': 'all',
                    'network-integration': 'all',
                    'units': 'test/units/plugins/connection/',
                }

            if name == 'winrm':
                return {
                    'windows-integration': 'all',
                    'units': 'test/units/plugins/connection/',
                }

            if name == 'local':
                return {
                    'integration': 'all',
                    'network-integration': 'all',
                    # fixed: was 'test/units/plugins/connections/' (trailing 's'),
                    # inconsistent with the path used by every other branch above
                    'units': 'test/units/plugins/connection/',
                }

            if 'connection_%s' % name in self.integration_targets_by_name:
                return {
                    'integration': 'connection_%s' % name,
                }

            return minimal

        if path.startswith('lib/ansible/utils/module_docs_fragments/'):
            return {
                'sanity': 'all',
            }

        if path.startswith('lib/ansible/'):
            return all_tests()  # broad impact, run all tests

        if path.startswith('packaging/'):
            return minimal

        if path.startswith('test/compile/'):
            return {
                'compile': 'all',
            }

        if path.startswith('test/results/'):
            return minimal

        if path.startswith('test/integration/roles/'):
            return minimal

        if path.startswith('test/integration/targets/'):
            # NOTE: raises KeyError for an unknown target name; assumed to be
            # guaranteed by the caller walking real targets -- TODO confirm
            target = self.integration_targets_by_name[path.split('/')[3]]

            if 'hidden/' in target.aliases:
                return {
                    'integration': 'all',
                    'windows-integration': 'all',
                    'network-integration': 'all',
                }

            return {
                'integration': target.name if 'posix/' in target.aliases else None,
                'windows-integration': target.name if 'windows/' in target.aliases else None,
                'network-integration': target.name if 'network/' in target.aliases else None,
            }

        if path.startswith('test/integration/'):
            return {
                'integration': 'all',
                'windows-integration': 'all',
                'network-integration': 'all',
            }

        if path.startswith('test/samples/'):
            return minimal

        if path.startswith('test/sanity/'):
            return {
                'sanity': 'all',  # test infrastructure, run all sanity checks
            }

        if path.startswith('test/units/'):
            if path in self.units_paths:
                return {
                    'units': path,
                }

            # changed file is not a test file; run the tests in its directory
            return {
                'units': os.path.dirname(path),
            }

        if path.startswith('test/runner/'):
            return all_tests()  # test infrastructure, run all tests

        if path.startswith('test/utils/shippable/'):
            return all_tests()  # test infrastructure, run all tests

        if path.startswith('test/utils/'):
            return minimal

        if path == 'test/README.md':
            return minimal

        if path.startswith('ticket_stubs/'):
            return minimal

        if '/' not in path:
            if path in (
                    '.gitattributes',
                    '.gitignore',
                    '.gitmodules',
                    '.mailmap',
                    'tox.ini',  # obsolete
                    'COPYING',
                    'VERSION',
                    'Makefile',
                    'setup.py',
            ):
                return minimal

            if path in (
                    'shippable.yml',
                    '.coveragerc',
            ):
                return all_tests()  # test infrastructure, run all tests

            if path == '.yamllint':
                return {
                    'sanity': 'all',
                }

            if ext in ('.md', '.rst', '.txt', '.xml', '.in'):
                return minimal

        return None  # unknown, will result in fall-back to run all tests
|
||||
|
||||
|
||||
def all_tests():
    """
    Return a mapping that runs every test command against all targets.

    :rtype: dict[str, str]
    """
    command_names = (
        'sanity',
        'compile',
        'units',
        'integration',
        'windows-integration',
        'network-integration',
    )

    return dict((command, 'all') for command in command_names)
|
340
test/runner/lib/core_ci.py
Normal file
340
test/runner/lib/core_ci.py
Normal file
|
@ -0,0 +1,340 @@
|
|||
"""Access Ansible Core CI remote services."""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import json
|
||||
import os
|
||||
import traceback
|
||||
import uuid
|
||||
import errno
|
||||
import time
|
||||
|
||||
from lib.http import (
|
||||
HttpClient,
|
||||
HttpResponse,
|
||||
HttpError,
|
||||
)
|
||||
|
||||
from lib.util import (
|
||||
ApplicationError,
|
||||
run_command,
|
||||
make_dirs,
|
||||
CommonConfig,
|
||||
display,
|
||||
is_shippable,
|
||||
)
|
||||
|
||||
|
||||
class AnsibleCoreCI(object):
    """Client for Ansible Core CI services."""
    def __init__(self, args, platform, version, stage='prod', persist=True, name=None):
        """
        :type args: CommonConfig
        :type platform: str
        :type version: str
        :type stage: str
        :type persist: bool
        :type name: str
        """
        self.args = args
        self.platform = platform
        self.version = version
        self.stage = stage
        self.client = HttpClient(args)
        self.connection = None
        self.instance_id = None
        # default the instance name to platform-version when not supplied
        self.name = name if name else '%s-%s' % (self.platform, self.version)

        if self.platform == 'windows':
            # windows uses WinRM over HTTPS; no SSH key required
            self.ssh_key = None
            self.endpoint = 'https://14blg63h2i.execute-api.us-east-1.amazonaws.com'
            self.port = 5986
        elif self.platform == 'freebsd':
            self.ssh_key = SshKey(args)
            self.endpoint = 'https://14blg63h2i.execute-api.us-east-1.amazonaws.com'
            self.port = 22
        elif self.platform == 'osx':
            self.ssh_key = SshKey(args)
            self.endpoint = 'https://osx.testing.ansible.com'
            self.port = None  # port is taken from the service response for osx (see get())
        else:
            raise ApplicationError('Unsupported platform: %s' % platform)

        # file used to persist the instance id between invocations
        self.path = os.path.expanduser('~/.ansible/test/instances/%s-%s' % (self.name, self.stage))

        if persist and self._load():
            try:
                display.info('Checking existing %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                             verbosity=1)

                self.connection = self.get()

                display.info('Loaded existing %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                             verbosity=1)
            except HttpError as ex:
                if ex.status != 404:
                    raise

                # 404 means the persisted instance no longer exists; discard it
                self._clear()

                display.info('Cleared stale %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                             verbosity=1)

                self.instance_id = None
        else:
            self.instance_id = None
            self._clear()

        if self.instance_id:
            self.started = True
        else:
            # no usable persisted instance; mint a fresh id for a new one
            self.started = False
            self.instance_id = str(uuid.uuid4())

            display.info('Initializing new %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                         verbosity=1)

    def start(self):
        """Start instance, selecting the auth mechanism based on the environment."""
        if is_shippable():
            self.start_shippable()
        else:
            self.start_remote()

    def start_remote(self):
        """Start instance for remote development/testing."""
        # personal auth key for direct (non-CI) use of the service
        with open(os.path.expanduser('~/.ansible-core-ci.key'), 'r') as key_fd:
            auth_key = key_fd.read().strip()

        self._start(dict(
            remote=dict(
                key=auth_key,
                nonce=None,
            ),
        ))

    def start_shippable(self):
        """Start instance on Shippable, authenticating with the CI build identity."""
        self._start(dict(
            shippable=dict(
                run_id=os.environ['SHIPPABLE_BUILD_ID'],
                job_number=int(os.environ['SHIPPABLE_JOB_NUMBER']),
            ),
        ))

    def stop(self):
        """Stop instance."""
        if not self.started:
            display.info('Skipping invalid %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                         verbosity=1)
            return

        response = self.client.delete(self._uri)

        if response.status_code == 404:
            # already gone on the server; just forget the local record
            self._clear()
            display.info('Cleared invalid %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                         verbosity=1)
            return

        if response.status_code == 200:
            self._clear()
            display.info('Stopped running %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                         verbosity=1)
            return

        raise self._create_http_error(response)

    def get(self):
        """
        Get instance connection information.
        :rtype: InstanceConnection
        """
        if not self.started:
            display.info('Skipping invalid %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                         verbosity=1)
            return None

        # once running, reuse the cached connection info
        if self.connection and self.connection.running:
            return self.connection

        response = self.client.get(self._uri)

        if response.status_code != 200:
            raise self._create_http_error(response)

        if self.args.explain:
            # --explain mode does not contact the real service; fabricate details
            self.connection = InstanceConnection(
                running=True,
                hostname='cloud.example.com',
                port=self.port or 12345,
                username='username',
                password='password' if self.platform == 'windows' else None,
            )
        else:
            response_json = response.json()

            status = response_json['status']
            con = response_json['connection']

            self.connection = InstanceConnection(
                running=status == 'running',
                hostname=con['hostname'],
                port=int(con.get('port', self.port)),
                username=con['username'],
                password=con.get('password'),
            )

        status = 'running' if self.connection.running else 'starting'

        display.info('Retrieved %s %s/%s instance %s.' % (status, self.platform, self.version, self.instance_id),
                     verbosity=1)

        return self.connection

    def wait(self):
        """Wait for the instance to become ready."""
        # poll every 10 seconds; 89 iterations gives roughly a 15 minute timeout
        for _ in range(1, 90):
            if self.get().running:
                return
            time.sleep(10)

        raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
                               (self.platform, self.version, self.instance_id))

    @property
    def _uri(self):
        # REST resource identifying this instance on the service
        return '%s/%s/jobs/%s' % (self.endpoint, self.stage, self.instance_id)

    def _start(self, auth):
        """Start instance using the given auth payload (remote key or shippable identity)."""
        if self.started:
            display.info('Skipping started %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                         verbosity=1)
            return

        data = dict(
            config=dict(
                platform=self.platform,
                version=self.version,
                public_key=self.ssh_key.pub_contents if self.ssh_key else None,
                query=False,
            )
        )

        data.update(dict(auth=auth))

        headers = {
            'Content-Type': 'application/json',
        }

        response = self.client.put(self._uri, data=json.dumps(data), headers=headers)

        if response.status_code != 200:
            raise self._create_http_error(response)

        self.started = True
        self._save()

        display.info('Started %s/%s instance %s.' % (self.platform, self.version, self.instance_id),
                     verbosity=1)

    def _clear(self):
        """Clear instance information, removing the persisted id file if present."""
        try:
            self.connection = None
            os.remove(self.path)
        except OSError as ex:
            # a missing file is expected; anything else is a real error
            if ex.errno != errno.ENOENT:
                raise

    def _load(self):
        """Load instance information; returns a truthy instance id when one was persisted."""
        try:
            with open(self.path, 'r') as instance_fd:
                self.instance_id = instance_fd.read()
                self.started = True
        except IOError as ex:
            if ex.errno != errno.ENOENT:
                raise
            self.instance_id = None

        return self.instance_id

    def _save(self):
        """Save instance information to the persistence file."""
        if self.args.explain:
            return

        make_dirs(os.path.dirname(self.path))

        with open(self.path, 'w') as instance_fd:
            instance_fd.write(self.instance_id)

    @staticmethod
    def _create_http_error(response):
        """
        Build an HttpError from a failed service response, including any remote traceback.

        :type response: HttpResponse
        :rtype: ApplicationError
        """
        response_json = response.json()
        stack_trace = ''

        if 'message' in response_json:
            message = response_json['message']
        elif 'errorMessage' in response_json:
            message = response_json['errorMessage'].strip()
            if 'stackTrace' in response_json:
                trace = '\n'.join([x.rstrip() for x in traceback.format_list(response_json['stackTrace'])])
                stack_trace = ('\nTraceback (from remote server):\n%s' % trace)
        else:
            # unrecognized payload; fall back to the raw response
            message = str(response_json)

        return HttpError(response.status_code, '%s%s' % (message, stack_trace))
|
||||
|
||||
|
||||
class SshKey(object):
    """Container for SSH key used to connect to remote instances."""
    def __init__(self, args):
        """
        :type args: CommonConfig
        """
        base_dir = os.path.expanduser('~/.ansible/test/')

        self.key = os.path.join(base_dir, 'id_rsa')
        self.pub = os.path.join(base_dir, 'id_rsa.pub')

        # generate a key pair on first use
        if not os.path.isfile(self.pub):
            if not args.explain:
                make_dirs(base_dir)

            run_command(args, ['ssh-keygen', '-q', '-t', 'rsa', '-N', '', '-f', self.key])

        if args.explain:
            # --explain mode may not have a real key on disk
            self.pub_contents = None
        else:
            with open(self.pub, 'r') as pub_fd:
                self.pub_contents = pub_fd.read().strip()
|
||||
|
||||
|
||||
class InstanceConnection(object):
    """Container for remote instance status and connection details."""
    def __init__(self, running, hostname, port, username, password):
        """
        :type running: bool
        :type hostname: str
        :type port: int
        :type username: str
        :type password: str | None
        """
        self.running = running
        self.hostname = hostname
        self.port = port
        self.username = username
        self.password = password

    def __str__(self):
        # include the password in the credentials portion only when one is set
        if self.password:
            credentials = '%s:%s' % (self.username, self.password)
        else:
            credentials = self.username

        return '%s:%s [%s]' % (self.hostname, self.port, credentials)
|
148
test/runner/lib/cover.py
Normal file
148
test/runner/lib/cover.py
Normal file
|
@ -0,0 +1,148 @@
|
|||
"""Code coverage utilities."""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import os
|
||||
import re
|
||||
|
||||
from lib.target import walk_module_targets
|
||||
from lib.util import display, ApplicationError, run_command
|
||||
from lib.executor import EnvironmentConfig, Delegate, install_command_requirements
|
||||
|
||||
COVERAGE_DIR = 'test/results/coverage'
|
||||
COVERAGE_FILE = os.path.join(COVERAGE_DIR, 'coverage')
|
||||
|
||||
|
||||
def command_coverage_combine(args):
    """Patch paths in coverage files and merge into a single file.
    :type args: CoverageConfig
    """
    coverage = initialize_coverage(args)

    # module name -> source path, for restoring paths of remotely-executed modules
    modules = dict((t.module, t.path) for t in list(walk_module_targets()))

    # individual per-run coverage files, excluding the combined output file itself
    coverage_files = [os.path.join(COVERAGE_DIR, f) for f in os.listdir(COVERAGE_DIR)
                      if f.startswith('coverage') and f != 'coverage']

    arc_data = {}

    ansible_path = os.path.abspath('lib/ansible/') + '/'
    root_path = os.getcwd() + '/'

    for coverage_file in coverage_files:
        original = coverage.CoverageData()

        if os.path.getsize(coverage_file) == 0:
            display.warning('Empty coverage file: %s' % coverage_file)
            continue

        try:
            original.read_file(coverage_file)
        except Exception as ex:  # pylint: disable=locally-disabled, broad-except
            # a corrupt file should not abort the combine; report and move on
            display.error(str(ex))
            continue

        for filename in original.measured_files():
            arcs = original.arcs(filename)

            # rewrite temporary/remote execution paths back to checkout-relative paths
            if '/ansible_modlib.zip/ansible/' in filename:
                # module library extracted from the zipped payload
                new_name = re.sub('^.*/ansible_modlib.zip/ansible/', ansible_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif '/ansible_module_' in filename:
                # individual module copied for remote execution; map back via module name
                module = re.sub('^.*/ansible_module_(?P<module>.*).py$', '\\g<module>', filename)
                new_name = os.path.abspath(modules[module])
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name
            elif filename.startswith('/root/ansible/'):
                # checkout location used inside containers/remotes
                new_name = re.sub('^/.*?/ansible/', root_path, filename)
                display.info('%s -> %s' % (filename, new_name), verbosity=3)
                filename = new_name

            if filename not in arc_data:
                arc_data[filename] = []

            arc_data[filename] += arcs

    updated = coverage.CoverageData()

    for filename in arc_data:
        if not os.path.isfile(filename):
            display.warning('Invalid coverage path: %s' % filename)
            continue

        updated.add_arcs({filename: arc_data[filename]})

    if not args.explain:
        updated.write_file(COVERAGE_FILE)
|
||||
|
||||
|
||||
def command_coverage_report(args):
    """Combine coverage data and print a console report.
    :type args: CoverageConfig
    """
    command_coverage_combine(args)
    run_command(args, ['coverage', 'report'])
|
||||
|
||||
|
||||
def command_coverage_html(args):
    """Combine coverage data and generate an HTML report.
    :type args: CoverageConfig
    """
    command_coverage_combine(args)
    run_command(args, ['coverage', 'html', '-d', 'test/results/reports/coverage'])
|
||||
|
||||
|
||||
def command_coverage_xml(args):
    """Combine coverage data and generate an XML report.
    :type args: CoverageConfig
    """
    command_coverage_combine(args)
    run_command(args, ['coverage', 'xml', '-o', 'test/results/reports/coverage.xml'])
|
||||
|
||||
|
||||
def command_coverage_erase(args):
    """
    Remove all coverage data files from the results directory.

    :type args: CoverageConfig
    """
    initialize_coverage(args)

    for name in os.listdir(COVERAGE_DIR):
        # only coverage files are removed; leave anything else alone
        if name.startswith('coverage'):
            path = os.path.join(COVERAGE_DIR, name)

            if not args.explain:
                os.remove(path)
|
||||
|
||||
|
||||
def initialize_coverage(args):
    """
    Prepare for a coverage command and return the coverage module.

    :type args: CoverageConfig
    :rtype: coverage
    """
    if args.delegate:
        raise Delegate()

    if args.requirements:
        install_command_requirements(args)

    try:
        import coverage
    except ImportError:
        coverage = None

    # give a friendly error instead of an ImportError traceback
    if coverage is None:
        raise ApplicationError('You must install the "coverage" python module to use this command.')

    return coverage
|
||||
|
||||
|
||||
class CoverageConfig(EnvironmentConfig):
    """Configuration for the coverage command."""
    def __init__(self, args):
        """
        :type args: any
        """
        # 'coverage' is the command name passed through to EnvironmentConfig
        super(CoverageConfig, self).__init__(args, 'coverage')
|
331
test/runner/lib/delegation.py
Normal file
331
test/runner/lib/delegation.py
Normal file
|
@ -0,0 +1,331 @@
|
|||
"""Delegate test execution to another environment."""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
import lib.pytar
|
||||
import lib.thread
|
||||
|
||||
from lib.executor import (
|
||||
SUPPORTED_PYTHON_VERSIONS,
|
||||
EnvironmentConfig,
|
||||
IntegrationConfig,
|
||||
ShellConfig,
|
||||
TestConfig,
|
||||
create_shell_command,
|
||||
)
|
||||
|
||||
from lib.core_ci import (
|
||||
AnsibleCoreCI,
|
||||
)
|
||||
|
||||
from lib.manage_ci import (
|
||||
ManagePosixCI,
|
||||
)
|
||||
|
||||
from lib.util import (
|
||||
ApplicationError,
|
||||
run_command,
|
||||
)
|
||||
|
||||
BUFFER_SIZE = 256 * 256
|
||||
|
||||
|
||||
def delegate(args, exclude, require):
    """Delegate execution to the first configured environment, if any.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    :rtype: bool  # True if execution was delegated
    """
    # order matters: tox takes precedence over docker, then remote
    handlers = (
        (args.tox, delegate_tox),
        (args.docker, delegate_docker),
        (args.remote, delegate_remote),
    )

    for selected, handler in handlers:
        if selected:
            handler(args, exclude, require)
            return True

    return False
def delegate_tox(args, exclude, require):
    """Delegate test execution to tox managed virtual environments.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    """
    if args.python:
        if args.python not in SUPPORTED_PYTHON_VERSIONS:
            raise ApplicationError('tox does not support Python version %s' % args.python)

        versions = (args.python,)
    else:
        versions = SUPPORTED_PYTHON_VERSIONS

    options = {
        '--tox': args.tox_args,
    }

    for version in versions:
        # tox environment names use the version without the dot, e.g. py27
        tox_cmd = ['tox', '-c', 'test/runner/tox.ini', '-e', 'py' + version.replace('.', ''), '--']
        test_cmd = generate_command(args, os.path.abspath('test/runner/test.py'), options, exclude, require)

        if not args.python:
            test_cmd += ['--python', version]

        run_command(args, tox_cmd + test_cmd)
def delegate_docker(args, exclude, require):
    """Delegate test execution to a docker container, optionally linked to a
    utility (http test) container. Results are copied back out of the test
    container before both containers are removed.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    """
    util_image = args.docker_util
    test_image = args.docker
    privileged = args.docker_privileged

    # container ids, populated as containers are started so cleanup can find them
    util_id = None
    test_id = None

    # options consumed locally and stripped from the delegated command line
    # (value is the number of values each option takes)
    options = {
        '--docker': 1,
        '--docker-privileged': 0,
        '--docker-util': 1,
    }

    cmd = generate_command(args, '/root/ansible/test/runner/test.py', options, exclude, require)

    if isinstance(args, IntegrationConfig):
        if not args.allow_destructive:
            # NOTE(review): this appends --allow-destructive when the caller did
            # NOT pass it, presumably because the container is disposable so
            # destructive tests are safe there — confirm intent.
            cmd.append('--allow-destructive')

    if not args.explain:
        lib.pytar.create_tarfile('/tmp/ansible.tgz', '.', lib.pytar.ignore)

    try:
        if util_image:
            util_id, _ = run_command(args, [
                'docker', 'run', '--detach',
                util_image,
            ], capture=True)

            if args.explain:
                # no command was actually run; use a placeholder id
                util_id = 'util_id'
            else:
                util_id = util_id.strip()
        else:
            util_id = None

        test_cmd = [
            'docker', 'run', '--detach',
            '--volume', '/sys/fs/cgroup:/sys/fs/cgroup:ro',
            '--privileged=%s' % str(privileged).lower(),
        ]

        if util_id:
            # expose the utility container under the hostnames the http tests expect
            test_cmd += [
                '--link', '%s:ansible.http.tests' % util_id,
                '--link', '%s:sni1.ansible.http.tests' % util_id,
                '--link', '%s:sni2.ansible.http.tests' % util_id,
                '--link', '%s:fail.ansible.http.tests' % util_id,
                '--env', 'HTTPTESTER=1',
            ]

        test_id, _ = run_command(args, test_cmd + [test_image], capture=True)

        if args.explain:
            test_id = 'test_id'
        else:
            test_id = test_id.strip()

        # write temporary files to /root since /tmp isn't ready immediately on container start
        docker_put(args, test_id, 'test/runner/setup/docker.sh', '/root/docker.sh')

        run_command(args,
                    ['docker', 'exec', test_id, '/bin/bash', '/root/docker.sh'])

        docker_put(args, test_id, '/tmp/ansible.tgz', '/root/ansible.tgz')

        run_command(args,
                    ['docker', 'exec', test_id, 'mkdir', '/root/ansible'])

        run_command(args,
                    ['docker', 'exec', test_id, 'tar', 'oxzf', '/root/ansible.tgz', '--directory', '/root/ansible'])

        try:
            command = ['docker', 'exec']

            if isinstance(args, ShellConfig):
                # interactive shell requires a tty
                command.append('-it')

            run_command(args, command + [test_id] + cmd)
        finally:
            # always retrieve test results, even when the test command failed
            run_command(args,
                        ['docker', 'exec', test_id,
                         'tar', 'czf', '/root/results.tgz', '--directory', '/root/ansible/test', 'results'])

            docker_get(args, test_id, '/root/results.tgz', '/tmp/results.tgz')

            run_command(args,
                        ['tar', 'oxzf', '/tmp/results.tgz', '-C', 'test'])
    finally:
        # remove any containers that were started
        if util_id:
            run_command(args,
                        ['docker', 'rm', '-f', util_id],
                        capture=True)

        if test_id:
            run_command(args,
                        ['docker', 'rm', '-f', test_id],
                        capture=True)
def docker_put(args, container_id, src, dst):
    """Copy a local file into a running container.

    :type args: EnvironmentConfig
    :type container_id: str
    :type src: str
    :type dst: str
    """
    # avoid 'docker cp' due to a bug which causes 'docker rm' to fail
    dd_cmd = ['docker', 'exec', '-i', container_id, 'dd', 'of=%s' % dst, 'bs=%s' % BUFFER_SIZE]

    with open(src, 'rb') as local_file:
        run_command(args, dd_cmd, stdin=local_file, capture=True)
def docker_get(args, container_id, src, dst):
    """Copy a file out of a running container to a local path.

    :type args: EnvironmentConfig
    :type container_id: str
    :type src: str
    :type dst: str
    """
    # avoid 'docker cp' due to a bug which causes 'docker rm' to fail
    dd_cmd = ['docker', 'exec', '-i', container_id, 'dd', 'if=%s' % src, 'bs=%s' % BUFFER_SIZE]

    with open(dst, 'wb') as local_file:
        run_command(args, dd_cmd, stdout=local_file, capture=True)
def delegate_remote(args, exclude, require):
    """Delegate test execution to a remote instance provided by Ansible Core CI.

    :type args: EnvironmentConfig
    :type exclude: list[str]
    :type require: list[str]
    """
    # --remote is formatted as "platform/version", e.g. "freebsd/10.3"
    parts = args.remote.split('/', 1)

    platform = parts[0]
    version = parts[1]

    core_ci = AnsibleCoreCI(args, platform, version, stage=args.remote_stage)

    try:
        core_ci.start()
        core_ci.wait()

        # strip --remote (and its value) from the delegated command line
        options = {
            '--remote': 1,
        }

        cmd = generate_command(args, 'ansible/test/runner/test.py', options, exclude, require)

        if isinstance(args, IntegrationConfig):
            if not args.allow_destructive:
                # NOTE(review): appends --allow-destructive when the caller did
                # NOT pass it, presumably because the remote instance is
                # disposable — confirm intent.
                cmd.append('--allow-destructive')

        manage = ManagePosixCI(core_ci)
        manage.setup()

        try:
            manage.ssh(cmd)
        finally:
            # always retrieve test results, even when the test command failed;
            # copy first so the download sees a stable snapshot
            manage.ssh('rm -rf /tmp/results && cp -a ansible/test/results /tmp/results')
            manage.download('/tmp/results', 'test')
    finally:
        # NOTE(review): no-op placeholder — instance cleanup/teardown appears
        # to be intentionally deferred (instances are reused by stage).
        pass
def generate_command(args, path, options, exclude, require):
    """Build the command line used to re-invoke the test runner when delegating.

    :type args: EnvironmentConfig
    :type path: str
    :type options: dict[str, int]
    :type exclude: list[str]
    :type require: list[str]
    :rtype: list[str]
    """
    # --color takes one value and is always replaced with an explicit setting
    options['--color'] = 1

    cmd = [path]
    cmd.extend(filter_options(args, sys.argv[1:], options, exclude, require))
    cmd.extend(['--color', 'yes' if args.color else 'no'])

    if args.requirements:
        cmd.append('--requirements')

    if isinstance(args, ShellConfig):
        cmd = create_shell_command(cmd)

    return cmd
def filter_options(args, argv, options, exclude, require):
    """Yield the given command line arguments with the specified options
    removed, then append --exclude/--require options for the given targets.

    :type args: EnvironmentConfig
    :type argv: list[str]
    :type options: dict[str, int]
    :type exclude: list[str]
    :type require: list[str]
    :rtype: collections.Iterable[str]
    """
    filtered = dict(options)
    filtered['--requirements'] = 0

    if isinstance(args, TestConfig):
        # change detection is resolved before delegation and must not be re-applied
        filtered.update({
            '--changed': 0,
            '--tracked': 0,
            '--untracked': 0,
            '--ignore-committed': 0,
            '--ignore-staged': 0,
            '--ignore-unstaged': 0,
            '--changed-from': 1,
            '--changed-path': 1,
        })

    pending_values = 0  # values still to skip for the most recent filtered option

    for argument in argv:
        if pending_values and not argument.startswith('-'):
            pending_values -= 1
            continue

        pending_values = 0

        pieces = argument.split('=', 1)
        name = pieces[0]

        if name in filtered:
            # skip the option; values supplied via '=' count toward the total
            pending_values = filtered[name] - len(pieces) + 1
            continue

        yield argument

    for target in exclude:
        yield '--exclude'
        yield target

    for target in require:
        yield '--require'
        yield target
1253
test/runner/lib/executor.py
Normal file
1253
test/runner/lib/executor.py
Normal file
File diff suppressed because it is too large
Load diff
76
test/runner/lib/git.py
Normal file
76
test/runner/lib/git.py
Normal file
|
@ -0,0 +1,76 @@
|
|||
"""Wrapper around git command-line tools."""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
from lib.util import (
|
||||
CommonConfig,
|
||||
run_command,
|
||||
)
|
||||
|
||||
|
||||
class Git(object):
    """Wrapper around git command-line tools."""
    def __init__(self, args):
        """
        :type args: CommonConfig
        """
        self.args = args
        self.git = 'git'

    def get_diff_names(self, args):
        """Return the names of files changed by the given diff arguments.

        :type args: list[str]
        :rtype: list[str]
        """
        return self.run_git_split(['diff', '--name-only', '--no-renames', '-z'] + args, '\0')

    def get_file_names(self, args):
        """Return the names of tracked files matching the given arguments.

        :type args: list[str]
        :rtype: list[str]
        """
        return self.run_git_split(['ls-files', '-z'] + args, '\0')

    def get_branches(self):
        """Return the names of all local branches.

        :rtype: list[str]
        """
        return self.run_git_split(['for-each-ref', 'refs/heads/', '--format', '%(refname:strip=2)'])

    def get_branch(self):
        """Return the name of the currently checked out branch.

        :rtype: str
        """
        return self.run_git(['symbolic-ref', '--short', 'HEAD']).strip()

    def get_branch_fork_point(self, branch):
        """Return the commit at which the given branch forked.

        :type branch: str
        :rtype: str
        """
        return self.run_git(['merge-base', '--fork-point', branch]).strip()

    def run_git_split(self, cmd, separator=None):
        """Run git and split its stripped output on the given separator.

        :type cmd: list[str]
        :param separator: str | None
        :rtype: list[str]
        """
        output = self.run_git(cmd).strip(separator)

        return output.split(separator) if output else []

    def run_git(self, cmd):
        """Run git and return its captured standard output.

        :type cmd: list[str]
        :rtype: str
        """
        return run_command(self.args, [self.git] + cmd, capture=True, always=True)[0]
122
test/runner/lib/http.py
Normal file
122
test/runner/lib/http.py
Normal file
|
@ -0,0 +1,122 @@
|
|||
"""
|
||||
Primitive replacement for requests to avoid extra dependency.
|
||||
Avoids use of urllib2 due to lack of SNI support.
|
||||
"""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import json
|
||||
|
||||
try:
|
||||
from urllib import urlencode
|
||||
except ImportError:
|
||||
# noinspection PyCompatibility,PyUnresolvedReferences,PyUnresolvedReferences
|
||||
from urllib.parse import urlencode # pylint: disable=locally-disabled, import-error, no-name-in-module
|
||||
|
||||
from lib.util import (
|
||||
CommonConfig,
|
||||
ApplicationError,
|
||||
run_command,
|
||||
)
|
||||
|
||||
|
||||
class HttpClient(object):
    """Make HTTP requests via curl."""
    def __init__(self, args, always=False):
        """
        :type args: CommonConfig
        :type always: bool  # run the command even in explain (dry-run) mode
        """
        self.args = args
        self.always = always

    def get(self, url):
        """Send a GET request.

        :type url: str
        :rtype: HttpResponse
        """
        return self.request('GET', url)

    def delete(self, url):
        """Send a DELETE request.

        :type url: str
        :rtype: HttpResponse
        """
        return self.request('DELETE', url)

    def put(self, url, data=None, headers=None):
        """Send a PUT request.

        :type url: str
        :type data: str | None
        :type headers: dict[str, str] | None
        :rtype: HttpResponse
        """
        return self.request('PUT', url, data, headers)

    def request(self, method, url, data=None, headers=None):
        """Send a request using the given method and return the parsed response.

        :type method: str
        :type url: str
        :type data: str | None
        :type headers: dict[str, str] | None
        :rtype: HttpResponse
        """
        cmd = ['curl', '-s', '-S', '-i', '-X', method]

        # copy before modifying so the caller's dict is not mutated
        # (previously the 'Expect' header was written into the caller's dict)
        headers = dict(headers) if headers else {}

        headers['Expect'] = ''  # don't send expect continue header

        for header in headers.keys():
            cmd += ['-H', '%s: %s' % (header, headers[header])]

        if data is not None:
            cmd += ['-d', data]

        cmd += [url]

        stdout, _ = run_command(self.args, cmd, capture=True, always=self.always)

        if self.args.explain and not self.always:
            # no command was actually run; return a placeholder response
            return HttpResponse(200, '')

        # split curl's combined header/body output (-i)
        header, body = stdout.split('\r\n\r\n', 1)

        response_headers = header.split('\r\n')
        first_line = response_headers[0]
        http_response = first_line.split(' ')
        status_code = int(http_response[1])

        return HttpResponse(status_code, body)
class HttpResponse(object):
    """HTTP response from curl."""
    def __init__(self, status_code, response):
        """
        :type status_code: int  # HTTP status code
        :type response: str  # raw response body
        """
        self.status_code = status_code
        self.response = response

    def json(self):
        """Parse the response body as JSON.

        :rtype: any
        """
        try:
            return json.loads(self.response)
        except ValueError:
            raise HttpError(self.status_code, 'Cannot parse response as JSON:\n%s' % self.response)
class HttpError(ApplicationError):
    """HTTP response as an error."""
    def __init__(self, status, message):
        """
        :type status: int
        :type message: str
        """
        # prefix the message with the status code so the error is self-describing
        super(HttpError, self).__init__('%s: %s' % (status, message))
        self.status = status  # HTTP status code associated with this error
142
test/runner/lib/manage_ci.py
Normal file
142
test/runner/lib/manage_ci.py
Normal file
|
@ -0,0 +1,142 @@
|
|||
"""Access Ansible Core CI remote services."""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import pipes
|
||||
|
||||
from time import sleep
|
||||
|
||||
import lib.pytar
|
||||
|
||||
from lib.util import (
|
||||
SubprocessError,
|
||||
ApplicationError,
|
||||
run_command,
|
||||
)
|
||||
|
||||
from lib.core_ci import (
|
||||
AnsibleCoreCI,
|
||||
)
|
||||
|
||||
from lib.ansible_util import (
|
||||
ansible_environment,
|
||||
)
|
||||
|
||||
|
||||
class ManageWindowsCI(object):
    """Manage access to a Windows instance provided by Ansible Core CI."""
    def __init__(self, core_ci):
        """
        :type core_ci: AnsibleCoreCI
        """
        self.core_ci = core_ci

    def wait(self):
        """Wait for instance to respond to ansible ping."""
        connection = self.core_ci.connection

        extra_vars = ' '.join([
            'ansible_connection=winrm',
            'ansible_host=%s' % connection.hostname,
            'ansible_user=%s' % connection.username,
            'ansible_password=%s' % connection.password,
            'ansible_port=%s' % connection.port,
            'ansible_winrm_server_cert_validation=ignore',
        ])

        name = 'windows_%s' % self.core_ci.version
        env = ansible_environment(self.core_ci.args)
        cmd = ['ansible', '-m', 'win_ping', '-i', '%s,' % name, name, '-e', extra_vars]

        # retry for up to ~15 minutes while the instance boots
        for _ in range(1, 90):
            try:
                run_command(self.core_ci.args, cmd, env=env)
                return
            except SubprocessError:
                sleep(10)

        raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
                               (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))
class ManagePosixCI(object):
    """Manage access to a POSIX instance provided by Ansible Core CI."""
    def __init__(self, core_ci):
        """
        :type core_ci: AnsibleCoreCI
        """
        self.core_ci = core_ci
        self.ssh_args = ['-o', 'BatchMode=yes', '-o', 'StrictHostKeyChecking=no', '-i', self.core_ci.ssh_key.key]

        # Default to no privilege escalation. Previously self.become was only
        # assigned for the platforms below, which caused an AttributeError in
        # ssh() for any other platform.
        self.become = []

        if self.core_ci.platform == 'freebsd':
            self.become = ['su', '-l', 'root', '-c']
        elif self.core_ci.platform == 'osx':
            self.become = ['sudo', '-in', 'PATH=/usr/local/bin:$PATH']

    def setup(self):
        """Start instance and wait for it to become ready and respond to an ansible ping."""
        self.wait()
        self.configure()
        self.upload_source()

    def wait(self):
        """Wait for instance to respond to SSH."""
        # retry for up to ~15 minutes while the instance boots
        for _ in range(1, 90):
            try:
                self.ssh('id')
                return
            except SubprocessError:
                sleep(10)
                continue

        raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
                               (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))

    def configure(self):
        """Configure remote host for testing."""
        self.upload('test/runner/setup/remote.sh', '/tmp')
        self.ssh('chmod +x /tmp/remote.sh && /tmp/remote.sh %s' % self.core_ci.platform)

    def upload_source(self):
        """Upload and extract source."""
        if not self.core_ci.args.explain:
            lib.pytar.create_tarfile('/tmp/ansible.tgz', '.', lib.pytar.ignore)

        self.upload('/tmp/ansible.tgz', '/tmp')
        self.ssh('rm -rf ~/ansible && mkdir ~/ansible && cd ~/ansible && tar oxzf /tmp/ansible.tgz')

    def download(self, remote, local):
        """Download a file or directory from the remote instance.

        :type remote: str
        :type local: str
        """
        self.scp('%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote), local)

    def upload(self, local, remote):
        """Upload a file or directory to the remote instance.

        :type local: str
        :type remote: str
        """
        self.scp(local, '%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote))

    def ssh(self, command):
        """Run a command on the remote instance over SSH.

        :type command: str | list[str]
        """
        if isinstance(command, list):
            command = ' '.join(pipes.quote(c) for c in command)

        run_command(self.core_ci.args,
                    ['ssh', '-tt', '-q'] + self.ssh_args +
                    ['-p', str(self.core_ci.connection.port),
                     '%s@%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname)] +
                    self.become + [pipes.quote(command)])

    def scp(self, src, dst):
        """Copy a file or directory between the local host and the remote instance.

        :type src: str
        :type dst: str
        """
        run_command(self.core_ci.args,
                    ['scp'] + self.ssh_args +
                    ['-P', str(self.core_ci.connection.port), '-q', '-r', src, dst])
69
test/runner/lib/pytar.py
Normal file
69
test/runner/lib/pytar.py
Normal file
|
@ -0,0 +1,69 @@
|
|||
"""Python native TGZ creation."""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import tarfile
|
||||
import os
|
||||
|
||||
# improve performance by disabling uid/gid lookups
|
||||
tarfile.pwd = None
|
||||
tarfile.grp = None
|
||||
|
||||
# To reduce archive time and size, ignore non-versioned files which are large or numerous.
|
||||
# Also ignore miscellaneous git related files since the .git directory is ignored.
|
||||
|
||||
IGNORE_DIRS = (
    '.tox',
    '.git',
    '.idea',
    '__pycache__',
    'ansible.egg-info',
)

IGNORE_FILES = (
    '.gitignore',
    '.gitdir',
)

IGNORE_EXTENSIONS = (
    '.pyc',
    '.retry',
)


def ignore(item):
    """Tar filter which excludes generated results, git metadata and caches.

    :type item: tarfile.TarInfo
    :rtype: tarfile.TarInfo | None  # None excludes the entry from the archive
    """
    filename = os.path.basename(item.path)
    name, extension = os.path.splitext(filename)
    # NOTE: os.path.split only yields (head, tail), so only the final path
    # component is effectively checked against IGNORE_DIRS
    path_parts = os.path.split(item.path)

    if not item.isdir():
        excluded = (
            item.path.startswith('./test/results/') or
            (item.path.startswith('./docsite/') and filename.endswith('_module.rst')) or
            name in IGNORE_FILES or
            extension in IGNORE_EXTENSIONS
        )

        if excluded:
            return None

    if any(part in IGNORE_DIRS for part in path_parts):
        return None

    return item
def create_tarfile(dst_path, src_path, tar_filter):
    """Create a gzip compressed tarball of src_path at dst_path.

    :type dst_path: str
    :type src_path: str
    :type tar_filter: (tarfile.TarInfo) -> tarfile.TarInfo | None
    """
    # compresslevel=4 trades some compression for speed
    archive = tarfile.TarFile.gzopen(dst_path, mode='w', compresslevel=4)

    try:
        archive.add(src_path, filter=tar_filter)
    finally:
        archive.close()
530
test/runner/lib/target.py
Normal file
530
test/runner/lib/target.py
Normal file
|
@ -0,0 +1,530 @@
|
|||
"""Test target identification, iteration and inclusion/exclusion."""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import os
|
||||
import re
|
||||
import errno
|
||||
import itertools
|
||||
import abc
|
||||
|
||||
from lib.util import ApplicationError
|
||||
|
||||
MODULE_EXTENSIONS = '.py', '.ps1'
|
||||
|
||||
|
||||
def find_target_completion(target_func, prefix):
    """Return completion candidates for the given prefix, or the error text on failure.

    :type target_func: () -> collections.Iterable[CompletionTarget]
    :type prefix: unicode
    :rtype: list[str]
    """
    try:
        targets = target_func()
        # COMP_TYPE '63' ('?') indicates double-tab completion from bash
        short_form = os.environ.get('COMP_TYPE') == '63'
        return walk_completion_targets(targets, prefix.encode(), short_form)
    except Exception as ex:  # pylint: disable=locally-disabled, broad-except
        # surface the error as the sole completion so it is visible to the user
        return [str(ex)]
def walk_completion_targets(targets, prefix, short=False):
    """Return sorted completion matches for the given prefix.

    :type targets: collections.Iterable[CompletionTarget]
    :type prefix: str
    :type short: bool  # use short (basename) form when unambiguous
    :rtype: tuple[str]
    """
    aliases = set()

    for target in targets:
        aliases.update(target.aliases)

    # don't offer a directory alias the user has already fully typed
    if prefix.endswith('/') and prefix in aliases:
        aliases.remove(prefix)

    matches = []

    for alias in aliases:
        # only descend one path level past the prefix
        if alias.startswith(prefix) and '/' not in alias[len(prefix):-1]:
            matches.append(alias)

    if short:
        offset = len(os.path.dirname(prefix))
        if offset:
            offset += 1
        relative_matches = [match[offset:] for match in matches if len(match) > offset]
        if len(relative_matches) > 1:
            matches = relative_matches

    return tuple(sorted(matches))
def walk_internal_targets(targets, includes=None, excludes=None, requires=None):
    """Resolve include/exclude/require patterns into a sorted tuple of targets.

    :type targets: collections.Iterable[T <= CompletionTarget]
    :type includes: list[str]
    :type excludes: list[str]
    :type requires: list[str]
    :rtype: tuple[T <= CompletionTarget]
    """
    targets = tuple(targets)

    included = sorted(filter_targets(targets, includes, errors=True, directories=False),
                      key=lambda target: target.name)

    if requires:
        required = set(filter_targets(targets, requires, errors=True, directories=False))
        included = [target for target in included if target in required]

    if excludes:
        # validate the exclude patterns against the full target list
        list(filter_targets(targets, excludes, errors=True, include=False, directories=False))

    remaining = set(filter_targets(included, excludes, errors=False, include=False, directories=False))

    return tuple(sorted(remaining, key=lambda target: target.name))
def walk_external_targets(targets, includes=None, excludes=None, requires=None):
    """Resolve patterns into (include, exclude) tuples of consolidated targets.

    :type targets: collections.Iterable[CompletionTarget]
    :type includes: list[str]
    :type excludes: list[str]
    :type requires: list[str]
    :rtype: tuple[CompletionTarget], tuple[CompletionTarget]
    """
    targets = tuple(targets)

    if requires:
        include_targets = list(filter_targets(targets, includes, errors=True, directories=False))
        require_targets = set(filter_targets(targets, requires, errors=True, directories=False))
        includes = [target.name for target in include_targets if target in require_targets]

        if includes:
            include_targets = sorted(filter_targets(targets, includes, errors=True), key=lambda t: t.name)
        else:
            include_targets = []
    else:
        include_targets = sorted(filter_targets(targets, includes, errors=True), key=lambda t: t.name)

    if excludes:
        exclude_targets = sorted(filter_targets(targets, excludes, errors=True), key=lambda t: t.name)
    else:
        exclude_targets = []

    # the consolidation pass was previously duplicated inline for both lists
    include = _consolidate_directory_targets(include_targets)
    exclude = _consolidate_directory_targets(exclude_targets)

    return tuple(include), tuple(exclude)


def _consolidate_directory_targets(targets):
    """Merge consecutive DirectoryTargets sharing a name into a single entry.

    :type targets: list[CompletionTarget]
    :rtype: list[CompletionTarget]
    """
    consolidated = []
    previous = None

    for target in targets:
        if isinstance(previous, DirectoryTarget) and isinstance(target, DirectoryTarget) \
                and previous.name == target.name:
            # fold this target's modules into the previously kept entry
            previous.modules = tuple(set(previous.modules) | set(target.modules))
        else:
            consolidated.append(target)
            previous = target

    return consolidated
def filter_targets(targets, patterns, include=True, directories=True, errors=True):
    """Yield targets matching (or not matching, when include=False) the patterns.

    :type targets: collections.Iterable[CompletionTarget]
    :type patterns: list[str]
    :type include: bool  # True yields matches, False yields non-matches
    :type directories: bool  # True yields a DirectoryTarget for directory matches
    :type errors: bool  # True raises when a pattern matched nothing
    :rtype: collections.Iterable[CompletionTarget]
    """
    unmatched = set(patterns or ())

    for target in targets:
        matched_directories = set()
        matched = False

        if patterns:
            for alias in target.aliases:
                for pattern in patterns:
                    if not re.match('^%s$' % pattern, alias):
                        continue

                    matched = True
                    unmatched.discard(pattern)

                    if alias.endswith('/'):
                        if target.base_path and len(target.base_path) > len(alias):
                            matched_directories.add(target.base_path)
                        else:
                            matched_directories.add(alias)
        elif include:
            # no patterns given: everything matches
            matched = True

            if not target.base_path:
                matched_directories.add('.')

            for alias in target.aliases:
                if alias.endswith('/'):
                    if target.base_path and len(target.base_path) > len(alias):
                        matched_directories.add(target.base_path)
                    else:
                        matched_directories.add(alias)

        if matched != include:
            continue

        if directories and matched_directories:
            # collapse to the shortest matched directory
            yield DirectoryTarget(sorted(matched_directories, key=len)[0], target.modules)
        else:
            yield target

    if errors and unmatched:
        raise TargetPatternsNotMatched(unmatched)
def walk_module_targets():
    """Yield test targets for all modules.

    :rtype: collections.Iterable[TestTarget]
    """
    path = 'lib/ansible/modules'

    for target in walk_test_targets(path, path + '/', extensions=MODULE_EXTENSIONS):
        # skip files which are not modules, e.g. __init__.py
        if target.module:
            yield target
def walk_units_targets():
    """Yield test targets for all unit tests.

    :rtype: collections.Iterable[TestTarget]
    """
    units_path = 'test/units'

    return walk_test_targets(path=units_path, module_path=units_path + '/modules/',
                             extensions=('.py',), prefix='test_')
def walk_compile_targets():
    """Yield test targets for all python source files.

    :rtype: collections.Iterable[TestTarget]
    """
    python_extensions = ('.py',)

    return walk_test_targets(module_path='lib/ansible/modules/', extensions=python_extensions)
def walk_sanity_targets():
    """Yield test targets for all files subject to sanity tests.

    :rtype: collections.Iterable[TestTarget]
    """
    module_root = 'lib/ansible/modules/'

    return walk_test_targets(module_path=module_root)
def walk_posix_integration_targets():
    """Yield integration targets aliased for POSIX platforms.

    :rtype: collections.Iterable[IntegrationTarget]
    """
    return (target for target in walk_integration_targets() if 'posix/' in target.aliases)
def walk_network_integration_targets():
    """Yield integration targets aliased for network platforms.

    :rtype: collections.Iterable[IntegrationTarget]
    """
    return (target for target in walk_integration_targets() if 'network/' in target.aliases)
def walk_windows_integration_targets():
    """Yield integration targets aliased for Windows.

    :rtype: collections.Iterable[IntegrationTarget]
    """
    return (target for target in walk_integration_targets() if 'windows/' in target.aliases)
def walk_integration_targets():
    """Yield an IntegrationTarget for every integration target directory.

    :rtype: collections.Iterable[IntegrationTarget]
    """
    base_path = 'test/integration/targets'
    modules = frozenset(target.module for target in walk_module_targets())
    prefixes = load_integration_prefixes()

    for directory in sorted(os.path.join(base_path, name) for name in os.listdir(base_path)):
        yield IntegrationTarget(directory, modules, prefixes)
def load_integration_prefixes():
    """Load target name prefixes from test/integration/target-prefixes.* files.

    :rtype: dict[str, str]  # maps target name prefix -> category (file extension)
    """
    base_path = 'test/integration'
    prefixes = {}

    for file_name in sorted(os.listdir(base_path)):
        if os.path.splitext(file_name)[0] != 'target-prefixes':
            continue

        # the file extension names the category, e.g. target-prefixes.network
        category = os.path.splitext(file_name)[1][1:]

        with open(os.path.join(base_path, file_name), 'r') as prefix_fd:
            for line in prefix_fd.read().splitlines():
                prefixes[line] = category

    return prefixes
def walk_test_targets(path=None, module_path=None, extensions=None, prefix=None):
    """Walk the tree at path and yield a TestTarget for each eligible file.

    :type path: str | None  # defaults to the current directory
    :type module_path: str | None
    :type extensions: tuple[str] | None
    :type prefix: str | None
    :rtype: collections.Iterable[TestTarget]
    """
    for root, _, file_names in os.walk(path or '.', topdown=False):
        if root.endswith('/__pycache__'):
            continue

        if path is None:
            # strip the leading './' added by walking the current directory
            root = root[2:]

        if root.startswith('.'):
            continue

        for file_name in file_names:
            name, ext = os.path.splitext(os.path.basename(file_name))

            skipped = (
                name.startswith('.') or
                (extensions and ext not in extensions) or
                (prefix and not name.startswith(prefix))
            )

            if skipped:
                continue

            yield TestTarget(os.path.join(root, file_name), module_path, prefix, path)
class CompletionTarget(object):
    """Command-line argument completion target base class."""
    __metaclass__ = abc.ABCMeta  # NOTE: Python 2 style metaclass declaration

    def __init__(self):
        self.name = None
        self.path = None
        self.base_path = None
        self.modules = tuple()
        self.aliases = tuple()

    def __eq__(self, other):
        # equality (and hashing below) is based on the repr, which includes
        # both the name and the module list
        return isinstance(other, CompletionTarget) and repr(self) == repr(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        return self.name < other.name

    def __gt__(self, other):
        return self.name > other.name

    def __hash__(self):
        return hash(repr(self))

    def __repr__(self):
        if self.modules:
            return '%s (%s)' % (self.name, ', '.join(self.modules))

        return self.name
class DirectoryTarget(CompletionTarget):
    """Directory target."""
    def __init__(self, path, modules):
        """
        :type path: str
        :type modules: tuple[str]  # modules contained in this directory
        """
        super(DirectoryTarget, self).__init__()

        # the directory path serves as both name and path
        self.name = self.path = path
        self.modules = modules
class TestTarget(CompletionTarget):
    """Generic test target."""
    def __init__(self, path, module_path, module_prefix, base_path):
        """
        :type path: str
        :type module_path: str | None
        :type module_prefix: str | None
        :type base_path: str
        """
        super(TestTarget, self).__init__()

        self.name = path
        self.path = path
        self.base_path = '%s/' % base_path if base_path else None

        file_name, extension = os.path.splitext(os.path.basename(path))

        # a target is a module when it lives under the module tree, has a module
        # extension and is not a package __init__ file
        is_module = bool(module_path) and path.startswith(module_path) \
            and file_name != '__init__' and extension in MODULE_EXTENSIONS

        if is_module:
            prefix_length = len(module_prefix) if module_prefix else 0
            self.module = file_name[prefix_length:].lstrip('_')
            self.modules = (self.module,)
        else:
            self.module = None
            self.modules = tuple()

        # aliases: the full path, the module name (if any) and every ancestor
        # directory expressed with a trailing slash
        candidates = [self.path, self.module]
        segments = path.split('/')
        candidates.extend('%s/' % '/'.join(segments[:count]) for count in range(1, len(segments)))

        self.aliases = tuple(sorted(alias for alias in candidates if alias))
|
||||
|
||||
|
||||
class IntegrationTarget(CompletionTarget):
    """Integration test target."""
    # group names which imply the target cannot run on a POSIX host
    non_posix = frozenset((
        'network',
        'windows',
    ))

    # reserved group categories which are never combined with a target name in aliases
    categories = frozenset(non_posix | frozenset((
        'posix',
        'module',
        'needs',
        'skip',
    )))

    def __init__(self, path, modules, prefixes):
        """
        :type path: str  # directory containing the integration test target
        :type modules: frozenset[str]  # names of all known modules
        :type prefixes: dict[str, str]  # mapping of target name prefix to group name
        """
        super(IntegrationTarget, self).__init__()

        self.name = os.path.basename(path)
        self.path = path

        # script_path and type

        # sorted so the first matching runme.* file is chosen deterministically
        contents = sorted(os.listdir(path))

        runme_files = tuple(c for c in contents if os.path.splitext(c)[0] == 'runme')
        test_files = tuple(c for c in contents if os.path.splitext(c)[0] == 'test')

        self.script_path = None

        # classify the target by the files it contains:
        # runme.* -> script, test.* -> special, tasks/ dir -> role, otherwise unknown
        if runme_files:
            self.type = 'script'
            self.script_path = os.path.join(path, runme_files[0])
        elif test_files:
            self.type = 'special'
        elif os.path.isdir(os.path.join(path, 'tasks')):
            self.type = 'role'
        else:
            self.type = 'unknown'

        # static_aliases

        # the optional 'aliases' file lists one alias or group per line
        try:
            with open(os.path.join(path, 'aliases'), 'r') as aliases_file:
                static_aliases = tuple(aliases_file.read().splitlines())
        except IOError as ex:
            # a missing aliases file is normal; anything else is a real error
            if ex.errno != errno.ENOENT:
                raise
            static_aliases = tuple()

        # modules

        # the target covers a module when its name (with or without a 'win_'
        # prefix) matches a known module name
        if self.name in modules:
            module = self.name
        elif self.name.startswith('win_') and self.name[4:] in modules:
            module = self.name[4:]
        else:
            module = None

        # combine the implicit module with any module names listed in the aliases file
        self.modules = tuple(sorted(a for a in static_aliases + tuple([module]) if a in modules))

        # groups

        groups = [self.type]
        # alias entries which are not module names are treated as group names
        groups += [a for a in static_aliases if a not in modules]
        groups += ['module/%s' % m for m in self.modules]

        if not self.modules:
            groups.append('non_module')

        if 'destructive' not in groups:
            groups.append('non_destructive')

        # the target name prefix (text before the first underscore) may map to a group
        if '_' in self.name:
            prefix = self.name[:self.name.find('_')]
        else:
            prefix = None

        if prefix in prefixes:
            group = prefixes[prefix]

            # when the group differs from the prefix, record both as 'group/prefix'
            if group != prefix:
                group = '%s/%s' % (group, prefix)

            groups.append(group)

        if self.name.startswith('win_'):
            groups.append('windows')

        if self.name.startswith('connection_'):
            groups.append('connection')

        # setup/prepare targets and targets of unrecognized type are hidden from completion
        if self.name.startswith('setup_') or self.name.startswith('prepare_'):
            groups.append('hidden')

        if self.type not in ('script', 'role'):
            groups.append('hidden')

        # iterate a snapshot of the current groups (len is evaluated up front)
        # while appending each ancestor of every 'a/b/c' style group
        for group in itertools.islice(groups, 0, len(groups)):
            if '/' in group:
                parts = group.split('/')
                for i in range(1, len(parts)):
                    groups.append('/'.join(parts[:i]))

        # anything not explicitly non-POSIX is assumed to run on POSIX hosts
        if not any(g in self.non_posix for g in groups):
            groups.append('posix')

        # aliases

        # aliases: the bare name, each group as 'group/', and 'group/name' for
        # groups outside the reserved categories
        aliases = [self.name] + \
                  ['%s/' % g for g in groups] + \
                  ['%s/%s' % (g, self.name) for g in groups if g not in self.categories]

        # hidden targets have all of their aliases nested under 'hidden/'
        if 'hidden/' in aliases:
            aliases = ['hidden/'] + ['hidden/%s' % a for a in aliases if not a.startswith('hidden/')]

        self.aliases = tuple(sorted(set(aliases)))
|
||||
|
||||
|
||||
class TargetPatternsNotMatched(ApplicationError):
    """One or more targets were not matched when a match was required."""
    def __init__(self, patterns):
        """
        :type patterns: set[str]
        """
        self.patterns = sorted(patterns)

        # single pattern on one line, multiple patterns one per line
        if len(self.patterns) == 1:
            message = 'Target pattern not matched: %s' % self.patterns[0]
        else:
            message = 'Target patterns not matched:\n%s' % '\n'.join(self.patterns)

        super(TargetPatternsNotMatched, self).__init__(message)
|
48
test/runner/lib/thread.py
Normal file
48
test/runner/lib/thread.py
Normal file
|
@ -0,0 +1,48 @@
|
|||
"""Python threading tools."""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import threading
|
||||
import sys
|
||||
|
||||
try:
|
||||
# noinspection PyPep8Naming
|
||||
import Queue as queue
|
||||
except ImportError:
|
||||
# noinspection PyUnresolvedReferences
|
||||
import queue # pylint: disable=locally-disabled, import-error
|
||||
|
||||
|
||||
class WrappedThread(threading.Thread):
    """Wrapper around Thread which captures results and exceptions."""
    def __init__(self, action):
        """
        :type action: () -> any  # callable executed on the new thread
        """
        super(WrappedThread, self).__init__()
        # single-slot queue used to hand the (result, exc_info) pair back to the waiter
        self._result = queue.Queue()
        self.action = action

    def run(self):
        """
        Run action and capture results or exception.
        Do not override. Do not call directly. Executed by the start() method.
        """
        # capture absolutely everything so the waiting thread can re-raise it
        # noinspection PyBroadException
        try:
            self._result.put((self.action(), None))
        except:  # pylint: disable=locally-disabled, bare-except
            self._result.put((None, sys.exc_info()))

    def wait_for_result(self):
        """
        Wait for thread to exit and return the result or raise an exception.
        :rtype: any
        """
        # blocks until run() has stored its (result, exc_info) pair
        result, exception = self._result.get()
        if exception:
            if sys.version_info[0] > 2:
                # Python 3: re-raise with the original traceback attached
                # NOTE(review): exc_info[1] is passed to the exception type's
                # constructor — verify this round-trips for exceptions with
                # custom constructors
                raise exception[0](exception[1]).with_traceback(exception[2])
            # Python 2: the three-argument raise is a syntax error on Python 3,
            # so it must be hidden inside exec
            # noinspection PyRedundantParentheses
            exec('raise exception[0], exception[1], exception[2]')  # pylint: disable=locally-disabled, exec-used
        return result
|
415
test/runner/lib/util.py
Normal file
415
test/runner/lib/util.py
Normal file
|
@ -0,0 +1,415 @@
|
|||
"""Miscellaneous utility functions and classes."""
|
||||
|
||||
from __future__ import absolute_import, print_function
|
||||
|
||||
import errno
|
||||
import os
|
||||
import pipes
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
|
||||
|
||||
def is_shippable():
    """Report whether we are running under the Shippable CI environment.

    :rtype: bool
    """
    # Shippable sets this variable to the literal string 'true'
    shippable = os.environ.get('SHIPPABLE')
    return shippable == 'true'
|
||||
|
||||
|
||||
def remove_file(path):
    """Delete the given file if it exists; missing paths and non-files are ignored.

    :type path: str
    """
    if not os.path.isfile(path):
        return

    os.remove(path)
|
||||
|
||||
|
||||
def find_executable(executable, cwd=None, path=None, required=True):
    """
    Find an executable, either at an explicit location or on the search path.

    :type executable: str  # name, or path containing a directory component
    :type cwd: str  # directory used to resolve a relative executable path
    :type path: str  # search path to use instead of the PATH environment variable
    :type required: bool | str  # truthy to fail when not found; the string 'warning' downgrades the failure to a warning
    :rtype: str | None
    """
    match = None
    real_cwd = os.getcwd()

    if not cwd:
        cwd = real_cwd

    if os.path.dirname(executable):
        # the executable includes a directory component: check only that location
        target = os.path.join(cwd, executable)
        if os.path.exists(target) and os.access(target, os.F_OK | os.X_OK):
            # NOTE(review): returns the original (possibly relative) value
            # rather than the joined target path — confirm callers expect this
            match = executable
    else:
        # bare name: search the path entries
        if path is None:
            path = os.environ.get('PATH', os.defpath)

        if path:
            path_dirs = path.split(os.pathsep)
            seen_dirs = set()

            for path_dir in path_dirs:
                # skip duplicate path entries
                if path_dir in seen_dirs:
                    continue

                seen_dirs.add(path_dir)

                # substitute the requested cwd for the actual working directory
                if os.path.abspath(path_dir) == real_cwd:
                    path_dir = cwd

                candidate = os.path.join(path_dir, executable)

                # first existing executable candidate wins
                if os.path.exists(candidate) and os.access(candidate, os.F_OK | os.X_OK):
                    match = candidate
                    break

    if not match and required:
        message = 'Required program "%s" not found.' % executable

        # any truthy value of required other than 'warning' raises
        if required != 'warning':
            raise ApplicationError(message)

        display.warning(message)

    return match
|
||||
|
||||
|
||||
def run_command(args, cmd, capture=False, env=None, data=None, cwd=None, always=False, stdin=None, stdout=None):
    """Run the given command, honoring the --explain option from the configuration.

    :type args: CommonConfig
    :type cmd: collections.Iterable[str]
    :type capture: bool
    :type env: dict[str, str] | None
    :type data: str | None
    :type cwd: str | None
    :type always: bool
    :type stdin: file | None
    :type stdout: file | None
    :rtype: str | None, str | None
    """
    # in explain mode commands are only displayed, unless the caller forces execution
    explain = not always and args.explain

    return raw_command(cmd, capture=capture, env=env, data=data, cwd=cwd, explain=explain, stdin=stdin, stdout=stdout)
|
||||
|
||||
|
||||
def raw_command(cmd, capture=False, env=None, data=None, cwd=None, explain=False, stdin=None, stdout=None):
    """
    Run a command directly, without consulting the command-line configuration.

    :type cmd: collections.Iterable[str]
    :type capture: bool  # capture and return stdout/stderr
    :type env: dict[str, str] | None  # environment for the child; defaults to common_environment()
    :type data: str | None  # data to write to the child's stdin
    :type cwd: str | None
    :type explain: bool  # display the command without executing it
    :type stdin: file | None
    :type stdout: file | None
    :rtype: str | None, str | None
    """
    if not cwd:
        cwd = os.getcwd()

    if not env:
        env = common_environment()

    cmd = list(cmd)

    # shell-quoted form used only for display
    escaped_cmd = ' '.join(pipes.quote(c) for c in cmd)

    display.info('Run command: %s' % escaped_cmd, verbosity=1)
    display.info('Working directory: %s' % cwd, verbosity=2)

    # best-effort lookup for diagnostics; emits a warning rather than failing
    program = find_executable(cmd[0], cwd=cwd, path=env['PATH'], required='warning')

    if program:
        display.info('Program found: %s' % program, verbosity=2)

    for key in sorted(env.keys()):
        display.info('%s=%s' % (key, env[key]), verbosity=2)

    if explain:
        return None, None

    # communicate() must be used whenever any standard stream is redirected
    communicate = False

    if stdin is not None:
        # an explicit stdin stream takes precedence over provided data
        data = None
        communicate = True
    elif data is not None:
        stdin = subprocess.PIPE
        communicate = True

    if stdout:
        communicate = True

    if capture:
        # honor a caller-supplied stdout stream, otherwise capture via a pipe
        stdout = stdout or subprocess.PIPE
        stderr = subprocess.PIPE
        communicate = True
    else:
        stderr = None

    start = time.time()

    try:
        process = subprocess.Popen(cmd, env=env, stdin=stdin, stdout=stdout, stderr=stderr, cwd=cwd)
    except OSError as ex:
        # translate a missing program into an application-level error
        if ex.errno == errno.ENOENT:
            raise ApplicationError('Required program "%s" not found.' % cmd[0])
        raise

    if communicate:
        # stdout/stderr are rebound from stream objects to captured output (or None)
        stdout, stderr = process.communicate(data)
    else:
        process.wait()
        stdout, stderr = None, None

    status = process.returncode
    runtime = time.time() - start

    display.info('Command exited with status %s after %s seconds.' % (status, runtime), verbosity=4)

    if status == 0:
        return stdout, stderr

    raise SubprocessError(cmd, status, stdout, stderr, runtime)
|
||||
|
||||
|
||||
def common_environment():
    """Common environment used for executing all programs.

    :rtype: dict[str, str]
    """
    env = {
        'LC_ALL': 'en_US.UTF-8',
        'PATH': os.environ.get('PATH', os.defpath),
    }

    # HOME must always be present; HTTPTESTER is passed through only when set
    env.update(pass_vars(required=('HOME',), optional=('HTTPTESTER',)))

    return env
|
||||
|
||||
|
||||
def pass_vars(required=None, optional=None):
    """Build a dict of environment variables to pass through to child processes.

    :type required: collections.Iterable[str] | None
    :type optional: collections.Iterable[str] | None
    :rtype: dict[str, str]
    :raises MissingEnvironmentVariable: when a required variable is not set
    """
    env = {}

    # fall back to an empty tuple so calling with the defaults works
    # (previously pass_vars() raised TypeError by iterating None)
    for name in required or ():
        if name not in os.environ:
            raise MissingEnvironmentVariable(name)
        env[name] = os.environ[name]

    for name in optional or ():
        if name not in os.environ:
            continue  # unset optional variables are simply omitted
        env[name] = os.environ[name]

    return env
|
||||
|
||||
|
||||
def deepest_path(path_a, path_b):
    """Return the deepest of two paths, or None if the paths are unrelated.

    The relationship is determined by a plain string prefix test; the current
    directory '.' is treated as the empty prefix of every path.

    :type path_a: str
    :type path_b: str
    :return: str | None
    """
    first = '' if path_a == '.' else path_a
    second = '' if path_b == '.' else path_b

    if first.startswith(second):
        return first or '.'

    if second.startswith(first):
        return second or '.'

    return None
|
||||
|
||||
|
||||
def remove_tree(path):
    """Recursively delete the given directory tree, ignoring a missing path.

    :type path: str
    """
    try:
        shutil.rmtree(path)
    except OSError as ex:
        if ex.errno == errno.ENOENT:
            return  # a tree which does not exist is already removed

        raise
|
||||
|
||||
|
||||
def make_dirs(path):
    """Create the given directory and any missing parents, ignoring an existing path.

    :type path: str
    """
    try:
        os.makedirs(path)
    except OSError as ex:
        if ex.errno == errno.EEXIST:
            return  # the directory is already present

        raise
|
||||
|
||||
|
||||
class Display(object):
    """Manages color console output."""
    clear = '\033[0m'
    red = '\033[31m'
    green = '\033[32m'
    yellow = '\033[33m'
    blue = '\033[34m'
    purple = '\033[35m'
    cyan = '\033[36m'

    verbosity_colors = {
        0: None,
        1: green,
        2: blue,
        3: cyan,
    }

    def __init__(self):
        self.verbosity = 0
        self.color = True
        self.warnings = []

    def __warning(self, message):
        """Write a warning to stderr without recording it.

        :type message: str
        """
        self.print_message('WARNING: %s' % message, color=self.purple, fd=sys.stderr)

    def review_warnings(self):
        """Review all warnings which previously occurred."""
        if not self.warnings:
            return

        self.__warning('Reviewing previous %d warning(s):' % len(self.warnings))

        for recorded in self.warnings:
            self.__warning(recorded)

    def warning(self, message):
        """Write a warning to stderr and record it for later review.

        :type message: str
        """
        self.__warning(message)
        self.warnings.append(message)

    def notice(self, message):
        """Write a notice to stderr.

        :type message: str
        """
        self.print_message('NOTICE: %s' % message, color=self.purple, fd=sys.stderr)

    def error(self, message):
        """Write an error to stderr.

        :type message: str
        """
        self.print_message('ERROR: %s' % message, color=self.red, fd=sys.stderr)

    def info(self, message, verbosity=0):
        """Write an informational message to stdout when verbose enough.

        :type message: str
        :type verbosity: int
        """
        if verbosity > self.verbosity:
            return

        color = self.verbosity_colors.get(verbosity, self.yellow)
        self.print_message(message, color=color)

    def print_message(self, message, color=None, fd=sys.stdout):  # pylint: disable=locally-disabled, invalid-name
        """Write a message to the given stream, optionally colorized.

        :type message: str
        :type color: str | None
        :type fd: file
        """
        if color and self.color:
            # convert color resets in message to desired color
            colorized = message.replace(self.clear, color)
            message = '%s%s%s' % (color, colorized, self.clear)

        print(message, file=fd)
        fd.flush()
|
||||
|
||||
|
||||
class ApplicationError(Exception):
    """General application error."""
    def __init__(self, message=None):
        """
        :type message: str | None
        """
        # plain forwarder; exists so subclasses share a common catchable base
        super(ApplicationError, self).__init__(message)
|
||||
|
||||
|
||||
class ApplicationWarning(Exception):
    """General application warning which interrupts normal program flow."""
    def __init__(self, message=None):
        """
        :type message: str | None
        """
        # plain forwarder; raised and caught as a control-flow signal, not an error
        super(ApplicationWarning, self).__init__(message)
|
||||
|
||||
|
||||
class SubprocessError(ApplicationError):
    """Error resulting from failed subprocess execution."""
    def __init__(self, cmd, status=0, stdout=None, stderr=None, runtime=None):
        """
        :type cmd: list[str]
        :type status: int
        :type stdout: str | None
        :type stderr: str | None
        :type runtime: float | None
        """
        quoted = ' '.join(pipes.quote(c) for c in cmd)
        parts = ['Command "%s" returned exit status %s.\n' % (quoted, status)]

        # include any captured output, terminating each section with a color reset
        if stderr:
            parts.append('>>> Standard Error\n')
            parts.append('%s%s\n' % (stderr.strip(), Display.clear))

        if stdout:
            parts.append('>>> Standard Output\n')
            parts.append('%s%s\n' % (stdout.strip(), Display.clear))

        message = ''.join(parts).strip()

        super(SubprocessError, self).__init__(message)

        self.cmd = cmd
        self.status = status
        self.stdout = stdout
        self.stderr = stderr
        self.runtime = runtime
|
||||
|
||||
|
||||
class MissingEnvironmentVariable(ApplicationError):
    """Error caused by missing environment variable."""
    def __init__(self, name):
        """
        :type name: str
        """
        message = 'Missing environment variable: %s' % name

        super(MissingEnvironmentVariable, self).__init__(message)

        # the name of the variable which was not set
        self.name = name
|
||||
|
||||
|
||||
class CommonConfig(object):
    """Configuration common to all commands."""
    def __init__(self, args):
        """
        :type args: any  # parsed command line arguments providing color, explain and verbosity
        """
        # copy only the options shared by every command
        self.color = args.color  # type: bool
        self.explain = args.explain  # type: bool
        self.verbosity = args.verbosity  # type: int
|
||||
|
||||
|
||||
# module-level console output manager shared by everything in this program
display = Display()  # pylint: disable=locally-disabled, invalid-name
|
Loading…
Add table
Add a link
Reference in a new issue