Remove all CI scripts since this branch is now End of Life.

Felix Fontein 2025-06-15 13:26:06 +02:00
commit 00b531eef3
31 changed files with 0 additions and 1555 deletions


@@ -1,9 +0,0 @@
<!--
Copyright (c) Ansible Project
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
-->

## Azure Pipelines Configuration

Please see the [Documentation](https://github.com/ansible/community/wiki/Testing:-Azure-Pipelines) for more information.


@@ -1,356 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

trigger:
  batch: true
  branches:
    include:
      - main
      - stable-*

pr:
  autoCancel: true
  branches:
    include:
      - main
      - stable-*

schedules:
  - cron: 0 8 * * *
    displayName: Nightly (main)
    always: true
    branches:
      include:
        - main
  - cron: 0 10 * * *
    displayName: Nightly (active stable branches)
    always: true
    branches:
      include:
        - stable-10
        - stable-9
  - cron: 0 11 * * 0
    displayName: Weekly (old stable branches)
    always: true
    branches:
      include:
        - stable-8

variables:
  - name: checkoutPath
    value: ansible_collections/community/general
  - name: coverageBranches
    value: main
  - name: entryPoint
    value: tests/utils/shippable/shippable.sh
  - name: fetchDepth
    value: 0

resources:
  containers:
    - container: default
      image: quay.io/ansible/azure-pipelines-test-container:6.0.0

pool: Standard

stages:
### Sanity
  - stage: Sanity_2_18
    displayName: Sanity 2.18
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: 2.18/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
            - test: extra
  - stage: Sanity_2_17
    displayName: Sanity 2.17
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: 2.17/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
  - stage: Sanity_2_16
    displayName: Sanity 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: 2.16/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4

### Units
  - stage: Units_2_18
    displayName: Units 2.18
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.18/units/{0}/1
          targets:
            - test: 3.8
            - test: 3.9
            - test: '3.10'
            - test: '3.11'
            - test: '3.12'
            - test: '3.13'
  - stage: Units_2_17
    displayName: Units 2.17
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.17/units/{0}/1
          targets:
            - test: 3.7
            - test: "3.12"
  - stage: Units_2_16
    displayName: Units 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Python {0}
          testFormat: 2.16/units/{0}/1
          targets:
            - test: 2.7
            - test: 3.6
            - test: "3.11"

## Remote
  - stage: Remote_2_18_extra_vms
    displayName: Remote 2.18 extra VMs
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.18/{0}
          targets:
            - name: Alpine 3.20
              test: alpine/3.20
            # - name: Fedora 40
            #   test: fedora/40
            - name: Ubuntu 22.04
              test: ubuntu/22.04
            - name: Ubuntu 24.04
              test: ubuntu/24.04
          groups:
            - vm
  - stage: Remote_2_18
    displayName: Remote 2.18
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.18/{0}
          targets:
            - name: macOS 14.3
              test: macos/14.3
            - name: RHEL 9.4
              test: rhel/9.4
            - name: FreeBSD 14.1
              test: freebsd/14.1
          groups:
            - 1
            - 2
            - 3
  - stage: Remote_2_17
    displayName: Remote 2.17
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.17/{0}
          targets:
            - name: FreeBSD 13.3
              test: freebsd/13.3
            - name: RHEL 9.3
              test: rhel/9.3
          groups:
            - 1
            - 2
            - 3
  - stage: Remote_2_16
    displayName: Remote 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.16/{0}
          targets:
            - name: macOS 13.2
              test: macos/13.2
            - name: RHEL 9.2
              test: rhel/9.2
            - name: RHEL 8.8
              test: rhel/8.8
            - name: RHEL 7.9
              test: rhel/7.9
            # - name: FreeBSD 13.2
            #   test: freebsd/13.2
          groups:
            - 1
            - 2
            - 3

### Docker
  - stage: Docker_2_18
    displayName: Docker 2.18
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.18/linux/{0}
          targets:
            - name: Fedora 40
              test: fedora40
            - name: Alpine 3.20
              test: alpine320
            - name: Ubuntu 22.04
              test: ubuntu2204
            - name: Ubuntu 24.04
              test: ubuntu2404
          groups:
            - 1
            - 2
            - 3
  - stage: Docker_2_17
    displayName: Docker 2.17
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.17/linux/{0}
          targets:
            - name: Fedora 39
              test: fedora39
            - name: Alpine 3.19
              test: alpine319
            - name: Ubuntu 20.04
              test: ubuntu2004
          groups:
            - 1
            - 2
            - 3
  - stage: Docker_2_16
    displayName: Docker 2.16
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.16/linux/{0}
          targets:
            - name: Fedora 38
              test: fedora38
            - name: openSUSE 15
              test: opensuse15
            - name: Alpine 3
              test: alpine3
            - name: CentOS 7
              test: centos7
          groups:
            - 1
            - 2
            - 3

### Community Docker
  - stage: Docker_community_2_18
    displayName: Docker (community images) 2.18
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          testFormat: 2.18/linux-community/{0}
          targets:
            - name: Debian Bullseye
              test: debian-bullseye/3.9
            - name: Debian Bookworm
              test: debian-bookworm/3.11
            - name: ArchLinux
              test: archlinux/3.13
          groups:
            - 1
            - 2
            - 3

### Generic
  # Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
  # - stage: Generic_2_18
  #   displayName: Generic 2.18
  #   dependsOn: []
  #   jobs:
  #     - template: templates/matrix.yml
  #       parameters:
  #         nameFormat: Python {0}
  #         testFormat: 2.18/generic/{0}/1
  #         targets:
  #           - test: '3.8'
  #           - test: '3.11'
  #           - test: '3.13'
  # - stage: Generic_2_17
  #   displayName: Generic 2.17
  #   dependsOn: []
  #   jobs:
  #     - template: templates/matrix.yml
  #       parameters:
  #         nameFormat: Python {0}
  #         testFormat: 2.17/generic/{0}/1
  #         targets:
  #           - test: '3.7'
  #           - test: '3.12'
  # - stage: Generic_2_16
  #   displayName: Generic 2.16
  #   dependsOn: []
  #   jobs:
  #     - template: templates/matrix.yml
  #       parameters:
  #         nameFormat: Python {0}
  #         testFormat: 2.16/generic/{0}/1
  #         targets:
  #           - test: '2.7'
  #           - test: '3.6'
  #           - test: '3.11'

  - stage: Summary
    condition: succeededOrFailed()
    dependsOn:
      - Sanity_2_18
      - Sanity_2_17
      - Sanity_2_16
      - Units_2_18
      - Units_2_17
      - Units_2_16
      - Remote_2_18_extra_vms
      - Remote_2_18
      - Remote_2_17
      - Remote_2_16
      - Docker_2_18
      - Docker_2_17
      - Docker_2_16
      - Docker_community_2_18
      # Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
      # - Generic_2_18
      # - Generic_2_17
      # - Generic_2_16
    jobs:
      - template: templates/coverage.yml


@@ -1,24 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Aggregate code coverage results for later processing.

set -o pipefail -eu

agent_temp_directory="$1"

PATH="${PWD}/bin:${PATH}"

mkdir "${agent_temp_directory}/coverage/"

options=(--venv --venv-system-site-packages --color -v)

ansible-test coverage combine --group-by command --export "${agent_temp_directory}/coverage/" "${options[@]}"

if ansible-test coverage analyze targets generate --help >/dev/null 2>&1; then
    # Only analyze coverage if the installed version of ansible-test supports it.
    # Doing so allows this script to work unmodified for multiple Ansible versions.
    ansible-test coverage analyze targets generate "${agent_temp_directory}/coverage/coverage-analyze-targets.json" "${options[@]}"
fi


@@ -1,64 +0,0 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

"""
Combine coverage data from multiple jobs, keeping the data only from the most recent attempt from each job.
Coverage artifacts must be named using the format: "Coverage $(System.JobAttempt) {StableUniqueNameForEachJob}"
The recommended coverage artifact name format is: Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)
Keep in mind that Azure Pipelines does not enforce unique job display names (only names).
It is up to pipeline authors to avoid name collisions when deviating from the recommended format.
"""

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import re
import shutil
import sys


def main():
    """Main program entry point."""
    source_directory = sys.argv[1]

    if '/ansible_collections/' in os.getcwd():
        output_path = "tests/output"
    else:
        output_path = "test/results"

    destination_directory = os.path.join(output_path, 'coverage')

    if not os.path.exists(destination_directory):
        os.makedirs(destination_directory)

    jobs = {}
    count = 0

    for name in os.listdir(source_directory):
        match = re.search('^Coverage (?P<attempt>[0-9]+) (?P<label>.+)$', name)
        label = match.group('label')
        attempt = int(match.group('attempt'))
        jobs[label] = max(attempt, jobs.get(label, 0))

    for label, attempt in jobs.items():
        name = 'Coverage {attempt} {label}'.format(label=label, attempt=attempt)
        source = os.path.join(source_directory, name)
        source_files = os.listdir(source)

        for source_file in source_files:
            source_path = os.path.join(source, source_file)
            destination_path = os.path.join(destination_directory, source_file + '.' + label)
            print('"%s" -> "%s"' % (source_path, destination_path))
            shutil.copyfile(source_path, destination_path)
            count += 1

    print('Coverage file count: %d' % count)
    print('##vso[task.setVariable variable=coverageFileCount]%d' % count)
    print('##vso[task.setVariable variable=outputPath]%s' % output_path)


if __name__ == '__main__':
    main()


@@ -1,28 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Check the test results and set variables for use in later steps.

set -o pipefail -eu

if [[ "$PWD" =~ /ansible_collections/ ]]; then
    output_path="tests/output"
else
    output_path="test/results"
fi

echo "##vso[task.setVariable variable=outputPath]${output_path}"

if compgen -G "${output_path}"'/junit/*.xml' > /dev/null; then
    echo "##vso[task.setVariable variable=haveTestResults]true"
fi

if compgen -G "${output_path}"'/bot/ansible-test-*' > /dev/null; then
    echo "##vso[task.setVariable variable=haveBotResults]true"
fi

if compgen -G "${output_path}"'/coverage/*' > /dev/null; then
    echo "##vso[task.setVariable variable=haveCoverageData]true"
fi


@@ -1,105 +0,0 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

"""
Upload code coverage reports to codecov.io.
Multiple coverage files from multiple languages are accepted and aggregated after upload.
Python coverage, as well as PowerShell and Python stubs can all be uploaded.
"""

import argparse
import dataclasses
import pathlib
import shutil
import subprocess
import tempfile
import typing as t
import urllib.request


@dataclasses.dataclass(frozen=True)
class CoverageFile:
    name: str
    path: pathlib.Path
    flags: t.List[str]


@dataclasses.dataclass(frozen=True)
class Args:
    dry_run: bool
    path: pathlib.Path


def parse_args() -> Args:
    parser = argparse.ArgumentParser()
    parser.add_argument('-n', '--dry-run', action='store_true')
    parser.add_argument('path', type=pathlib.Path)

    args = parser.parse_args()

    # Store arguments in a typed dataclass
    fields = dataclasses.fields(Args)
    kwargs = {field.name: getattr(args, field.name) for field in fields}

    return Args(**kwargs)


def process_files(directory: pathlib.Path) -> t.Tuple[CoverageFile, ...]:
    processed = []
    for file in directory.joinpath('reports').glob('coverage*.xml'):
        name = file.stem.replace('coverage=', '')

        # Get flags from name
        flags = name.replace('-powershell', '').split('=')  # Drop '-powershell' suffix
        flags = [flag if not flag.startswith('stub') else flag.split('-')[0] for flag in flags]  # Remove "-01" from stub files

        processed.append(CoverageFile(name, file, flags))

    return tuple(processed)


def upload_files(codecov_bin: pathlib.Path, files: t.Tuple[CoverageFile, ...], dry_run: bool = False) -> None:
    for file in files:
        cmd = [
            str(codecov_bin),
            '--name', file.name,
            '--file', str(file.path),
        ]
        for flag in file.flags:
            cmd.extend(['--flags', flag])

        if dry_run:
            print(f'DRY-RUN: Would run command: {cmd}')
            continue

        subprocess.run(cmd, check=True)


def download_file(url: str, dest: pathlib.Path, flags: int, dry_run: bool = False) -> None:
    if dry_run:
        print(f'DRY-RUN: Would download {url} to {dest} and set mode to {flags:o}')
        return

    with urllib.request.urlopen(url) as resp:
        with dest.open('w+b') as f:
            # Read data in chunks rather than all at once
            shutil.copyfileobj(resp, f, 64 * 1024)

    dest.chmod(flags)


def main():
    args = parse_args()
    url = 'https://ansible-ci-files.s3.amazonaws.com/codecov/linux/codecov'

    with tempfile.TemporaryDirectory(prefix='codecov-') as tmpdir:
        codecov_bin = pathlib.Path(tmpdir) / 'codecov'
        download_file(url, codecov_bin, 0o755, args.dry_run)

        files = process_files(args.path)
        upload_files(codecov_bin, files, args.dry_run)


if __name__ == '__main__':
    main()


@@ -1,19 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Generate code coverage reports for uploading to Azure Pipelines and codecov.io.

set -o pipefail -eu

PATH="${PWD}/bin:${PATH}"

if ! ansible-test --help >/dev/null 2>&1; then
    # Install the devel version of ansible-test for generating code coverage reports.
    # This is only used by Ansible Collections, which are typically tested against multiple Ansible versions (in separate jobs).
    # Since a version of ansible-test is required that can work with the output from multiple older releases, the devel version is used.
    pip install https://github.com/ansible/ansible/archive/devel.tar.gz --disable-pip-version-check
fi

ansible-test coverage xml --group-by command --stub --venv --venv-system-site-packages --color -v


@@ -1,38 +0,0 @@
#!/usr/bin/env bash
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# Configure the test environment and run the tests.

set -o pipefail -eu

entry_point="$1"
test="$2"
read -r -a coverage_branches <<< "$3"  # space separated list of branches to run code coverage on for scheduled builds

export COMMIT_MESSAGE
export COMPLETE
export COVERAGE
export IS_PULL_REQUEST

if [ "${SYSTEM_PULLREQUEST_TARGETBRANCH:-}" ]; then
    IS_PULL_REQUEST=true
    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD^2)
else
    IS_PULL_REQUEST=
    COMMIT_MESSAGE=$(git log --format=%B -n 1 HEAD)
fi

COMPLETE=
COVERAGE=

if [ "${BUILD_REASON}" = "Schedule" ]; then
    COMPLETE=yes

    if printf '%s\n' "${coverage_branches[@]}" | grep -q "^${BUILD_SOURCEBRANCHNAME}$"; then
        COVERAGE=yes
    fi
fi

"${entry_point}" "${test}" 2>&1 | "$(dirname "$0")/time-command.py"


@@ -1,29 +0,0 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

"""Prepends a relative timestamp to each input line from stdin and writes it to stdout."""

from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import sys
import time


def main():
    """Main program entry point."""
    start = time.time()

    sys.stdin.reconfigure(errors='surrogateescape')
    sys.stdout.reconfigure(errors='surrogateescape')

    for line in sys.stdin:
        seconds = time.time() - start
        sys.stdout.write('%02d:%02d %s' % (seconds // 60, seconds % 60, line))
        sys.stdout.flush()


if __name__ == '__main__':
    main()


@@ -1,34 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# This template adds a job for processing code coverage data.
# It will upload results to Azure Pipelines and codecov.io.
# Use it from a job stage that completes after all other jobs have completed.
# This can be done by placing it in a separate summary stage that runs after the test stage(s) have completed.

jobs:
  - job: Coverage
    displayName: Code Coverage
    container: default
    workspace:
      clean: all
    steps:
      - checkout: self
        fetchDepth: $(fetchDepth)
        path: $(checkoutPath)
      - task: DownloadPipelineArtifact@2
        displayName: Download Coverage Data
        inputs:
          path: coverage/
          patterns: "Coverage */*=coverage.combined"
      - bash: .azure-pipelines/scripts/combine-coverage.py coverage/
        displayName: Combine Coverage Data
      - bash: .azure-pipelines/scripts/report-coverage.sh
        displayName: Generate Coverage Report
        condition: gt(variables.coverageFileCount, 0)
      - bash: .azure-pipelines/scripts/publish-codecov.py "$(outputPath)"
        displayName: Publish to codecov.io
        condition: gt(variables.coverageFileCount, 0)
        continueOnError: true
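
As a usage note, here is a minimal sketch of how a pipeline consumes this template; it mirrors the Summary stage of azure-pipelines.yml shown above, with the dependsOn list abbreviated:

```yaml
# Sketch only -- mirrors the Summary stage above; list every test stage under dependsOn.
- stage: Summary
  condition: succeededOrFailed()
  dependsOn:
    - Sanity_2_18  # ...one entry per test stage that produces coverage artifacts
  jobs:
    - template: templates/coverage.yml
```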


@@ -1,60 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# This template uses the provided targets and optional groups to generate a matrix which is then passed to the test template.
# If this matrix template does not provide the required functionality, consider using the test template directly instead.

parameters:
  # A required list of dictionaries, one per test target.
  # Each item in the list must contain a "test" or "name" key.
  # Both may be provided. If one is omitted, the other will be used.
  - name: targets
    type: object

  # An optional list of values which will be used to multiply the targets list into a matrix.
  # Values can be strings or numbers.
  - name: groups
    type: object
    default: []

  # An optional format string used to generate the job name.
  # - {0} is the name of an item in the targets list.
  - name: nameFormat
    type: string
    default: "{0}"

  # An optional format string used to generate the test name.
  # - {0} is the name of an item in the targets list.
  - name: testFormat
    type: string
    default: "{0}"

  # An optional format string used to add the group to the job name.
  # {0} is the formatted name of an item in the targets list.
  # {{1}} is the group -- be sure to include the double "{{" and "}}".
  - name: nameGroupFormat
    type: string
    default: "{0} - {{1}}"

  # An optional format string used to add the group to the test name.
  # {0} is the formatted test of an item in the targets list.
  # {{1}} is the group -- be sure to include the double "{{" and "}}".
  - name: testGroupFormat
    type: string
    default: "{0}/{{1}}"

jobs:
  - template: test.yml
    parameters:
      jobs:
        - ${{ if eq(length(parameters.groups), 0) }}:
            - ${{ each target in parameters.targets }}:
                - name: ${{ format(parameters.nameFormat, coalesce(target.name, target.test)) }}
                  test: ${{ format(parameters.testFormat, coalesce(target.test, target.name)) }}

        - ${{ if not(eq(length(parameters.groups), 0)) }}:
            - ${{ each group in parameters.groups }}:
                - ${{ each target in parameters.targets }}:
                    - name: ${{ format(format(parameters.nameGroupFormat, parameters.nameFormat), coalesce(target.name, target.test), group) }}
                      test: ${{ format(format(parameters.testGroupFormat, parameters.testFormat), coalesce(target.test, target.name), group) }}
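
For reference, a minimal sketch of how a stage invokes this template with groups, modeled on the Remote and Docker stages in azure-pipelines.yml above; the stage name and target are illustrative, and the trailing comments show the names the default group format strings would produce:

```yaml
# Illustrative invocation -- stage and target values are hypothetical examples.
- stage: Example_remote
  dependsOn: []
  jobs:
    - template: templates/matrix.yml
      parameters:
        nameFormat: Test {0}
        testFormat: 2.18/{0}
        targets:
          - name: Ubuntu 22.04
            test: ubuntu/22.04
        groups:
          - 1
          - 2
# With the default nameGroupFormat "{0} - {{1}}" and testGroupFormat "{0}/{{1}}" this expands to
# jobs "Test Ubuntu 22.04 - 1" and "Test Ubuntu 22.04 - 2", running tests
# "2.18/ubuntu/22.04/1" and "2.18/ubuntu/22.04/2".
```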


@@ -1,50 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# This template uses the provided list of jobs to create one or more test jobs.
# It can be used directly if needed, or through the matrix template.

parameters:
  # A required list of dictionaries, one per test job.
  # Each item in the list must contain a "job" and "name" key.
  - name: jobs
    type: object

jobs:
  - ${{ each job in parameters.jobs }}:
      - job: test_${{ replace(replace(replace(job.test, '/', '_'), '.', '_'), '-', '_') }}
        displayName: ${{ job.name }}
        container: default
        workspace:
          clean: all
        steps:
          - checkout: self
            fetchDepth: $(fetchDepth)
            path: $(checkoutPath)
          - bash: .azure-pipelines/scripts/run-tests.sh "$(entryPoint)" "${{ job.test }}" "$(coverageBranches)"
            displayName: Run Tests
          - bash: .azure-pipelines/scripts/process-results.sh
            condition: succeededOrFailed()
            displayName: Process Results
          - bash: .azure-pipelines/scripts/aggregate-coverage.sh "$(Agent.TempDirectory)"
            condition: eq(variables.haveCoverageData, 'true')
            displayName: Aggregate Coverage Data
          - task: PublishTestResults@2
            condition: eq(variables.haveTestResults, 'true')
            inputs:
              testResultsFiles: "$(outputPath)/junit/*.xml"
            displayName: Publish Test Results
          - task: PublishPipelineArtifact@1
            condition: eq(variables.haveBotResults, 'true')
            displayName: Publish Bot Results
            inputs:
              targetPath: "$(outputPath)/bot/"
              artifactName: "Bot $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"
          - task: PublishPipelineArtifact@1
            condition: eq(variables.haveCoverageData, 'true')
            displayName: Publish Coverage Data
            inputs:
              targetPath: "$(Agent.TempDirectory)/coverage/"
              artifactName: "Coverage $(System.JobAttempt) $(System.StageDisplayName) $(System.JobDisplayName)"