Mirror of https://github.com/ansible-collections/community.general.git
Synced 2025-06-04 23:39:09 -07:00
[stable-10] Use antsibull-nox instead of extra sanity test runner and extra workflows (#10042)
Use antsibull-nox instead of extra sanity test runner and extra workflows (#10022)
* Use antsibull-nox instead of extra sanity test runner and extra workflows.
* Avoid sys.argv[0].
(cherry picked from commit 3ee55c6828)
This commit is contained in:
parent 11a847a7b5
commit 9dd7be05dc
28 changed files with 302 additions and 655 deletions
@@ -70,7 +70,6 @@ stages:
              - test: 2
              - test: 3
              - test: 4
              - test: extra
      - stage: Sanity_2_18
        displayName: Sanity 2.18
        dependsOn: []
.github/workflows/import-galaxy.yml (vendored, 20 changed lines)
@@ -1,20 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

name: import-galaxy
'on':
  # Run CI against all pushes (direct commits, also merged PRs) to main, and all Pull Requests
  push:
    branches:
      - main
      - stable-*
  pull_request:

jobs:
  import-galaxy:
    permissions:
      contents: read
    name: Test to import built collection artifact with Galaxy importer
    uses: ansible-community/github-action-test-galaxy-import/.github/workflows/test-galaxy-import.yml@main
.github/workflows/nox.yml (vendored, new file, 28 changed lines)
@@ -0,0 +1,28 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

name: nox
'on':
  push:
    branches:
      - main
      - stable-*
  pull_request:
  # Run CI once per day (at 08:00 UTC)
  schedule:
    - cron: '0 8 * * *'
  workflow_dispatch:

jobs:
  nox:
    runs-on: ubuntu-latest
    name: "Run extra sanity tests"
    steps:
      - name: Check out collection
        uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: Run nox
        uses: ansible-community/antsibull-nox@main
.github/workflows/reuse.yml (vendored, 35 changed lines)
@@ -1,35 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

name: Verify REUSE

on:
  push:
    branches:
      - main
      - stable-*
  pull_request:
    types: [opened, synchronize, reopened]
    branches:
      - main
      - stable-*
  # Run CI once per day (at 07:30 UTC)
  schedule:
    - cron: '30 7 * * *'

jobs:
  check:
    permissions:
      contents: read
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
        with:
          persist-credentials: false
          ref: ${{ github.event.pull_request.head.sha || '' }}

      - name: REUSE Compliance Check
        uses: fsfe/reuse-action@v5
@@ -9,6 +9,7 @@ SPDX-License-Identifier: GPL-3.0-or-later
[](https://docs.ansible.com/ansible/latest/collections/community/general/)
[](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
[](https://github.com/ansible-collections/community.general/actions)
[](https://github.com/ansible-collections/community.general/actions)
[](https://codecov.io/gh/ansible-collections/community.general)
[](https://api.reuse.software/info/github.com/ansible-collections/community.general)
antsibull-nox.toml (new file, 48 changed lines)
@@ -0,0 +1,48 @@
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-FileCopyrightText: 2025 Felix Fontein <felix@fontein.de>

[collection_sources]
"ansible.posix" = "git+https://github.com/ansible-collections/ansible.posix.git,main"
"community.crypto" = "git+https://github.com/ansible-collections/community.crypto.git,main"
"community.docker" = "git+https://github.com/ansible-collections/community.docker.git,main"
"community.internal_test_tools" = "git+https://github.com/ansible-collections/community.internal_test_tools.git,main"

[sessions]

[sessions.docs_check]
validate_collection_refs="all"

[sessions.license_check]

[sessions.extra_checks]
run_no_unwanted_files = true
no_unwanted_files_module_extensions = [".py"]
no_unwanted_files_yaml_extensions = [".yml"]
run_action_groups = true

[[sessions.extra_checks.action_groups_config]]
name = "consul"
pattern = "^consul_.*$"
exclusions = [
    "consul_acl_bootstrap",
    "consul_kv",
]
doc_fragment = "community.general.consul.actiongroup_consul"

[[sessions.extra_checks.action_groups_config]]
name = "keycloak"
pattern = "^keycloak_.*$"
exclusions = [
    "keycloak_realm_info",
]
doc_fragment = "community.general.keycloak.actiongroup_keycloak"

[[sessions.extra_checks.action_groups_config]]
name = "proxmox"
pattern = "^proxmox(_.*)?$"
exclusions = []
doc_fragment = "community.general.proxmox.actiongroup_proxmox"

[sessions.build_import_check]
run_galaxy_importer = true
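The three [[sessions.extra_checks.action_groups_config]] tables carry the same pattern/exclusions/doc_fragment data that the removed tests/sanity/extra/action-group.py (shown further below) kept in its ACTION_GROUPS dict. As a rough, hypothetical sketch that is not part of this commit, the configuration can be read back with the Python standard library:

import tomllib  # stdlib TOML parser, Python 3.11+

with open("antsibull-nox.toml", "rb") as f:
    config = tomllib.load(f)

for group in config["sessions"]["extra_checks"]["action_groups_config"]:
    # Each table provides a group name, a module-name regex, explicit
    # exclusions, and the doc fragment that marks group membership.
    print(group["name"], group["pattern"], group["exclusions"], group["doc_fragment"])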
noxfile.py (new file, 38 changed lines)
@@ -0,0 +1,38 @@
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
# SPDX-FileCopyrightText: 2025 Felix Fontein <felix@fontein.de>

# /// script
# dependencies = ["nox>=2025.02.09", "antsibull-nox"]
# ///

import sys

import nox


try:
    import antsibull_nox
except ImportError:
    print("You need to install antsibull-nox in the same Python environment as nox.")
    sys.exit(1)


antsibull_nox.load_antsibull_nox_toml()


@nox.session(name="aliases", python=False, default=True)
def aliases(session: nox.Session) -> None:
    session.run("python", "tests/sanity/extra/aliases.py")


@nox.session(name="botmeta", default=True)
def botmeta(session: nox.Session) -> None:
    session.install("PyYAML", "voluptuous")
    session.run("python", "tests/sanity/extra/botmeta.py")


# Allow running the noxfile with `python noxfile.py`, `pipx run noxfile.py`, or similar.
# Requires nox >= 2025.02.09
if __name__ == "__main__":
    nox.main()
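Only the aliases and botmeta checks remain as hand-written scripts; every other extra check moves into the antsibull-nox.toml sessions above. As a hedged sketch of how a further custom session would slot into this noxfile (the session name and the yamllint tool are illustrative assumptions, not part of this commit):

import nox

@nox.session(name="extra-yaml-lint", default=False)  # hypothetical session
def extra_yaml_lint(session: nox.Session) -> None:
    # nox provisions a virtualenv per session; install the tool into it...
    session.install("yamllint")
    # ...then run it. Selected on demand with `nox -e extra-yaml-lint`.
    session.run("yamllint", ".github/")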
@@ -1,12 +0,0 @@
{
    "include_symlinks": true,
    "prefixes": [
        "meta/runtime.yml",
        "plugins/modules/",
        "tests/sanity/extra/action-group."
    ],
    "output": "path-message",
    "requirements": [
        "pyyaml"
    ]
}
@@ -1,3 +0,0 @@
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
SPDX-FileCopyrightText: Ansible Project
@@ -1,134 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2024, Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
"""Make sure all modules that should show up in the action groups actually do."""

from __future__ import annotations

import os
import re
import yaml


ACTION_GROUPS = {
    # The format is as follows:
    # * 'pattern': a regular expression matching all module names potentially belonging to the action group;
    # * 'exclusions': a list of modules that are not part of the action group; all other modules matching 'pattern' must be part of it;
    # * 'doc_fragment': the docs fragment that documents membership of the action group.
    'consul': {
        'pattern': re.compile('^consul_.*$'),
        'exclusions': [
            'consul_acl_bootstrap',
            'consul_kv',
        ],
        'doc_fragment': 'community.general.consul.actiongroup_consul',
    },
    'keycloak': {
        'pattern': re.compile('^keycloak_.*$'),
        'exclusions': [
            'keycloak_realm_info',
        ],
        'doc_fragment': 'community.general.keycloak.actiongroup_keycloak',
    },
    'proxmox': {
        'pattern': re.compile('^proxmox(_.*)?$'),
        'exclusions': [],
        'doc_fragment': 'community.general.proxmox.actiongroup_proxmox',
    },
}


def main():
    """Main entry point."""

    # Load redirects
    meta_runtime = 'meta/runtime.yml'
    self_path = 'tests/sanity/extra/action-group.py'
    try:
        with open(meta_runtime, 'rb') as f:
            data = yaml.safe_load(f)
        action_groups = data['action_groups']
    except Exception as exc:
        print(f'{meta_runtime}: cannot load action groups: {exc}')
        return

    for action_group in action_groups:
        if action_group not in ACTION_GROUPS:
            print(f'{meta_runtime}: found unknown action group {action_group!r}; likely {self_path} needs updating')
    for action_group, action_group_data in list(ACTION_GROUPS.items()):
        if action_group not in action_groups:
            print(f'{meta_runtime}: cannot find action group {action_group!r}; likely {self_path} needs updating')

    modules_directory = 'plugins/modules/'
    modules_suffix = '.py'

    for file in os.listdir(modules_directory):
        if not file.endswith(modules_suffix):
            continue
        module_name = file[:-len(modules_suffix)]

        for action_group, action_group_data in ACTION_GROUPS.items():
            action_group_content = action_groups.get(action_group) or []
            path = os.path.join(modules_directory, file)

            if not action_group_data['pattern'].match(module_name):
                if module_name in action_group_content:
                    print(f'{path}: module is in action group {action_group!r} despite not matching its pattern as defined in {self_path}')
                continue

            should_be_in_action_group = module_name not in action_group_data['exclusions']

            if should_be_in_action_group:
                if module_name not in action_group_content:
                    print(f'{meta_runtime}: module {module_name!r} is not part of {action_group!r} action group')
                else:
                    action_group_content.remove(module_name)

            documentation = []
            in_docs = False
            with open(path, 'r', encoding='utf-8') as f:
                for line in f:
                    if line.startswith('DOCUMENTATION ='):
                        in_docs = True
                    elif line.startswith(("'''", '"""')) and in_docs:
                        in_docs = False
                    elif in_docs:
                        documentation.append(line)
            if in_docs:
                print(f'{path}: cannot find DOCUMENTATION end')
            if not documentation:
                print(f'{path}: cannot find DOCUMENTATION')
                continue

            try:
                docs = yaml.safe_load('\n'.join(documentation))
                if not isinstance(docs, dict):
                    raise Exception('is not a top-level dictionary')
            except Exception as exc:
                print(f'{path}: cannot load DOCUMENTATION as YAML: {exc}')
                continue

            docs_fragments = docs.get('extends_documentation_fragment') or []
            is_in_action_group = action_group_data['doc_fragment'] in docs_fragments

            if should_be_in_action_group != is_in_action_group:
                if should_be_in_action_group:
                    print(
                        f'{path}: module does not document itself as part of action group {action_group!r}, but it should;'
                        f' you need to add {action_group_data["doc_fragment"]} to "extends_documentation_fragment" in DOCUMENTATION'
                    )
                else:
                    print(f'{path}: module documents itself as part of action group {action_group!r}, but it should not be')

    for action_group, action_group_data in ACTION_GROUPS.items():
        action_group_content = action_groups.get(action_group) or []
        for module_name in action_group_content:
            print(
                f'{meta_runtime}: module {module_name} mentioned in {action_group!r} action group'
                f' does not exist or does not match pattern defined in {self_path}'
            )


if __name__ == '__main__':
    main()
@@ -1,11 +0,0 @@
{
    "include_symlinks": false,
    "prefixes": [
        ".azure-pipelines/azure-pipelines.yml",
        "tests/integration/targets/"
    ],
    "output": "path-message",
    "requirements": [
        "PyYAML"
    ]
}
@@ -1,3 +0,0 @@
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
SPDX-FileCopyrightText: Ansible Project
tests/sanity/extra/aliases.py (executable file → normal file, 13 changed lines)
@@ -6,6 +6,7 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import glob
import sys

import yaml
@@ -13,9 +14,6 @@ import yaml

def main():
    """Main entry point."""
    paths = sys.argv[1:] or sys.stdin.read().splitlines()
    paths = [path for path in paths if path.endswith('/aliases')]

    with open('.azure-pipelines/azure-pipelines.yml', 'rb') as f:
        azp = yaml.safe_load(f)
@@ -27,6 +25,9 @@ def main():
            for group in job['parameters']['groups']:
                allowed_targets.add('azp/posix/{0}'.format(group))

    paths = glob.glob("tests/integration/targets/*/aliases")

    has_errors = False
    for path in paths:
        targets = []
        skip = False
@@ -56,10 +57,14 @@
            if 'targets/setup_' in path:
                continue
            print('%s: %s' % (path, 'found no targets'))
            has_errors = True
        for target in targets:
            if target not in allowed_targets:
                print('%s: %s' % (path, 'found invalid target "{0}"'.format(target)))
                has_errors = True

    return 1 if has_errors else 0


if __name__ == '__main__':
    main()
    sys.exit(main())
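Two things change in the hunks above: target discovery switches from argv/stdin to a glob over tests/integration/targets/*/aliases, and main() now returns a status that sys.exit() propagates, so a failing check also fails the nox "aliases" session. The exit-code pattern in isolation (a generic sketch, not this file verbatim):

import sys


def main() -> int:
    has_errors = False
    # ... run the checks, setting has_errors on any finding ...
    return 1 if has_errors else 0


if __name__ == '__main__':
    # Propagate the result as the process exit status so the caller
    # (here the nox session) can distinguish success from failure.
    sys.exit(main())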
@@ -1,8 +0,0 @@
{
    "include_symlinks": false,
    "output": "path-line-column-message",
    "requirements": [
        "PyYAML",
        "voluptuous==0.12.1"
    ]
}
@@ -1,3 +0,0 @@
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
SPDX-FileCopyrightText: Ansible Project
tests/sanity/extra/botmeta.py (executable file → normal file, 344 changed lines)
@@ -54,184 +54,194 @@ IGNORE_NO_MAINTAINERS = [
    'plugins/filter/random_mac.py',
]

FILENAME = '.github/BOTMETA.yml'

LIST_ENTRIES = frozenset(('supershipit', 'maintainers', 'labels', 'keywords', 'notify', 'ignore'))


class BotmetaCheck:
    def __init__(self):
        self.errors: list[str] = []
        self.botmeta_filename = '.github/BOTMETA.yml'
        self.list_entries = frozenset(('supershipit', 'maintainers', 'labels', 'keywords', 'notify', 'ignore'))
        self.author_regex = re.compile(r'^\w.*\(@([\w-]+)\)(?![\w.])')

AUTHOR_REGEX = re.compile(r'^\w.*\(@([\w-]+)\)(?![\w.])')

    def report_error(self, error: str) -> None:
        self.errors.append(error)

    def read_authors(self, filename: str) -> list[str]:
        data = {}
        try:
            documentation = []
            in_docs = False
            with open(filename, 'r', encoding='utf-8') as f:
                for line in f:
                    if line.startswith('DOCUMENTATION ='):
                        in_docs = True
                    elif line.startswith(("'''", '"""')) and in_docs:
                        in_docs = False
                    elif in_docs:
                        documentation.append(line)
            if in_docs:
                self.report_error(f'{filename}: cannot find DOCUMENTATION end')
                return []
            if not documentation:
                self.report_error(f'{filename}: cannot find DOCUMENTATION')
                return []

def read_authors(filename):
    data = {}
    try:
        documentation = []
        in_docs = False
        with open(filename, 'r', encoding='utf-8') as f:
            for line in f:
                if line.startswith('DOCUMENTATION ='):
                    in_docs = True
                elif line.startswith(("'''", '"""')) and in_docs:
                    in_docs = False
                elif in_docs:
                    documentation.append(line)
        if in_docs:
            print(f'{filename}: cannot find DOCUMENTATION end')
            return []
        if not documentation:
            print(f'{filename}: cannot find DOCUMENTATION')

            data = yaml.safe_load('\n'.join(documentation))

        except Exception as e:
            self.report_error(f'{filename}:0:0: Cannot load DOCUMENTATION: {e}')
            return []

        data = yaml.safe_load('\n'.join(documentation))
            author = data.get('author') or []
            if isinstance(author, str):
                author = [author]
            return author

    except Exception as e:
        print(f'{filename}:0:0: Cannot load DOCUMENTATION: {e}')
        return []

    def extract_author_name(self, author: str) -> str | None:
        m = self.author_regex.match(author)
        if m:
            return m.group(1)
        if author == 'Ansible Core Team':
            return '$team_ansible_core'
        return None

    author = data.get('author') or []
    if isinstance(author, str):
        author = [author]
    return author

    def validate(self, filename: str, filedata: dict) -> None:
        if not filename.startswith('plugins/'):
            return
        if filename.startswith(('plugins/doc_fragments/', 'plugins/module_utils/')):
            return
        # Compile list of all active and inactive maintainers
        all_maintainers = filedata['maintainers'] + filedata['ignore']
        if not filename.startswith(('plugins/action/', 'plugins/doc_fragments/', 'plugins/filter/', 'plugins/module_utils/', 'plugins/plugin_utils/')):
            maintainers = self.read_authors(filename)
            for maintainer in maintainers:
                maintainer = self.extract_author_name(maintainer)
                if maintainer is not None and maintainer not in all_maintainers:
                    others = ', '.join(all_maintainers)
                    msg = f'Author {maintainer} not mentioned as active or inactive maintainer for {filename} (mentioned are: {others})'
                    self.report_error(f'{self.botmeta_filename}:0:0: {msg}')
        should_have_no_maintainer = filename in IGNORE_NO_MAINTAINERS
        if not all_maintainers and not should_have_no_maintainer:
            self.report_error(f'{self.botmeta_filename}:0:0: No (active or inactive) maintainer mentioned for {filename}')
        if all_maintainers and should_have_no_maintainer:
            own_path = os.path.relpath(__file__, os.getcwd())
            self.report_error(f'{self.botmeta_filename}:0:0: Please remove {filename} from the ignore list of {own_path}')

    def run(self) -> None:
        try:
            with open(self.botmeta_filename, 'rb') as f:
                botmeta = yaml.safe_load(f)
        except yaml.error.MarkedYAMLError as ex:
            msg = re.sub(r'\s+', ' ', str(ex))
            self.report_error(f'{self.botmeta_filename}:{ex.context_mark.line + 1}:{ex.context_mark.column + 1}: YAML load failed: {msg}')
            return
        except Exception as ex:  # pylint: disable=broad-except
            msg = re.sub(r'\s+', ' ', str(ex))
            self.report_error(f'{self.botmeta_filename}:0:0: YAML load failed: {msg}')
            return

        # Validate schema

        MacroSchema = Schema({
            (str): Any(str, None),
        }, extra=PREVENT_EXTRA)

        FilesSchema = Schema({
            (str): {
                ('supershipit'): str,
                ('support'): Any('community'),
                ('maintainers'): str,
                ('labels'): str,
                ('keywords'): str,
                ('notify'): str,
                ('ignore'): str,
            },
        }, extra=PREVENT_EXTRA)

        schema = Schema({
            ('notifications'): bool,
            ('automerge'): bool,
            ('macros'): MacroSchema,
            ('files'): FilesSchema,
        }, extra=PREVENT_EXTRA)

        try:
            schema(botmeta)
        except MultipleInvalid as ex:
            for error in ex.errors:
                # No way to get line/column numbers
                self.report_error(f'{self.botmeta_filename}:0:0: {humanize_error(botmeta, error)}')
            return

        # Preprocess (substitute macros, convert to lists)
        macros = botmeta.get('macros') or {}
        macro_re = re.compile(r'\$([a-zA-Z_]+)')

        def convert_macros(text, macros):
            def f(m):
                macro = m.group(1)
                replacement = (macros[macro] or '')
                if macro == 'team_ansible_core':
                    return f'$team_ansible_core {replacement}'
                return replacement

            return macro_re.sub(f, text)

        files = {}
        try:
            for file, filedata in (botmeta.get('files') or {}).items():
                file = convert_macros(file, macros)
                filedata = {k: convert_macros(v, macros) for k, v in filedata.items()}
                files[file] = filedata
                for k, v in filedata.items():
                    if k in self.list_entries:
                        filedata[k] = v.split()
        except KeyError as e:
            self.report_error(f'{self.botmeta_filename}:0:0: Found unknown macro {e}')
            return

        # Scan all files
        unmatched = set(files)
        for dirs in ('docs/docsite/rst', 'plugins', 'tests', 'changelogs'):
            for dirpath, _dirnames, filenames in os.walk(dirs):
                for file in sorted(filenames):
                    if file.endswith('.pyc'):
                        continue
                    filename = os.path.join(dirpath, file)
                    if os.path.islink(filename):
                        continue
                    if os.path.isfile(filename):
                        matching_files = []
                        for file, filedata in files.items():
                            if filename.startswith(file):
                                matching_files.append((file, filedata))
                                if file in unmatched:
                                    unmatched.remove(file)
                        if not matching_files:
                            self.report_error(f'{self.botmeta_filename}:0:0: Did not find any entry for {filename}')

                        matching_files.sort(key=lambda kv: kv[0])
                        filedata = {}
                        for k in self.list_entries:
                            filedata[k] = []
                        for dummy, data in matching_files:
                            for k, v in data.items():
                                if k in self.list_entries:
                                    v = filedata[k] + v
                                filedata[k] = v
                        self.validate(filename, filedata)

        for file in unmatched:
            self.report_error(f'{self.botmeta_filename}:0:0: Entry {file} was not used')


def extract_author_name(author):
    m = AUTHOR_REGEX.match(author)
    if m:
        return m.group(1)
    if author == 'Ansible Core Team':
        return '$team_ansible_core'
    return None


def validate(filename, filedata):
    if not filename.startswith('plugins/'):
        return
    if filename.startswith(('plugins/doc_fragments/', 'plugins/module_utils/')):
        return
    # Compile list of all active and inactive maintainers
    all_maintainers = filedata['maintainers'] + filedata['ignore']
    if not filename.startswith(('plugins/action/', 'plugins/doc_fragments/', 'plugins/filter/', 'plugins/module_utils/', 'plugins/plugin_utils/')):
        maintainers = read_authors(filename)
        for maintainer in maintainers:
            maintainer = extract_author_name(maintainer)
            if maintainer is not None and maintainer not in all_maintainers:
                others = ', '.join(all_maintainers)
                msg = f'Author {maintainer} not mentioned as active or inactive maintainer for {filename} (mentioned are: {others})'
                print(f'{FILENAME}:0:0: {msg}')
    should_have_no_maintainer = filename in IGNORE_NO_MAINTAINERS
    if not all_maintainers and not should_have_no_maintainer:
        print(f'{FILENAME}:0:0: No (active or inactive) maintainer mentioned for {filename}')
    if all_maintainers and should_have_no_maintainer:
        print(f'{FILENAME}:0:0: Please remove {filename} from the ignore list of {sys.argv[0]}')


def main():
def main() -> int:
    """Main entry point."""
    try:
        with open(FILENAME, 'rb') as f:
            botmeta = yaml.safe_load(f)
    except yaml.error.MarkedYAMLError as ex:
        msg = re.sub(r'\s+', ' ', str(ex))
        print(f'{FILENAME}:{ex.context_mark.line + 1}:{ex.context_mark.column + 1}: YAML load failed: {msg}')
        return
    except Exception as ex:  # pylint: disable=broad-except
        msg = re.sub(r'\s+', ' ', str(ex))
        print(f'{FILENAME}:0:0: YAML load failed: {msg}')
        return

    # Validate schema

    MacroSchema = Schema({
        (str): Any(str, None),
    }, extra=PREVENT_EXTRA)

    FilesSchema = Schema({
        (str): {
            ('supershipit'): str,
            ('support'): Any('community'),
            ('maintainers'): str,
            ('labels'): str,
            ('keywords'): str,
            ('notify'): str,
            ('ignore'): str,
        },
    }, extra=PREVENT_EXTRA)

    schema = Schema({
        ('notifications'): bool,
        ('automerge'): bool,
        ('macros'): MacroSchema,
        ('files'): FilesSchema,
    }, extra=PREVENT_EXTRA)

    try:
        schema(botmeta)
    except MultipleInvalid as ex:
        for error in ex.errors:
            # No way to get line/column numbers
            print(f'{FILENAME}:0:0: {humanize_error(botmeta, error)}')
        return

    # Preprocess (substitute macros, convert to lists)
    macros = botmeta.get('macros') or {}
    macro_re = re.compile(r'\$([a-zA-Z_]+)')

    def convert_macros(text, macros):
        def f(m):
            macro = m.group(1)
            replacement = (macros[macro] or '')
            if macro == 'team_ansible_core':
                return f'$team_ansible_core {replacement}'
            return replacement

        return macro_re.sub(f, text)

    files = {}
    try:
        for file, filedata in (botmeta.get('files') or {}).items():
            file = convert_macros(file, macros)
            filedata = {k: convert_macros(v, macros) for k, v in filedata.items()}
            files[file] = filedata
            for k, v in filedata.items():
                if k in LIST_ENTRIES:
                    filedata[k] = v.split()
    except KeyError as e:
        print(f'{FILENAME}:0:0: Found unknown macro {e}')
        return

    # Scan all files
    unmatched = set(files)
    for dirs in ('docs/docsite/rst', 'plugins', 'tests', 'changelogs'):
        for dirpath, _dirnames, filenames in os.walk(dirs):
            for file in sorted(filenames):
                if file.endswith('.pyc'):
                    continue
                filename = os.path.join(dirpath, file)
                if os.path.islink(filename):
                    continue
                if os.path.isfile(filename):
                    matching_files = []
                    for file, filedata in files.items():
                        if filename.startswith(file):
                            matching_files.append((file, filedata))
                            if file in unmatched:
                                unmatched.remove(file)
                    if not matching_files:
                        print(f'{FILENAME}:0:0: Did not find any entry for {filename}')

                    matching_files.sort(key=lambda kv: kv[0])
                    filedata = {}
                    for k in LIST_ENTRIES:
                        filedata[k] = []
                    for dummy, data in matching_files:
                        for k, v in data.items():
                            if k in LIST_ENTRIES:
                                v = filedata[k] + v
                            filedata[k] = v
                    validate(filename, filedata)

    for file in unmatched:
        print(f'{FILENAME}:0:0: Entry {file} was not used')
    check = BotmetaCheck()
    check.run()
    for error in sorted(check.errors):
        print(error)
    return 1 if check.errors else 0


if __name__ == '__main__':
    main()
    sys.exit(main())
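The refactor above replaces module-level functions that print() findings directly with a BotmetaCheck class that collects them, so output can be sorted and the exit code derived from whether anything was reported. The shape of that pattern, reduced to a minimal sketch (generic names, not the actual file):

class Check:
    def __init__(self) -> None:
        self.errors: list[str] = []

    def report_error(self, error: str) -> None:
        # Collect instead of printing so results can be sorted later.
        self.errors.append(error)


def main() -> int:
    check = Check()
    # ... validations call check.report_error(...) as they find problems ...
    for error in sorted(check.errors):
        print(error)
    return 1 if check.errors else 0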
@@ -1,13 +0,0 @@
{
    "include_symlinks": false,
    "prefixes": [
        "docs/docsite/",
        "plugins/",
        "roles/"
    ],
    "output": "path-line-column-message",
    "requirements": [
        "ansible-core",
        "antsibull-docs"
    ]
}
@@ -1,3 +0,0 @@
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
SPDX-FileCopyrightText: Ansible Project
@@ -1,29 +0,0 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
"""Check extra collection docs with antsibull-docs."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import sys
import subprocess


def main():
    """Main entry point."""
    env = os.environ.copy()
    suffix = ':{env}'.format(env=env["ANSIBLE_COLLECTIONS_PATH"]) if 'ANSIBLE_COLLECTIONS_PATH' in env else ''
    env['ANSIBLE_COLLECTIONS_PATH'] = '{root}{suffix}'.format(root=os.path.dirname(os.path.dirname(os.path.dirname(os.getcwd()))), suffix=suffix)
    p = subprocess.run(
        ['antsibull-docs', 'lint-collection-docs', '--plugin-docs', '--skip-rstcheck', '.'],
        env=env,
        check=False,
    )
    if p.returncode not in (0, 3):
        print('{0}:0:0: unexpected return code {1}'.format(sys.argv[0], p.returncode))


if __name__ == '__main__':
    main()
@@ -1,4 +0,0 @@
{
    "include_symlinks": false,
    "output": "path-message"
}
@@ -1,3 +0,0 @@
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
SPDX-FileCopyrightText: Ansible Project
@@ -1,110 +0,0 @@
#!/usr/bin/env python
# Copyright (c) 2022, Felix Fontein <felix@fontein.de>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
"""Prevent files without a correct license identifier from being added to the source tree."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import glob
import sys


def format_license_list(licenses):
    if not licenses:
        return '(empty)'
    return ', '.join(['"%s"' % license for license in licenses])


def find_licenses(filename, relax=False):
    spdx_license_identifiers = []
    other_license_identifiers = []
    has_copyright = False
    try:
        with open(filename, 'r', encoding='utf-8') as f:
            for line in f:
                line = line.rstrip()
                if 'Copyright ' in line:
                    has_copyright = True
                if 'Copyright: ' in line:
                    print('%s: found copyright line with "Copyright:". Please remove the colon.' % (filename, ))
                if 'SPDX-FileCopyrightText: ' in line:
                    has_copyright = True
                idx = line.find('SPDX-License-Identifier: ')
                if idx >= 0:
                    lic_id = line[idx + len('SPDX-License-Identifier: '):]
                    spdx_license_identifiers.extend(lic_id.split(' OR '))
                if 'GNU General Public License' in line:
                    if 'v3.0+' in line:
                        other_license_identifiers.append('GPL-3.0-or-later')
                    if 'version 3 or later' in line:
                        other_license_identifiers.append('GPL-3.0-or-later')
                if 'Simplified BSD License' in line:
                    other_license_identifiers.append('BSD-2-Clause')
                if 'Apache License 2.0' in line:
                    other_license_identifiers.append('Apache-2.0')
                if 'PSF License' in line or 'Python-2.0' in line:
                    other_license_identifiers.append('PSF-2.0')
                if 'MIT License' in line:
                    other_license_identifiers.append('MIT')
    except Exception as exc:
        print('%s: error while processing file: %s' % (filename, exc))
    if len(set(spdx_license_identifiers)) < len(spdx_license_identifiers):
        print('%s: found identical SPDX-License-Identifier values' % (filename, ))
    if other_license_identifiers and set(other_license_identifiers) != set(spdx_license_identifiers):
        print('%s: SPDX-License-Identifier yielded the license list %s, while manual guessing yielded the license list %s' % (
            filename, format_license_list(spdx_license_identifiers), format_license_list(other_license_identifiers)))
    if not has_copyright and not relax:
        print('%s: found no copyright notice' % (filename, ))
    return sorted(spdx_license_identifiers)


def main():
    """Main entry point."""
    paths = sys.argv[1:] or sys.stdin.read().splitlines()

    # The following paths are allowed to have no license identifier
    no_comments_allowed = [
        'changelogs/fragments/*.yml',
        'changelogs/fragments/*.yaml',
    ]

    # These files are completely ignored
    ignore_paths = [
        '.ansible-test-timeout.json',
        '.reuse/dep5',
        'LICENSES/*.txt',
        'COPYING',
    ]

    no_comments_allowed = [fn for pattern in no_comments_allowed for fn in glob.glob(pattern)]
    ignore_paths = [fn for pattern in ignore_paths for fn in glob.glob(pattern)]

    valid_licenses = [license_file[len('LICENSES/'):-len('.txt')] for license_file in glob.glob('LICENSES/*.txt')]

    for path in paths:
        if path.startswith('./'):
            path = path[2:]
        if path in ignore_paths or path.startswith('tests/output/'):
            continue
        if os.stat(path).st_size == 0:
            continue
        if not path.endswith('.license') and os.path.exists(path + '.license'):
            path = path + '.license'
        valid_licenses_for_path = valid_licenses
        if path.startswith('plugins/') and not path.startswith(('plugins/modules/', 'plugins/module_utils/', 'plugins/doc_fragments/')):
            valid_licenses_for_path = [license for license in valid_licenses if license == 'GPL-3.0-or-later']
        licenses = find_licenses(path, relax=path in no_comments_allowed)
        if not licenses:
            if path not in no_comments_allowed:
                print('%s: must have at least one license' % (path, ))
        else:
            for license in licenses:
                if license not in valid_licenses_for_path:
                    print('%s: found not allowed license "%s", must be one of %s' % (
                        path, license, format_license_list(valid_licenses_for_path)))


if __name__ == '__main__':
    main()
@@ -1,3 +0,0 @@
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
SPDX-License-Identifier: GPL-3.0-or-later
SPDX-FileCopyrightText: 2022, Felix Fontein <felix@fontein.de>
@@ -1,7 +0,0 @@
{
    "include_symlinks": true,
    "prefixes": [
        "plugins/"
    ],
    "output": "path-message"
}
|
@ -1,3 +0,0 @@
|
|||
GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
SPDX-License-Identifier: GPL-3.0-or-later
|
||||
SPDX-FileCopyrightText: Ansible Project
|
|
@@ -1,58 +0,0 @@
#!/usr/bin/env python
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
"""Prevent unwanted files from being added to the source tree."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import os.path
import sys


def main():
    """Main entry point."""
    paths = sys.argv[1:] or sys.stdin.read().splitlines()

    allowed_extensions = (
        '.cs',
        '.ps1',
        '.psm1',
        '.py',
    )

    skip_paths = set([
    ])

    skip_directories = (
    )

    yaml_directories = (
        'plugins/test/',
        'plugins/filter/',
    )

    for path in paths:
        if path in skip_paths:
            continue

        if any(path.startswith(skip_directory) for skip_directory in skip_directories):
            continue

        if os.path.islink(path):
            print('%s: is a symbolic link' % (path, ))
        elif not os.path.isfile(path):
            print('%s: is not a regular file' % (path, ))

        ext = os.path.splitext(path)[1]

        if ext in ('.yml', ) and any(path.startswith(yaml_directory) for yaml_directory in yaml_directories):
            continue

        if ext not in allowed_extensions:
            print('%s: extension must be one of: %s' % (path, ', '.join(allowed_extensions)))


if __name__ == '__main__':
    main()
@@ -16,11 +16,6 @@ else
    base_branch=""
fi

if [ "${group}" == "extra" ]; then
    ../internal_test_tools/tools/run.py --color --bot --junit
    exit
fi

case "${group}" in
    1) options=(--skip-test pylint --skip-test ansible-doc --skip-test validate-modules) ;;
    2) options=( --test ansible-doc --test validate-modules) ;;
@@ -28,17 +23,6 @@ case "${group}" in
    4) options=(--test pylint --exclude plugins/modules/) ;;
esac

# allow collection migration sanity tests for groups 3 and 4 to pass without updating this script during migration
network_path="lib/ansible/modules/network/"

if [ -d "${network_path}" ]; then
    if [ "${group}" -eq 3 ]; then
        options+=(--exclude "${network_path}")
    elif [ "${group}" -eq 4 ]; then
        options+=("${network_path}")
    fi
fi

# shellcheck disable=SC2086
ansible-test sanity --color -v --junit ${COVERAGE:+"$COVERAGE"} ${CHANGED:+"$CHANGED"} \
    --docker --base-branch "${base_branch}" \
@@ -67,10 +67,6 @@ fi

export ANSIBLE_COLLECTIONS_PATHS="${PWD}/../../../"

if [ "${test}" == "sanity/extra" ]; then
    retry pip install junit-xml --disable-pip-version-check
fi

# START: HACK install dependencies

# Nothing further should be added to this list.
@@ -79,7 +75,7 @@ retry git clone --depth=1 --single-branch https://github.com/ansible-collections
# NOTE: we're installing with git to work around Galaxy being a huge PITA (https://github.com/ansible/galaxy/issues/2429)
# retry ansible-galaxy -vvv collection install community.internal_test_tools

if [ "${script}" != "sanity" ] && [ "${script}" != "units" ] && [ "${test}" != "sanity/extra" ]; then
if [ "${script}" != "sanity" ] && [ "${script}" != "units" ]; then
    # To prevent Python dependencies on other collections only install other collections for integration tests
    retry git clone --depth=1 --single-branch https://github.com/ansible-collections/ansible.posix.git "${ANSIBLE_COLLECTIONS_PATHS}/ansible_collections/ansible/posix"
    retry git clone --depth=1 --single-branch https://github.com/ansible-collections/community.crypto.git "${ANSIBLE_COLLECTIONS_PATHS}/ansible_collections/community/crypto"