mirror of
https://github.com/ansible-collections/community.general.git
synced 2025-10-23 12:33:59 -07:00
Compare commits
134 commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d483fd9482 | ||
|
|
8da9cf3276 | ||
|
|
3c5c3a0113 |
||
|
|
7def57a71f |
||
|
|
e5930aabcb |
||
|
|
48bfba435f |
||
|
|
9740b76f3c |
||
|
|
24cf561135 |
||
|
|
61324ed9eb |
||
|
|
99336ba5fe |
||
|
|
9d99ccef2d |
||
|
|
a146eb3118 |
||
|
|
c7f7bd6050 |
||
|
|
54099d77ff |
||
|
|
ee07d8320a |
||
|
|
0729f0c262 |
||
|
|
57cd48f3cf |
||
|
|
afd2151672 |
||
|
|
ea9b272043 |
||
|
|
60addb332d |
||
|
|
1ade62c5bc |
||
|
|
7c8cc96d8b | ||
|
|
ca177a0ceb |
||
|
|
c0e769e5f5 |
||
|
|
585dbc3171 |
||
|
|
b400491ef3 |
||
|
|
490baed566 |
||
|
|
811c4a304a |
||
|
|
c0fde76b79 |
||
|
|
16c7615b82 |
||
|
|
474364c862 |
||
|
|
1da5f7dc54 |
||
|
|
559c914e36 |
||
|
|
91cca4ae49 |
||
|
|
82a9db9738 |
||
|
|
3fd84d71b8 |
||
|
|
a17124f3c4 |
||
|
|
efc2cbf840 |
||
|
|
aa136aca4c |
||
|
|
a1ca89b058 |
||
|
|
dd70419d18 |
||
|
|
ef5ac023cf |
||
|
|
8bc5494ad5 |
||
|
|
d95a821d5b |
||
|
|
b7697fe3de |
||
|
|
16e05ab5f3 |
||
|
|
5cf7ce705a |
||
|
|
c8b8668212 |
||
|
|
2d450a5a36 |
||
|
|
e08412c345 |
||
|
|
c355f93d62 |
||
|
|
80206b5a53 | ||
|
|
e978fd4d61 | ||
|
|
6fc8492ecf | ||
|
|
95beb452a8 |
||
|
|
c10e9e2650 |
||
|
|
ac35bf4acb |
||
|
|
50b9855ace |
||
|
|
2ab26db197 |
||
|
|
5fcf5d0c8b |
||
|
|
0f0ad6b6d1 |
||
|
|
95f3109ddc |
||
|
|
6037c5d1e6 |
||
|
|
a70d9773dd |
||
|
|
bc50b48205 |
||
|
|
02e6a8608f |
||
|
|
82f4b51873 |
||
|
|
589e8fd5e1 |
||
|
|
58f74b96ef |
||
|
|
1489c080a7 |
||
|
|
6f845f61f0 |
||
|
|
c17f5ff3e8 |
||
|
|
ff21afb227 |
||
|
|
c1d6e5c3c2 |
||
|
|
377b5d4ccd | ||
|
|
f3f7b2776f | ||
|
|
df8bfad9b9 |
||
|
|
8a231e4b36 |
||
|
|
671f850069 |
||
|
|
2fa36592e4 | ||
|
|
51d704bfe3 |
||
|
|
2b0e335752 |
||
|
|
cc28cde3a2 |
||
|
|
2d616bf4d1 |
||
|
|
25d9ab8dcd |
||
|
|
9abda18071 |
||
|
|
406fa12142 |
||
|
|
caaebb38e7 |
||
|
|
2bc74f4f04 |
||
|
|
e1e89f7735 |
||
|
|
efedd0d6e2 |
||
|
|
8079aea1ee |
||
|
|
ee7fdf5f8c |
||
|
|
ced1baad63 |
||
|
|
a0d4ee4fc1 |
||
|
|
d930c8d877 |
||
|
|
352e91a389 |
||
|
|
4b7554445b |
||
|
|
3a456a645d |
||
|
|
6f4580ebd9 |
||
|
|
8d83557e52 |
||
|
|
5ebd980e26 |
||
|
|
17447d2a84 |
||
|
|
ffee01cd9c |
||
|
|
38b4e316ae |
||
|
|
b52a6f3611 |
||
|
|
2435fb3f30 |
||
|
|
d6d9f84b0a |
||
|
|
4b04e3cc32 |
||
|
|
c681249364 |
||
|
|
57a4195b0d |
||
|
|
41a23f093d |
||
|
|
0bd085714f | ||
|
|
a4be229f67 | ||
|
|
9c4487ebc5 |
||
|
|
09ea441316 |
||
|
|
fef6abc8c8 |
||
|
|
618e567377 |
||
|
|
246abffce5 | ||
|
|
076ebb4b2d |
||
|
|
4948b521a3 |
||
|
|
e9ec26ff1b |
||
|
|
72d4476813 |
||
|
|
e96bfd07b4 |
||
|
|
c6d0419460 |
||
|
|
081b4068a0 |
||
|
|
8fba9ca751 |
||
|
|
fad4c2d956 | ||
|
|
6065dd0f18 | ||
|
|
a411ff5ea8 | ||
|
|
42b245eabf | ||
|
|
9a676bb88f | ||
|
|
cd26aec2f3 | ||
|
|
e9327a0464 |
363 changed files with 7511 additions and 1576 deletions
|
|
@ -29,6 +29,7 @@ schedules:
|
|||
always: true
|
||||
branches:
|
||||
include:
|
||||
- stable-6
|
||||
- stable-5
|
||||
- cron: 0 11 * * 0
|
||||
displayName: Weekly (old stable branches)
|
||||
|
|
@ -188,6 +189,24 @@ stages:
|
|||
- test: 3.5
|
||||
|
||||
## Remote
|
||||
- stage: Remote_devel_extra_vms
|
||||
displayName: Remote devel extra VMs
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- template: templates/matrix.yml
|
||||
parameters:
|
||||
testFormat: devel/{0}
|
||||
targets:
|
||||
- name: Alpine 3.17
|
||||
test: alpine/3.17
|
||||
# - name: Fedora 37
|
||||
# test: fedora/37
|
||||
# - name: Ubuntu 20.04
|
||||
# test: ubuntu/20.04
|
||||
- name: Ubuntu 22.04
|
||||
test: ubuntu/22.04
|
||||
groups:
|
||||
- vm
|
||||
- stage: Remote_devel
|
||||
displayName: Remote devel
|
||||
dependsOn: []
|
||||
|
|
@ -200,12 +219,12 @@ stages:
|
|||
test: macos/12.0
|
||||
- name: RHEL 7.9
|
||||
test: rhel/7.9
|
||||
- name: RHEL 9.0
|
||||
test: rhel/9.0
|
||||
- name: FreeBSD 12.3
|
||||
test: freebsd/12.3
|
||||
- name: RHEL 9.1
|
||||
test: rhel/9.1
|
||||
- name: FreeBSD 13.1
|
||||
test: freebsd/13.1
|
||||
- name: FreeBSD 12.4
|
||||
test: freebsd/12.4
|
||||
groups:
|
||||
- 1
|
||||
- 2
|
||||
|
|
@ -220,8 +239,8 @@ stages:
|
|||
targets:
|
||||
- name: RHEL 9.0
|
||||
test: rhel/9.0
|
||||
- name: FreeBSD 13.1
|
||||
test: freebsd/13.1
|
||||
- name: FreeBSD 12.3
|
||||
test: freebsd/12.3
|
||||
groups:
|
||||
- 1
|
||||
- 2
|
||||
|
|
@ -288,8 +307,8 @@ stages:
|
|||
targets:
|
||||
- name: CentOS 7
|
||||
test: centos7
|
||||
- name: Fedora 36
|
||||
test: fedora36
|
||||
- name: Fedora 37
|
||||
test: fedora37
|
||||
- name: openSUSE 15
|
||||
test: opensuse15
|
||||
- name: Ubuntu 20.04
|
||||
|
|
@ -310,8 +329,8 @@ stages:
|
|||
parameters:
|
||||
testFormat: 2.14/linux/{0}
|
||||
targets:
|
||||
- name: Ubuntu 20.04
|
||||
test: ubuntu2004
|
||||
- name: Fedora 36
|
||||
test: fedora36
|
||||
groups:
|
||||
- 1
|
||||
- 2
|
||||
|
|
@ -385,7 +404,7 @@ stages:
|
|||
- name: ArchLinux
|
||||
test: archlinux/3.10
|
||||
- name: CentOS Stream 8
|
||||
test: centos-stream8/3.8
|
||||
test: centos-stream8/3.9
|
||||
groups:
|
||||
- 1
|
||||
- 2
|
||||
|
|
@ -458,6 +477,7 @@ stages:
|
|||
- Units_2_12
|
||||
- Units_2_13
|
||||
- Units_2_14
|
||||
- Remote_devel_extra_vms
|
||||
- Remote_devel
|
||||
- Remote_2_11
|
||||
- Remote_2_12
|
||||
|
|
@ -469,10 +489,11 @@ stages:
|
|||
- Docker_2_13
|
||||
- Docker_2_14
|
||||
- Docker_community_devel
|
||||
- Generic_devel
|
||||
- Generic_2_11
|
||||
- Generic_2_12
|
||||
- Generic_2_13
|
||||
- Generic_2_14
|
||||
# Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
|
||||
# - Generic_devel
|
||||
# - Generic_2_11
|
||||
# - Generic_2_12
|
||||
# - Generic_2_13
|
||||
# - Generic_2_14
|
||||
jobs:
|
||||
- template: templates/coverage.yml
|
||||
|
|
|
|||
26
.github/BOTMETA.yml
vendored
26
.github/BOTMETA.yml
vendored
|
|
@ -265,6 +265,8 @@ files:
|
|||
maintainers: delineaKrehl tylerezimmerman
|
||||
$module_utils/:
|
||||
labels: module_utils
|
||||
$module_utils/deps.py:
|
||||
maintainers: russoz
|
||||
$module_utils/gconftool2.py:
|
||||
labels: gconftool2
|
||||
maintainers: russoz
|
||||
|
|
@ -279,9 +281,14 @@ files:
|
|||
maintainers: $team_huawei
|
||||
$module_utils/identity/keycloak/keycloak.py:
|
||||
maintainers: $team_keycloak
|
||||
$module_utils/identity/keycloak/keycloak_clientsecret.py:
|
||||
maintainers: $team_keycloak fynncfchen johncant
|
||||
$module_utils/ipa.py:
|
||||
labels: ipa
|
||||
maintainers: $team_ipa
|
||||
$module_utils/jenkins.py:
|
||||
labels: jenkins
|
||||
maintainers: russoz
|
||||
$module_utils/manageiq.py:
|
||||
labels: manageiq
|
||||
maintainers: $team_manageiq
|
||||
|
|
@ -302,6 +309,9 @@ files:
|
|||
$module_utils/pipx.py:
|
||||
labels: pipx
|
||||
maintainers: russoz
|
||||
$module_utils/puppet.py:
|
||||
labels: puppet
|
||||
maintainers: russoz
|
||||
$module_utils/pure.py:
|
||||
labels: pure pure_storage
|
||||
maintainers: $team_purestorage
|
||||
|
|
@ -313,6 +323,8 @@ files:
|
|||
$module_utils/scaleway.py:
|
||||
labels: cloud scaleway
|
||||
maintainers: $team_scaleway
|
||||
$module_utils/ssh.py:
|
||||
maintainers: russoz
|
||||
$module_utils/storage/hpe3par/hpe3par.py:
|
||||
maintainers: farhan7500 gautamphegde
|
||||
$module_utils/utm_utils.py:
|
||||
|
|
@ -665,6 +677,10 @@ files:
|
|||
maintainers: Gaetan2907
|
||||
$modules/keycloak_clientscope.py:
|
||||
maintainers: Gaetan2907
|
||||
$modules/keycloak_clientsecret_info.py:
|
||||
maintainers: fynncfchen johncant
|
||||
$modules/keycloak_clientsecret_regenerate.py:
|
||||
maintainers: fynncfchen johncant
|
||||
$modules/keycloak_group.py:
|
||||
maintainers: adamgoossens
|
||||
$modules/keycloak_identity_provider.py:
|
||||
|
|
@ -814,6 +830,10 @@ files:
|
|||
maintainers: shane-walker xcambar
|
||||
$modules/nsupdate.py:
|
||||
maintainers: nerzhul
|
||||
$modules/ocapi_command.py:
|
||||
maintainers: $team_wdc
|
||||
$modules/ocapi_info.py:
|
||||
maintainers: $team_wdc
|
||||
$modules/oci_vcn.py:
|
||||
maintainers: $team_oracle rohitChaware
|
||||
$modules/odbc.py:
|
||||
|
|
@ -822,7 +842,8 @@ files:
|
|||
maintainers: marc-sensenich
|
||||
$modules/ohai.py:
|
||||
labels: ohai
|
||||
maintainers: $team_ansible_core mpdehaan
|
||||
maintainers: $team_ansible_core
|
||||
ignore: mpdehaan
|
||||
$modules/omapi_host.py:
|
||||
maintainers: amasolov nerzhul
|
||||
$modules/one_:
|
||||
|
|
@ -1072,7 +1093,8 @@ files:
|
|||
$modules/sapcar_extract.py:
|
||||
maintainers: RainerLeber
|
||||
$modules/say.py:
|
||||
maintainers: $team_ansible_core mpdehaan
|
||||
maintainers: $team_ansible_core
|
||||
ignore: mpdehaan
|
||||
$modules/scaleway_:
|
||||
maintainers: $team_scaleway
|
||||
$modules/scaleway_compute_private_network.py:
|
||||
|
|
|
|||
93
.github/workflows/docs-pr.yml
vendored
93
.github/workflows/docs-pr.yml
vendored
|
|
@ -1,93 +0,0 @@
|
|||
---
|
||||
# Copyright (c) Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
name: Collection Docs
|
||||
concurrency:
|
||||
group: docs-${{ github.head_ref }}
|
||||
cancel-in-progress: true
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [opened, synchronize, reopened, closed]
|
||||
paths-ignore:
|
||||
- '.azure-pipelines/**'
|
||||
- 'changelogs/**'
|
||||
- 'meta/**'
|
||||
- 'tests/**'
|
||||
|
||||
jobs:
|
||||
build-docs:
|
||||
permissions:
|
||||
contents: read
|
||||
name: Build Ansible Docs
|
||||
uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-pr.yml@main
|
||||
with:
|
||||
init-fail-on-error: true
|
||||
provide-link-targets: |
|
||||
ansible_collections.ansible.builtin.dict2items_filter
|
||||
ansible_collections.ansible.builtin.items_lookup
|
||||
ansible_collections.ansible.builtin.path_join_filter
|
||||
ansible_collections.community.kubevirt.kubevirt_cdi_upload_module
|
||||
ansible_collections.community.kubevirt.kubevirt_inventory
|
||||
ansible_collections.community.kubevirt.kubevirt_preset_module
|
||||
ansible_collections.community.kubevirt.kubevirt_pvc_module
|
||||
ansible_collections.community.kubevirt.kubevirt_rs_module
|
||||
ansible_collections.community.kubevirt.kubevirt_template_module
|
||||
ansible_collections.community.kubevirt.kubevirt_vm_module
|
||||
ansible_collections.infoblox.nios_modules.nios_a_record_module
|
||||
ansible_collections.infoblox.nios_modules.nios_aaaa_record_module
|
||||
ansible_collections.infoblox.nios_modules.nios_cname_record_module
|
||||
ansible_collections.infoblox.nios_modules.nios_dns_view_module
|
||||
ansible_collections.infoblox.nios_modules.nios_fixed_address_module
|
||||
ansible_collections.infoblox.nios_modules.nios_host_record_module
|
||||
ansible_collections.infoblox.nios_modules.nios_lookup_lookup
|
||||
ansible_collections.infoblox.nios_modules.nios_member_module
|
||||
ansible_collections.infoblox.nios_modules.nios_mx_record_module
|
||||
ansible_collections.infoblox.nios_modules.nios_naptr_record_module
|
||||
ansible_collections.infoblox.nios_modules.nios_network_module
|
||||
ansible_collections.infoblox.nios_modules.nios_network_view_module
|
||||
ansible_collections.infoblox.nios_modules.nios_next_ip_lookup
|
||||
ansible_collections.infoblox.nios_modules.nios_next_network_lookup
|
||||
ansible_collections.infoblox.nios_modules.nios_nsgroup_module
|
||||
ansible_collections.infoblox.nios_modules.nios_ptr_record_module
|
||||
ansible_collections.infoblox.nios_modules.nios_srv_record_module
|
||||
ansible_collections.infoblox.nios_modules.nios_txt_record_module
|
||||
ansible_collections.infoblox.nios_modules.nios_zone_module
|
||||
|
||||
comment:
|
||||
permissions:
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
needs: build-docs
|
||||
name: PR comments
|
||||
steps:
|
||||
- name: PR comment
|
||||
uses: ansible-community/github-docs-build/actions/ansible-docs-build-comment@main
|
||||
with:
|
||||
body-includes: '## Docs Build'
|
||||
reactions: heart
|
||||
action: ${{ needs.build-docs.outputs.changed != 'true' && 'remove' || '' }}
|
||||
on-closed-body: |
|
||||
## Docs Build 📝
|
||||
|
||||
This PR is closed and any previously published docsite has been unpublished.
|
||||
on-merged-body: |
|
||||
## Docs Build 📝
|
||||
|
||||
Thank you for contribution!✨
|
||||
|
||||
This PR has been merged and your docs changes will be incorporated when they are next published.
|
||||
body: |
|
||||
## Docs Build 📝
|
||||
|
||||
Thank you for contribution!✨
|
||||
|
||||
The docsite for **this PR** is available for download as an artifact from this run:
|
||||
${{ needs.build-docs.outputs.artifact-url }}
|
||||
|
||||
File changes:
|
||||
|
||||
${{ needs.build-docs.outputs.diff-files-rendered }}
|
||||
|
||||
${{ needs.build-docs.outputs.diff-rendered }}
|
||||
255
CHANGELOG.rst
255
CHANGELOG.rst
|
|
@ -6,13 +6,219 @@ Community General Release Notes
|
|||
|
||||
This changelog describes changes after version 5.0.0.
|
||||
|
||||
v6.0.0-a1
|
||||
=========
|
||||
v6.3.0
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
This is a pre-release for the upcoming 6.0.0 major release. The main objective of this pre-release is to make it possible to test the large stuctural changes by flattening the directory structure. See the corresponding entry in the changelog for details.
|
||||
Regular bugfix and feature release.
|
||||
|
||||
Minor Changes
|
||||
-------------
|
||||
|
||||
- apache2_module - add module argument ``warn_mpm_absent`` to control whether warning are raised in some edge cases (https://github.com/ansible-collections/community.general/pull/5793).
|
||||
- bitwarden lookup plugin - can now retrieve secrets from custom fields (https://github.com/ansible-collections/community.general/pull/5694).
|
||||
- bitwarden lookup plugin - implement filtering results by ``collection_id`` parameter (https://github.com/ansible-collections/community.general/issues/5849).
|
||||
- dig lookup plugin - support CAA record type (https://github.com/ansible-collections/community.general/pull/5913).
|
||||
- gitlab_project - add ``builds_access_level``, ``container_registry_access_level`` and ``forking_access_level`` options (https://github.com/ansible-collections/community.general/pull/5706).
|
||||
- gitlab_runner - add new boolean option ``access_level_on_creation``. It controls, whether the value of ``access_level`` is used for runner registration or not. The option ``access_level`` has been ignored on registration so far and was only used on updates (https://github.com/ansible-collections/community.general/issues/5907, https://github.com/ansible-collections/community.general/pull/5908).
|
||||
- ilo_redfish_utils module utils - change implementation of DNS Server IP and NTP Server IP update (https://github.com/ansible-collections/community.general/pull/5804).
|
||||
- ipa_group - allow to add and remove external users with the ``external_user`` option (https://github.com/ansible-collections/community.general/pull/5897).
|
||||
- iptables_state - minor refactoring within the module (https://github.com/ansible-collections/community.general/pull/5844).
|
||||
- one_vm - add a new ``updateconf`` option which implements the ``one.vm.updateconf`` API call (https://github.com/ansible-collections/community.general/pull/5812).
|
||||
- opkg - refactored module to use ``CmdRunner`` for executing ``opkg`` (https://github.com/ansible-collections/community.general/pull/5718).
|
||||
- redhat_subscription - adds ``token`` parameter for subscription-manager authentication using Red Hat API token (https://github.com/ansible-collections/community.general/pull/5725).
|
||||
- snap - minor refactor when executing module (https://github.com/ansible-collections/community.general/pull/5773).
|
||||
- snap_alias - refactored module to use ``CmdRunner`` to execute ``snap`` (https://github.com/ansible-collections/community.general/pull/5486).
|
||||
- sudoers - add ``setenv`` parameters to support passing environment variables via sudo. (https://github.com/ansible-collections/community.general/pull/5883)
|
||||
|
||||
Breaking Changes / Porting Guide
|
||||
--------------------------------
|
||||
|
||||
- ModuleHelper module utils - when the module sets output variables named ``msg``, ``exception``, ``output``, ``vars``, or ``changed``, the actual output will prefix those names with ``_`` (underscore symbol) only when they clash with output variables generated by ModuleHelper itself, which only occurs when handling exceptions. Please note that this breaking change does not require a new major release since before this release, it was not possible to add such variables to the output `due to a bug <https://github.com/ansible-collections/community.general/pull/5755>`__ (https://github.com/ansible-collections/community.general/pull/5765).
|
||||
|
||||
Deprecated Features
|
||||
-------------------
|
||||
|
||||
- consul - deprecate using parameters unused for ``state=absent`` (https://github.com/ansible-collections/community.general/pull/5772).
|
||||
- gitlab_runner - the default of the new option ``access_level_on_creation`` will change from ``false`` to ``true`` in community.general 7.0.0. This will cause ``access_level`` to be used during runner registration as well, and not only during updates (https://github.com/ansible-collections/community.general/pull/5908).
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- ModuleHelper - fix bug when adjusting the name of reserved output variables (https://github.com/ansible-collections/community.general/pull/5755).
|
||||
- alternatives - support subcommands on Fedora 37, which uses ``follower`` instead of ``slave`` (https://github.com/ansible-collections/community.general/pull/5794).
|
||||
- bitwarden lookup plugin - clarify what to do, if the bitwarden vault is not unlocked (https://github.com/ansible-collections/community.general/pull/5811).
|
||||
- dig lookup plugin - correctly handle DNSKEY record type's ``algorithm`` field (https://github.com/ansible-collections/community.general/pull/5914).
|
||||
- gem - fix force parameter not being passed to gem command when uninstalling (https://github.com/ansible-collections/community.general/pull/5822).
|
||||
- gem - fix hang due to interactive prompt for confirmation on specific version uninstall (https://github.com/ansible-collections/community.general/pull/5751).
|
||||
- gitlab_deploy_key - also update ``title`` and not just ``can_push`` (https://github.com/ansible-collections/community.general/pull/5888).
|
||||
- keycloak_user_federation - fixes federation creation issue. When a new federation was created and at the same time a default / standard mapper was also changed / updated the creation process failed as a bad None set variable led to a bad malformed url request (https://github.com/ansible-collections/community.general/pull/5750).
|
||||
- keycloak_user_federation - fixes idempotency detection issues. In some cases the module could fail to properly detect already existing user federations because of a buggy seemingly superflous extra query parameter (https://github.com/ansible-collections/community.general/pull/5732).
|
||||
- loganalytics callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- logdna callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- logstash callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- nsupdate - fix zone lookup. The SOA record for an existing zone is returned as an answer RR and not as an authority RR (https://github.com/ansible-collections/community.general/issues/5817, https://github.com/ansible-collections/community.general/pull/5818).
|
||||
- proxmox_disk - fixed issue with read timeout on import action (https://github.com/ansible-collections/community.general/pull/5803).
|
||||
- redfish_utils - removed basic auth HTTP header when performing a GET on the service root resource and when performing a POST to the session collection (https://github.com/ansible-collections/community.general/issues/5886).
|
||||
- splunk callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- sumologic callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- syslog_json callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- terraform - fix ``current`` workspace never getting appended to the ``all`` key in the ``workspace_ctf`` object (https://github.com/ansible-collections/community.general/pull/5735).
|
||||
- terraform - fix ``terraform init`` failure when there are multiple workspaces on the remote backend and when ``default`` workspace is missing by setting ``TF_WORKSPACE`` environmental variable to the value of ``workspace`` when used (https://github.com/ansible-collections/community.general/pull/5735).
|
||||
- terraform module - disable ANSI escape sequences during validation phase (https://github.com/ansible-collections/community.general/pull/5843).
|
||||
- xml - fixed a bug where empty ``children`` list would not be set (https://github.com/ansible-collections/community.general/pull/5808).
|
||||
|
||||
New Modules
|
||||
-----------
|
||||
|
||||
- ocapi_command - Manages Out-Of-Band controllers using Open Composable API (OCAPI)
|
||||
- ocapi_info - Manages Out-Of-Band controllers using Open Composable API (OCAPI)
|
||||
|
||||
v6.2.0
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Regular bugfix and feature release.
|
||||
|
||||
Minor Changes
|
||||
-------------
|
||||
|
||||
- opkg - allow installing a package in a certain version (https://github.com/ansible-collections/community.general/pull/5688).
|
||||
- proxmox - added new module parameter ``tags`` for use with PVE 7+ (https://github.com/ansible-collections/community.general/pull/5714).
|
||||
- puppet - refactored module to use ``CmdRunner`` for executing ``puppet`` (https://github.com/ansible-collections/community.general/pull/5612).
|
||||
- redhat_subscription - add a ``server_proxy_scheme`` parameter to configure the scheme for the proxy server (https://github.com/ansible-collections/community.general/pull/5662).
|
||||
- ssh_config - refactor code to module util to fix sanity check (https://github.com/ansible-collections/community.general/pull/5720).
|
||||
- sudoers - adds ``host`` parameter for setting hostname restrictions in sudoers rules (https://github.com/ansible-collections/community.general/issues/5702).
|
||||
|
||||
Deprecated Features
|
||||
-------------------
|
||||
|
||||
- manageiq_policies - deprecate ``state=list`` in favour of using ``community.general.manageiq_policies_info`` (https://github.com/ansible-collections/community.general/pull/5721).
|
||||
- rax - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_cbs - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_cbs_attachments - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_cdb - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_cdb_database - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_cdb_user - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_clb - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_clb_nodes - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_clb_ssl - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_dns - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_dns_record - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_facts - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_files - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_files_objects - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_identity - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_keypair - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_meta - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_mon_alarm - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_mon_check - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_mon_entity - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_mon_notification - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_mon_notification_plan - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_network - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_queue - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_scaling_group - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_scaling_policy - module relies on deprecates library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- ansible_galaxy_install - set default to raise exception if command's return code is different from zero (https://github.com/ansible-collections/community.general/pull/5680).
|
||||
- ansible_galaxy_install - try ``C.UTF-8`` and then fall back to ``en_US.UTF-8`` before failing (https://github.com/ansible-collections/community.general/pull/5680).
|
||||
- gitlab_group_variables - fix dropping variables accidentally when GitLab introduced new properties (https://github.com/ansible-collections/community.general/pull/5667).
|
||||
- gitlab_project_variables - fix dropping variables accidentally when GitLab introduced new properties (https://github.com/ansible-collections/community.general/pull/5667).
|
||||
- lxc_container - fix the arguments of the lxc command which broke the creation and cloning of containers (https://github.com/ansible-collections/community.general/issues/5578).
|
||||
- opkg - fix issue that ``force=reinstall`` would not reinstall an existing package (https://github.com/ansible-collections/community.general/pull/5705).
|
||||
- proxmox_disk - fixed possible issues with redundant ``vmid`` parameter (https://github.com/ansible-collections/community.general/issues/5492, https://github.com/ansible-collections/community.general/pull/5672).
|
||||
- proxmox_nic - fixed possible issues with redundant ``vmid`` parameter (https://github.com/ansible-collections/community.general/issues/5492, https://github.com/ansible-collections/community.general/pull/5672).
|
||||
- unixy callback plugin - fix typo introduced when updating to use Ansible's configuration manager for handling options (https://github.com/ansible-collections/community.general/issues/5600).
|
||||
|
||||
v6.1.0
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Regular bugfix and feature release.
|
||||
|
||||
Minor Changes
|
||||
-------------
|
||||
|
||||
- cmd_runner module utils - ``cmd_runner_fmt.as_bool()`` can now take an extra parameter to format when value is false (https://github.com/ansible-collections/community.general/pull/5647).
|
||||
- gconftool2 - refactor using ``ModuleHelper`` and ``CmdRunner`` (https://github.com/ansible-collections/community.general/pull/5545).
|
||||
- java_certs - add more detailed error output when extracting certificate from PKCS12 fails (https://github.com/ansible-collections/community.general/pull/5550).
|
||||
- jenkins_plugin - refactor code to module util to fix sanity check (https://github.com/ansible-collections/community.general/pull/5565).
|
||||
- lxd_project - refactored code out to module utils to clear sanity check (https://github.com/ansible-collections/community.general/pull/5549).
|
||||
- nmap inventory plugin - add new options ``udp_scan``, ``icmp_timestamp``, and ``dns_resolve`` for different types of scans (https://github.com/ansible-collections/community.general/pull/5566).
|
||||
- rax_scaling_group - refactored out code to the ``rax`` module utils to clear the sanity check (https://github.com/ansible-collections/community.general/pull/5563).
|
||||
- redfish_command - add ``PerformRequestedOperations`` command to perform any operations necessary to continue the update flow (https://github.com/ansible-collections/community.general/issues/4276).
|
||||
- redfish_command - add ``update_apply_time`` to ``SimpleUpdate`` command (https://github.com/ansible-collections/community.general/issues/3910).
|
||||
- redfish_command - add ``update_status`` to output of ``SimpleUpdate`` command to allow a user monitor the update in progress (https://github.com/ansible-collections/community.general/issues/4276).
|
||||
- redfish_info - add ``GetUpdateStatus`` command to check the progress of a previous update request (https://github.com/ansible-collections/community.general/issues/4276).
|
||||
- redfish_utils module utils - added PUT (``put_request()``) functionality (https://github.com/ansible-collections/community.general/pull/5490).
|
||||
- slack - add option ``prepend_hash`` which allows to control whether a ``#`` is prepended to ``channel_id``. The current behavior (value ``auto``) is to prepend ``#`` unless some specific prefixes are found. That list of prefixes is incomplete, and there does not seem to exist a documented condition on when exactly ``#`` must not be prepended. We recommend to explicitly set ``prepend_hash=always`` or ``prepend_hash=never`` to avoid any ambiguity (https://github.com/ansible-collections/community.general/pull/5629).
|
||||
- spotinst_aws_elastigroup - add ``elements`` attribute when missing in ``list`` parameters (https://github.com/ansible-collections/community.general/pull/5553).
|
||||
- ssh_config - add ``host_key_algorithms`` option (https://github.com/ansible-collections/community.general/pull/5605).
|
||||
- udm_share - added ``elements`` attribute to ``list`` type parameters (https://github.com/ansible-collections/community.general/pull/5557).
|
||||
- udm_user - add ``elements`` attribute when missing in ``list`` parameters (https://github.com/ansible-collections/community.general/pull/5559).
|
||||
|
||||
Deprecated Features
|
||||
-------------------
|
||||
|
||||
- The ``sap`` modules ``sapcar_extract``, ``sap_task_list_execute``, and ``hana_query``, will be removed from this collection in community.general 7.0.0 and replaced with redirects to ``community.sap_libs``. If you want to continue using these modules, make sure to also install ``community.sap_libs`` (it is part of the Ansible package) (https://github.com/ansible-collections/community.general/pull/5614).
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- chroot connection plugin - add ``inventory_hostname`` to vars under ``remote_addr``. This is needed for compatibility with ansible-core 2.13 (https://github.com/ansible-collections/community.general/pull/5570).
|
||||
- cmd_runner module utils - fixed bug when handling default cases in ``cmd_runner_fmt.as_map()`` (https://github.com/ansible-collections/community.general/pull/5538).
|
||||
- cmd_runner module utils - formatting arguments ``cmd_runner_fmt.as_fixed()`` was expecting an non-existing argument (https://github.com/ansible-collections/community.general/pull/5538).
|
||||
- keycloak_client_rolemapping - calculate ``proposed`` and ``after`` return values properly (https://github.com/ansible-collections/community.general/pull/5619).
|
||||
- keycloak_client_rolemapping - remove only listed mappings with ``state=absent`` (https://github.com/ansible-collections/community.general/pull/5619).
|
||||
- proxmox inventory plugin - fix bug while templating when using templates for the ``url``, ``user``, ``password``, ``token_id``, or ``token_secret`` options (https://github.com/ansible-collections/community.general/pull/5640).
|
||||
- proxmox inventory plugin - handle tags delimited by semicolon instead of comma, which happens from Proxmox 7.3 on (https://github.com/ansible-collections/community.general/pull/5602).
|
||||
- redhat_subscription - do not ignore ``consumer_name`` and other variables if ``activationkey`` is specified (https://github.com/ansible-collections/community.general/issues/3486, https://github.com/ansible-collections/community.general/pull/5627).
|
||||
- redhat_subscription - do not pass arguments to ``subscription-manager register`` for things already configured; now a specified ``rhsm_baseurl`` is properly set for subscription-manager (https://github.com/ansible-collections/community.general/pull/5583).
|
||||
- unixy callback plugin - fix plugin to work with ansible-core 2.14 by using Ansible's configuration manager for handling options (https://github.com/ansible-collections/community.general/issues/5600).
|
||||
- vdo - now uses ``yaml.safe_load()`` to parse command output instead of the deprecated ``yaml.load()`` which is potentially unsafe. Using ``yaml.load()`` without explicitely setting a ``Loader=`` is also an error in pyYAML 6.0 (https://github.com/ansible-collections/community.general/pull/5632).
|
||||
- vmadm - fix for index out of range error in ``get_vm_uuid`` (https://github.com/ansible-collections/community.general/pull/5628).
|
||||
|
||||
New Modules
|
||||
-----------
|
||||
|
||||
- gitlab_project_badge - Manage project badges on GitLab Server
|
||||
- keycloak_clientsecret_info - Retrieve client secret via Keycloak API
|
||||
- keycloak_clientsecret_regenerate - Regenerate Keycloak client secret via Keycloak API
|
||||
|
||||
v6.0.1
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Bugfix release for Ansible 7.0.0.
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- dependent lookup plugin - avoid warning on deprecated parameter for ``Templar.template()`` (https://github.com/ansible-collections/community.general/pull/5543).
|
||||
- jenkins_build - fix the logical flaw when deleting a Jenkins build (https://github.com/ansible-collections/community.general/pull/5514).
|
||||
- one_vm - avoid splitting labels that are ``None`` (https://github.com/ansible-collections/community.general/pull/5489).
|
||||
- onepassword_raw - add missing parameter to plugin documentation (https://github.com/ansible-collections/community.general/issues/5506).
|
||||
- proxmox_disk - avoid duplicate ``vmid`` reference (https://github.com/ansible-collections/community.general/issues/5492, https://github.com/ansible-collections/community.general/pull/5493).
|
||||
|
||||
v6.0.0
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
New major release of community.general with lots of bugfixes, new features, some removed deprecated features, and some other breaking changes. Please check the coresponding sections of the changelog for more details.
|
||||
|
||||
Major Changes
|
||||
-------------
|
||||
|
|
@ -33,6 +239,7 @@ Minor Changes
|
|||
- alternatives - add ``state=absent`` to be able to remove an alternative (https://github.com/ansible-collections/community.general/pull/4654).
|
||||
- alternatives - add ``subcommands`` parameter (https://github.com/ansible-collections/community.general/pull/4654).
|
||||
- ansible_galaxy_install - minor refactoring using latest ``ModuleHelper`` updates (https://github.com/ansible-collections/community.general/pull/4752).
|
||||
- ansible_galaxy_install - refactored module to use ``CmdRunner`` to execute ``ansible-galaxy`` (https://github.com/ansible-collections/community.general/pull/5477).
|
||||
- apk - add ``world`` parameter for supporting a custom world file (https://github.com/ansible-collections/community.general/pull/4976).
|
||||
- bitwarden lookup plugin - add option ``search`` to search for other attributes than name (https://github.com/ansible-collections/community.general/pull/5297).
|
||||
- cartesian lookup plugin - start using Ansible's configuration manager to parse options (https://github.com/ansible-collections/community.general/pull/5440).
|
||||
|
|
@ -41,6 +248,7 @@ Minor Changes
|
|||
- consul - adds ``ttl`` parameter for session (https://github.com/ansible-collections/community.general/pull/4996).
|
||||
- consul - minor refactoring (https://github.com/ansible-collections/community.general/pull/5367).
|
||||
- consul_session - adds ``token`` parameter for session (https://github.com/ansible-collections/community.general/pull/5193).
|
||||
- cpanm - refactored module to use ``CmdRunner`` to execute ``cpanm`` (https://github.com/ansible-collections/community.general/pull/5485).
|
||||
- cpanm - using ``do_raise()`` to raise exceptions in ``ModuleHelper`` derived modules (https://github.com/ansible-collections/community.general/pull/4674).
|
||||
- credstash lookup plugin - start using Ansible's configuration manager to parse options (https://github.com/ansible-collections/community.general/pull/5440).
|
||||
- dependent lookup plugin - start using Ansible's configuration manager to parse options (https://github.com/ansible-collections/community.general/pull/5440).
|
||||
|
|
@ -65,9 +273,11 @@ Minor Changes
|
|||
- gitlab_user - minor refactor when checking for installed dependency (https://github.com/ansible-collections/community.general/pull/5259).
|
||||
- hiera lookup plugin - start using Ansible's configuration manager to parse options. The Hiera executable and config file can now also be passed as lookup parameters (https://github.com/ansible-collections/community.general/pull/5440).
|
||||
- homebrew, homebrew_tap - added Homebrew on Linux path to defaults (https://github.com/ansible-collections/community.general/pull/5241).
|
||||
- hponcfg - refactored module to use ``CmdRunner`` to execute ``hponcfg`` (https://github.com/ansible-collections/community.general/pull/5483).
|
||||
- keycloak_* modules - add ``http_agent`` parameter with default value ``Ansible`` (https://github.com/ansible-collections/community.general/issues/5023).
|
||||
- keyring lookup plugin - start using Ansible's configuration manager to parse options (https://github.com/ansible-collections/community.general/pull/5440).
|
||||
- lastpass - use config manager for handling plugin options (https://github.com/ansible-collections/community.general/pull/5022).
|
||||
- ldap_attrs - allow for DNs to have ``{x}`` prefix on first RDN (https://github.com/ansible-collections/community.general/issues/977, https://github.com/ansible-collections/community.general/pull/5450).
|
||||
- linode inventory plugin - simplify option handling (https://github.com/ansible-collections/community.general/pull/5438).
|
||||
- listen_ports_facts - add new ``include_non_listening`` option which adds ``-a`` option to ``netstat`` and ``ss``. This shows both listening and non-listening (for TCP this means established connections) sockets, and returns ``state`` and ``foreign_address`` (https://github.com/ansible-collections/community.general/issues/4762, https://github.com/ansible-collections/community.general/pull/4953).
|
||||
- lmdb_kv lookup plugin - start using Ansible's configuration manager to parse options (https://github.com/ansible-collections/community.general/pull/5440).
|
||||
|
|
@ -76,6 +286,7 @@ Minor Changes
|
|||
- machinectl become plugin - combine the success command when building the become command to be consistent with other become plugins (https://github.com/ansible-collections/community.general/pull/5287).
|
||||
- manifold lookup plugin - start using Ansible's configuration manager to parse options (https://github.com/ansible-collections/community.general/pull/5440).
|
||||
- maven_artifact - add a new ``unredirected_headers`` option that can be used with ansible-core 2.12 and above. The default value is to not use ``Authorization`` and ``Cookie`` headers on redirects for security reasons. With ansible-core 2.11, all headers are still passed on for redirects (https://github.com/ansible-collections/community.general/pull/4812).
|
||||
- mksysb - refactored module to use ``CmdRunner`` to execute ``mksysb`` (https://github.com/ansible-collections/community.general/pull/5484).
|
||||
- mksysb - using ``do_raise()`` to raise exceptions in ``ModuleHelper`` derived modules (https://github.com/ansible-collections/community.general/pull/4674).
|
||||
- nagios - minor refactoring on parameter validation for different actions (https://github.com/ansible-collections/community.general/pull/5239).
|
||||
- netcup_dnsapi - add ``timeout`` parameter (https://github.com/ansible-collections/community.general/pull/5301).
|
||||
|
|
@ -83,6 +294,7 @@ Minor Changes
|
|||
- nmcli - add bond option ``xmit_hash_policy`` to bond options (https://github.com/ansible-collections/community.general/issues/5148).
|
||||
- nmcli - adds ``vpn`` type and parameter for supporting VPN with service type L2TP and PPTP (https://github.com/ansible-collections/community.general/pull/4746).
|
||||
- nmcli - honor IP options for VPNs (https://github.com/ansible-collections/community.general/pull/5228).
|
||||
- onepassword - support version 2 of the OnePassword CLI (https://github.com/ansible-collections/community.general/pull/4728)
|
||||
- opentelemetry callback plugin - allow configuring opentelementry callback via config file (https://github.com/ansible-collections/community.general/pull/4916).
|
||||
- opentelemetry callback plugin - send logs. This can be disabled by setting ``disable_logs=false`` (https://github.com/ansible-collections/community.general/pull/4175).
|
||||
- pacman - added parameters ``reason`` and ``reason_for`` to set/change the install reason of packages (https://github.com/ansible-collections/community.general/pull/4956).
|
||||
|
|
@ -125,6 +337,7 @@ Breaking Changes / Porting Guide
|
|||
--------------------------------
|
||||
|
||||
- newrelic_deployment - ``revision`` is required for v2 API (https://github.com/ansible-collections/community.general/pull/5341).
|
||||
- scaleway_container_registry_info - no longer replace ``secret_environment_variables`` in the output by ``SENSITIVE_VALUE`` (https://github.com/ansible-collections/community.general/pull/5497).
|
||||
|
||||
Deprecated Features
|
||||
-------------------
|
||||
|
|
@ -179,8 +392,10 @@ Bugfixes
|
|||
- filesystem - improve error messages when output cannot be parsed by including newlines in escaped form (https://github.com/ansible-collections/community.general/pull/4700).
|
||||
- funcd connection plugin - fix signature of ``exec_command`` (https://github.com/ansible-collections/community.general/pull/5111).
|
||||
- ini_file - minor refactor fixing a python lint error (https://github.com/ansible-collections/community.general/pull/5307).
|
||||
- iso_create - the module somtimes failed to add folders for Joliet and UDF formats (https://github.com/ansible-collections/community.general/issues/5275).
|
||||
- keycloak_realm - fix default groups and roles (https://github.com/ansible-collections/community.general/issues/4241).
|
||||
- keyring_info - fix the result from the keyring library never getting returned (https://github.com/ansible-collections/community.general/pull/4964).
|
||||
- ldap_attrs - fix bug which caused a ``Bad search filter`` error. The error was occuring when the ldap attribute value contained special characters such as ``(`` or ``*`` (https://github.com/ansible-collections/community.general/issues/5434, https://github.com/ansible-collections/community.general/pull/5435).
|
||||
- ldap_attrs - fix ordering issue by ignoring the ``{x}`` prefix on attribute values (https://github.com/ansible-collections/community.general/issues/977, https://github.com/ansible-collections/community.general/pull/5385).
|
||||
- listen_ports_facts - removed leftover ``EnvironmentError`` . The ``else`` clause had a wrong indentation. The check is now handled in the ``split_pid_name`` function (https://github.com/ansible-collections/community.general/pull/5202).
|
||||
- locale_gen - fix support for Ubuntu (https://github.com/ansible-collections/community.general/issues/5281).
|
||||
|
|
@ -220,6 +435,7 @@ Bugfixes
|
|||
- redis* modules - fix call to ``module.fail_json`` when failing because of missing Python libraries (https://github.com/ansible-collections/community.general/pull/4733).
|
||||
- slack - fix incorrect channel prefix ``#`` caused by incomplete pattern detection by adding ``G0`` and ``GF`` as channel ID patterns (https://github.com/ansible-collections/community.general/pull/5019).
|
||||
- slack - fix message update for channels which start with ``CP``. When ``message-id`` was passed it failed for channels which started with ``CP`` because the ``#`` symbol was added before the ``channel_id`` (https://github.com/ansible-collections/community.general/pull/5249).
|
||||
- snap - allow values in the ``options`` parameter to contain whitespaces (https://github.com/ansible-collections/community.general/pull/5475).
|
||||
- sudoers - ensure sudoers config files are created with the permissions requested by sudoers (0440) (https://github.com/ansible-collections/community.general/pull/4814).
|
||||
- sudoers - fix incorrect handling of ``state: absent`` (https://github.com/ansible-collections/community.general/issues/4852).
|
||||
- tss lookup plugin - adding support for updated Delinea library (https://github.com/DelineaXPM/python-tss-sdk/issues/9, https://github.com/ansible-collections/community.general/pull/5151).
|
||||
|
|
@ -229,8 +445,41 @@ Bugfixes
|
|||
- xfconf - fix setting of boolean values (https://github.com/ansible-collections/community.general/issues/4999, https://github.com/ansible-collections/community.general/pull/5007).
|
||||
- zfs - fix wrong quoting of properties (https://github.com/ansible-collections/community.general/issues/4707, https://github.com/ansible-collections/community.general/pull/4726).
|
||||
|
||||
New Plugins
|
||||
-----------
|
||||
|
||||
Filter
|
||||
~~~~~~
|
||||
|
||||
- counter - Counts hashable elements in a sequence
|
||||
|
||||
Lookup
|
||||
~~~~~~
|
||||
|
||||
- bitwarden - Retrieve secrets from Bitwarden
|
||||
|
||||
New Modules
|
||||
-----------
|
||||
|
||||
- gconftool2_info - Retrieve GConf configurations
|
||||
- iso_customize - Add/remove/change files in ISO file
|
||||
- keycloak_user_rolemapping - Allows administration of Keycloak user_rolemapping with the Keycloak API
|
||||
- keyring - Set or delete a passphrase using the Operating System's native keyring
|
||||
- keyring_info - Get a passphrase using the Operating System's native keyring
|
||||
- manageiq_policies_info - Listing of resource policy_profiles in ManageIQ
|
||||
- manageiq_tags_info - Retrieve resource tags in ManageIQ
|
||||
- pipx_info - Rretrieves information about applications installed with pipx
|
||||
- proxmox_disk - Management of a disk of a Qemu(KVM) VM in a Proxmox VE cluster.
|
||||
- scaleway_compute_private_network - Scaleway compute - private network management
|
||||
- scaleway_container - Scaleway Container management
|
||||
- scaleway_container_info - Retrieve information on Scaleway Container
|
||||
- scaleway_container_namespace - Scaleway Container namespace management
|
||||
- scaleway_container_namespace_info - Retrieve information on Scaleway Container namespace
|
||||
- scaleway_container_registry - Scaleway Container registry management module
|
||||
- scaleway_container_registry_info - Scaleway Container registry info module
|
||||
- scaleway_function - Scaleway Function management
|
||||
- scaleway_function_info - Retrieve information on Scaleway Function
|
||||
- scaleway_function_namespace - Scaleway Function namespace management
|
||||
- scaleway_function_namespace_info - Retrieve information on Scaleway Function namespace
|
||||
- wdc_redfish_command - Manages WDC UltraStar Data102 Out-Of-Band controllers using Redfish APIs
|
||||
- wdc_redfish_info - Manages WDC UltraStar Data102 Out-Of-Band controllers using Redfish APIs
|
||||
|
|
|
|||
|
|
@ -31,7 +31,7 @@ Also, consider taking up a valuable, reviewed, but abandoned pull request which
|
|||
* Try committing your changes with an informative but short commit message.
|
||||
* Do not squash your commits and force-push to your branch if not needed. Reviews of your pull request are much easier with individual commits to comprehend the pull request history. All commits of your pull request branch will be squashed into one commit by GitHub upon merge.
|
||||
* Do not add merge commits to your PR. The bot will complain and you will have to rebase ([instructions for rebasing](https://docs.ansible.com/ansible/latest/dev_guide/developing_rebasing.html)) to remove them before your PR can be merged. To avoid that git automatically does merges during pulls, you can configure it to do rebases instead by running `git config pull.rebase true` inside the repository checkout.
|
||||
* Make sure your PR includes a [changelog fragment](https://docs.ansible.com/ansible/devel/community/development_process.html#changelogs-how-to). (You must not include a fragment for new modules or new plugins, except for test and filter plugins. Also you shouldn't include one for docs-only changes. If you're not sure, simply don't include one, we'll tell you whether one is needed or not :) )
|
||||
* Make sure your PR includes a [changelog fragment](https://docs.ansible.com/ansible/devel/community/development_process.html#creating-changelog-fragments). (You must not include a fragment for new modules or new plugins, except for test and filter plugins. Also you shouldn't include one for docs-only changes. If you're not sure, simply don't include one, we'll tell you whether one is needed or not :) )
|
||||
* Avoid reformatting unrelated parts of the codebase in your PR. These types of changes will likely be requested for reversion, create additional work for reviewers, and may cause approval to be delayed.
|
||||
|
||||
You can also read [our Quick-start development guide](https://github.com/ansible/community-docs/blob/main/create_pr_quick_start_guide.rst).
|
||||
|
|
|
|||
16
README.md
16
README.md
|
|
@ -6,7 +6,7 @@ SPDX-License-Identifier: GPL-3.0-or-later
|
|||
|
||||
# Community General Collection
|
||||
|
||||
[](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
|
||||
[](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
|
||||
[](https://codecov.io/gh/ansible-collections/community.general)
|
||||
|
||||
This repository contains the `community.general` Ansible Collection. The collection is a part of the Ansible package and includes many modules and plugins supported by Ansible community which are not part of more specialized community collections.
|
||||
|
|
@ -72,13 +72,13 @@ We are actively accepting new contributors.
|
|||
|
||||
All types of contributions are very welcome.
|
||||
|
||||
You don't know how to start? Refer to our [contribution guide](https://github.com/ansible-collections/community.general/blob/main/CONTRIBUTING.md)!
|
||||
You don't know how to start? Refer to our [contribution guide](https://github.com/ansible-collections/community.general/blob/stable-6/CONTRIBUTING.md)!
|
||||
|
||||
The current maintainers are listed in the [commit-rights.md](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md#people) file. If you have questions or need help, feel free to mention them in the proposals.
|
||||
The current maintainers are listed in the [commit-rights.md](https://github.com/ansible-collections/community.general/blob/stable-6/commit-rights.md#people) file. If you have questions or need help, feel free to mention them in the proposals.
|
||||
|
||||
You can find more information in the [developer guide for collections](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections), and in the [Ansible Community Guide](https://docs.ansible.com/ansible/latest/community/index.html).
|
||||
|
||||
Also for some notes specific to this collection see [our CONTRIBUTING documentation](https://github.com/ansible-collections/community.general/blob/main/CONTRIBUTING.md).
|
||||
Also for some notes specific to this collection see [our CONTRIBUTING documentation](https://github.com/ansible-collections/community.general/blob/stable-6/CONTRIBUTING.md).
|
||||
|
||||
### Running tests
|
||||
|
||||
|
|
@ -88,7 +88,7 @@ See [here](https://docs.ansible.com/ansible/devel/dev_guide/developing_collectio
|
|||
|
||||
To learn how to maintain / become a maintainer of this collection, refer to:
|
||||
|
||||
* [Committer guidelines](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md).
|
||||
* [Committer guidelines](https://github.com/ansible-collections/community.general/blob/stable-6/commit-rights.md).
|
||||
* [Maintainer guidelines](https://github.com/ansible/community-docs/blob/main/maintaining.rst).
|
||||
|
||||
It is necessary for maintainers of this collection to be subscribed to:
|
||||
|
|
@ -116,7 +116,7 @@ See the [Releasing guidelines](https://github.com/ansible/community-docs/blob/ma
|
|||
|
||||
## Release notes
|
||||
|
||||
See the [changelog](https://github.com/ansible-collections/community.general/blob/main/CHANGELOG.rst).
|
||||
See the [changelog](https://github.com/ansible-collections/community.general/blob/stable-6/CHANGELOG.rst).
|
||||
|
||||
## Roadmap
|
||||
|
||||
|
|
@ -135,8 +135,8 @@ See [this issue](https://github.com/ansible-collections/community.general/issues
|
|||
|
||||
This collection is primarily licensed and distributed as a whole under the GNU General Public License v3.0 or later.
|
||||
|
||||
See [LICENSES/GPL-3.0-or-later.txt](https://github.com/ansible-collections/community.general/blob/main/COPYING) for the full text.
|
||||
See [LICENSES/GPL-3.0-or-later.txt](https://github.com/ansible-collections/community.general/blob/stable-6/COPYING) for the full text.
|
||||
|
||||
Parts of the collection are licensed under the [BSD 2-Clause license](https://github.com/ansible-collections/community.general/blob/main/LICENSES/BSD-2-Clause.txt), the [MIT license](https://github.com/ansible-collections/community.general/blob/main/LICENSES/MIT.txt), and the [PSF 2.0 license](https://github.com/ansible-collections/community.general/blob/main/LICENSES/PSF-2.0.txt).
|
||||
Parts of the collection are licensed under the [BSD 2-Clause license](https://github.com/ansible-collections/community.general/blob/stable-6/LICENSES/BSD-2-Clause.txt), the [MIT license](https://github.com/ansible-collections/community.general/blob/stable-6/LICENSES/MIT.txt), and the [PSF 2.0 license](https://github.com/ansible-collections/community.general/blob/stable-6/LICENSES/PSF-2.0.txt).
|
||||
|
||||
All files have a machine-readable `SPDX-License-Identifier:` comment denoting their respective license(s) or an equivalent entry in an accompanying `.license` file. Only changelog fragments (which will not be part of a release) are covered by a blanket statement in `.reuse/dep5`. This conforms to the [REUSE specification](https://reuse.software/spec/).
|
||||
|
|
|
|||
|
|
@ -1,5 +1,62 @@
|
|||
ancestor: 5.0.0
|
||||
releases:
|
||||
6.0.0:
|
||||
changes:
|
||||
breaking_changes:
|
||||
- scaleway_container_registry_info - no longer replace ``secret_environment_variables``
|
||||
in the output by ``SENSITIVE_VALUE`` (https://github.com/ansible-collections/community.general/pull/5497).
|
||||
bugfixes:
|
||||
- iso_create - the module sometimes failed to add folders for Joliet and UDF
|
||||
formats (https://github.com/ansible-collections/community.general/issues/5275).
|
||||
- ldap_attrs - fix bug which caused a ``Bad search filter`` error. The error
|
||||
was occurring when the ldap attribute value contained special characters such
|
||||
as ``(`` or ``*`` (https://github.com/ansible-collections/community.general/issues/5434,
|
||||
https://github.com/ansible-collections/community.general/pull/5435).
|
||||
- snap - allow values in the ``options`` parameter to contain whitespaces (https://github.com/ansible-collections/community.general/pull/5475).
|
||||
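As a rough illustration of the ``snap`` fix above (option values containing whitespace), a task could look like the sketch below. The snap name and the ``key=value`` string are invented for illustration, and the ``options`` list format is assumed from the module's usual interface rather than taken from this changelog.

```yaml
- name: Configure a snap with an option value that contains whitespace
  community.general.snap:
    name: some-snap                            # hypothetical snap name
    state: present
    options:
      - 'service.args=--log-level debug'       # value with whitespace, per the fix above
```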
minor_changes:
|
||||
- ansible_galaxy_install - refactored module to use ``CmdRunner`` to execute
|
||||
``ansible-galaxy`` (https://github.com/ansible-collections/community.general/pull/5477).
|
||||
- cpanm - refactored module to use ``CmdRunner`` to execute ``cpanm`` (https://github.com/ansible-collections/community.general/pull/5485).
|
||||
- hponcfg - refactored module to use ``CmdRunner`` to execute ``hponcfg`` (https://github.com/ansible-collections/community.general/pull/5483).
|
||||
- ldap_attrs - allow for DNs to have ``{x}`` prefix on first RDN (https://github.com/ansible-collections/community.general/issues/977,
|
||||
https://github.com/ansible-collections/community.general/pull/5450).
|
||||
- mksysb - refactored module to use ``CmdRunner`` to execute ``mksysb`` (https://github.com/ansible-collections/community.general/pull/5484).
|
||||
- onepassword - support version 2 of the OnePassword CLI (https://github.com/ansible-collections/community.general/pull/4728)
|
||||
release_summary: New major release of community.general with lots of bugfixes,
|
||||
new features, some removed deprecated features, and some other breaking changes.
|
||||
Please check the corresponding sections of the changelog for more details.
|
||||
fragments:
|
||||
- 4728-onepassword-v2.yml
|
||||
- 5435-escape-ldap-param.yml
|
||||
- 5450-allow-for-xordered-dns.yaml
|
||||
- 5468-iso-create-not-add-folders.yml
|
||||
- 5475-snap-option-value-whitespace.yml
|
||||
- 5477-ansible-galaxy-install-cmd-runner.yml
|
||||
- 5483-hponcfg-cmd-runner.yml
|
||||
- 5484-mksysb-cmd-runner.yml
|
||||
- 5485-cpanm-cmd-runner.yml
|
||||
- 5497-scaleway-filtering.yml
|
||||
- 6.0.0.yml
|
||||
modules:
|
||||
- description: Scaleway Container management
|
||||
name: scaleway_container
|
||||
namespace: ''
|
||||
- description: Retrieve information on Scaleway Container
|
||||
name: scaleway_container_info
|
||||
namespace: ''
|
||||
- description: Scaleway Container namespace management
|
||||
name: scaleway_container_namespace
|
||||
namespace: ''
|
||||
- description: Retrieve information on Scaleway Container namespace
|
||||
name: scaleway_container_namespace_info
|
||||
namespace: ''
|
||||
- description: Scaleway Function management
|
||||
name: scaleway_function
|
||||
namespace: ''
|
||||
- description: Retrieve information on Scaleway Function
|
||||
name: scaleway_function_info
|
||||
namespace: ''
|
||||
release_date: '2022-11-07'
|
||||
6.0.0-a1:
|
||||
changes:
|
||||
breaking_changes:
|
||||
|
|
@ -508,10 +565,462 @@ releases:
|
|||
- simplified-bsd-license.yml
|
||||
- unflatmap.yml
|
||||
modules:
|
||||
- description: Retrieve GConf configurations
|
||||
name: gconftool2_info
|
||||
namespace: ''
|
||||
- description: Add/remove/change files in ISO file
|
||||
name: iso_customize
|
||||
namespace: ''
|
||||
- description: Allows administration of Keycloak user_rolemapping with the Keycloak
|
||||
API
|
||||
name: keycloak_user_rolemapping
|
||||
namespace: ''
|
||||
- description: Set or delete a passphrase using the Operating System's native
|
||||
keyring
|
||||
name: keyring
|
||||
namespace: ''
|
||||
- description: Get a passphrase using the Operating System's native keyring
|
||||
name: keyring_info
|
||||
namespace: ''
|
||||
- description: Listing of resource policy_profiles in ManageIQ
|
||||
name: manageiq_policies_info
|
||||
namespace: ''
|
||||
- description: Retrieve resource tags in ManageIQ
|
||||
name: manageiq_tags_info
|
||||
namespace: ''
|
||||
- description: Retrieves information about applications installed with pipx
|
||||
name: pipx_info
|
||||
namespace: ''
|
||||
- description: Management of a disk of a Qemu(KVM) VM in a Proxmox VE cluster.
|
||||
name: proxmox_disk
|
||||
namespace: ''
|
||||
- description: Scaleway compute - private network management
|
||||
name: scaleway_compute_private_network
|
||||
namespace: ''
|
||||
- description: Scaleway Container registry management module
|
||||
name: scaleway_container_registry
|
||||
namespace: ''
|
||||
- description: Scaleway Container registry info module
|
||||
name: scaleway_container_registry_info
|
||||
namespace: ''
|
||||
- description: Scaleway Function namespace management
|
||||
name: scaleway_function_namespace
|
||||
namespace: ''
|
||||
- description: Retrieve information on Scaleway Function namespace
|
||||
name: scaleway_function_namespace_info
|
||||
namespace: ''
|
||||
- description: Manages WDC UltraStar Data102 Out-Of-Band controllers using Redfish
|
||||
APIs
|
||||
name: wdc_redfish_command
|
||||
namespace: ''
|
||||
- description: Manages WDC UltraStar Data102 Out-Of-Band controllers using Redfish
|
||||
APIs
|
||||
name: wdc_redfish_info
|
||||
namespace: ''
|
||||
plugins:
|
||||
filter:
|
||||
- description: Counts hashable elements in a sequence
|
||||
name: counter
|
||||
namespace: null
|
||||
lookup:
|
||||
- description: Retrieve secrets from Bitwarden
|
||||
name: bitwarden
|
||||
namespace: null
|
||||
release_date: '2022-11-02'
|
||||
6.0.1:
|
||||
changes:
|
||||
bugfixes:
|
||||
- dependent lookup plugin - avoid warning on deprecated parameter for ``Templar.template()``
|
||||
(https://github.com/ansible-collections/community.general/pull/5543).
|
||||
- jenkins_build - fix the logical flaw when deleting a Jenkins build (https://github.com/ansible-collections/community.general/pull/5514).
|
||||
- one_vm - avoid splitting labels that are ``None`` (https://github.com/ansible-collections/community.general/pull/5489).
|
||||
- onepassword_raw - add missing parameter to plugin documentation (https://github.com/ansible-collections/community.general/issues/5506).
|
||||
- proxmox_disk - avoid duplicate ``vmid`` reference (https://github.com/ansible-collections/community.general/issues/5492,
|
||||
https://github.com/ansible-collections/community.general/pull/5493).
|
||||
release_summary: Bugfix release for Ansible 7.0.0.
|
||||
fragments:
|
||||
- 5489-nonetype-in-get-vm-by-label.yml
|
||||
- 5493-proxmox.yml
|
||||
- 5506-onepassword_raw-missing-param.yml
|
||||
- 5514-fix-logical-flaw-when-deleting-jenkins-build.yml
|
||||
- 5543-dependent-template.yml
|
||||
- 6.0.1.yml
|
||||
release_date: '2022-11-15'
|
||||
6.1.0:
|
||||
changes:
|
||||
bugfixes:
|
||||
- chroot connection plugin - add ``inventory_hostname`` to vars under ``remote_addr``.
|
||||
This is needed for compatibility with ansible-core 2.13 (https://github.com/ansible-collections/community.general/pull/5570).
|
||||
- cmd_runner module utils - fixed bug when handling default cases in ``cmd_runner_fmt.as_map()``
|
||||
(https://github.com/ansible-collections/community.general/pull/5538).
|
||||
- cmd_runner module utils - formatting arguments ``cmd_runner_fmt.as_fixed()``
|
||||
was expecting a non-existing argument (https://github.com/ansible-collections/community.general/pull/5538).
|
||||
- keycloak_client_rolemapping - calculate ``proposed`` and ``after`` return
|
||||
values properly (https://github.com/ansible-collections/community.general/pull/5619).
|
||||
- keycloak_client_rolemapping - remove only listed mappings with ``state=absent``
|
||||
(https://github.com/ansible-collections/community.general/pull/5619).
|
||||
- proxmox inventory plugin - fix bug while templating when using templates for
|
||||
the ``url``, ``user``, ``password``, ``token_id``, or ``token_secret`` options
|
||||
(https://github.com/ansible-collections/community.general/pull/5640).
|
||||
- proxmox inventory plugin - handle tags delimited by semicolon instead of comma,
|
||||
which happens from Proxmox 7.3 on (https://github.com/ansible-collections/community.general/pull/5602).
|
||||
- redhat_subscription - do not ignore ``consumer_name`` and other variables
|
||||
if ``activationkey`` is specified (https://github.com/ansible-collections/community.general/issues/3486,
|
||||
https://github.com/ansible-collections/community.general/pull/5627).
|
||||
- redhat_subscription - do not pass arguments to ``subscription-manager register``
|
||||
for things already configured; now a specified ``rhsm_baseurl`` is properly
|
||||
set for subscription-manager (https://github.com/ansible-collections/community.general/pull/5583).
|
||||
- unixy callback plugin - fix plugin to work with ansible-core 2.14 by using
|
||||
Ansible's configuration manager for handling options (https://github.com/ansible-collections/community.general/issues/5600).
|
||||
- vdo - now uses ``yaml.safe_load()`` to parse command output instead of the
|
||||
deprecated ``yaml.load()`` which is potentially unsafe. Using ``yaml.load()``
|
||||
without explicitly setting a ``Loader=`` is also an error in pyYAML 6.0 (https://github.com/ansible-collections/community.general/pull/5632).
|
||||
- vmadm - fix for index out of range error in ``get_vm_uuid`` (https://github.com/ansible-collections/community.general/pull/5628).
|
||||
deprecated_features:
|
||||
- The ``sap`` modules ``sapcar_extract``, ``sap_task_list_execute``, and ``hana_query``
|
||||
will be removed from this collection in community.general 7.0.0 and replaced
|
||||
with redirects to ``community.sap_libs``. If you want to continue using these
|
||||
modules, make sure to also install ``community.sap_libs`` (it is part of the
|
||||
Ansible package) (https://github.com/ansible-collections/community.general/pull/5614).
|
||||
minor_changes:
|
||||
- cmd_runner module utils - ``cmd_runner_fmt.as_bool()`` can now take an extra
|
||||
parameter to format when value is false (https://github.com/ansible-collections/community.general/pull/5647).
|
||||
- gconftool2 - refactor using ``ModuleHelper`` and ``CmdRunner`` (https://github.com/ansible-collections/community.general/pull/5545).
|
||||
- java_certs - add more detailed error output when extracting certificate from
|
||||
PKCS12 fails (https://github.com/ansible-collections/community.general/pull/5550).
|
||||
- jenkins_plugin - refactor code to module util to fix sanity check (https://github.com/ansible-collections/community.general/pull/5565).
|
||||
- lxd_project - refactored code out to module utils to clear sanity check (https://github.com/ansible-collections/community.general/pull/5549).
|
||||
- nmap inventory plugin - add new options ``udp_scan``, ``icmp_timestamp``,
|
||||
and ``dns_resolve`` for different types of scans (https://github.com/ansible-collections/community.general/pull/5566).
|
||||
- rax_scaling_group - refactored out code to the ``rax`` module utils to clear
|
||||
the sanity check (https://github.com/ansible-collections/community.general/pull/5563).
|
||||
- redfish_command - add ``PerformRequestedOperations`` command to perform any
|
||||
operations necessary to continue the update flow (https://github.com/ansible-collections/community.general/issues/4276).
|
||||
- redfish_command - add ``update_apply_time`` to ``SimpleUpdate`` command (https://github.com/ansible-collections/community.general/issues/3910).
|
||||
- redfish_command - add ``update_status`` to output of ``SimpleUpdate`` command
|
||||
to allow a user to monitor the update in progress (https://github.com/ansible-collections/community.general/issues/4276).
|
||||
- redfish_info - add ``GetUpdateStatus`` command to check the progress of a
|
||||
previous update request (https://github.com/ansible-collections/community.general/issues/4276).
|
||||
- redfish_utils module utils - added PUT (``put_request()``) functionality (https://github.com/ansible-collections/community.general/pull/5490).
|
||||
- slack - add option ``prepend_hash`` which allows controlling whether a ``#``
|
||||
is prepended to ``channel_id``. The current behavior (value ``auto``) is to
|
||||
prepend ``#`` unless some specific prefixes are found. That list of prefixes
|
||||
is incomplete, and there does not seem to exist a documented condition on
|
||||
when exactly ``#`` must not be prepended. We recommend to explicitly set ``prepend_hash=always``
|
||||
or ``prepend_hash=never`` to avoid any ambiguity (https://github.com/ansible-collections/community.general/pull/5629).
|
||||
- spotinst_aws_elastigroup - add ``elements`` attribute when missing in ``list``
|
||||
parameters (https://github.com/ansible-collections/community.general/pull/5553).
|
||||
- ssh_config - add ``host_key_algorithms`` option (https://github.com/ansible-collections/community.general/pull/5605).
|
||||
- udm_share - added ``elements`` attribute to ``list`` type parameters (https://github.com/ansible-collections/community.general/pull/5557).
|
||||
- udm_user - add ``elements`` attribute when missing in ``list`` parameters
|
||||
(https://github.com/ansible-collections/community.general/pull/5559).
|
||||
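Following the ``slack`` recommendation above (set ``prepend_hash`` explicitly instead of relying on ``auto``), a task might look roughly like this. The ``token``, ``channel``, and ``msg`` parameter names are assumed from the module's usual options and are not taken from this changelog.

```yaml
- name: Send a Slack notification without a '#' being prepended to the channel ID
  community.general.slack:
    token: "{{ slack_token }}"       # assumed parameter name for the API token
    channel: G111111111              # channel ID placeholder; no '#' should be added
    msg: Deployment finished
    prepend_hash: never              # explicit setting, as recommended above
```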
release_summary: Regular bugfix and feature release.
|
||||
fragments:
|
||||
- 3910-redfish-add-operation-apply-time-to-simple-update.yml
|
||||
- 4276-redfish-command-updates-for-full-simple-update-workflow.yml
|
||||
- 5490-adding-put-functionality.yml
|
||||
- 5538-cmd-runner-as-fixed.yml
|
||||
- 5545-gconftool-cmd-runner.yml
|
||||
- 5549-lxd-project-sanity.yml
|
||||
- 5550-java_certs-not-enough-info-on-error.yml
|
||||
- 5553-spotinst-aws-elasticgroup-sanity.yml
|
||||
- 5557-udm-share-sanity.yml
|
||||
- 5559-udm-user-sanity.yml
|
||||
- 5563-rax-scaling-group-sanity.yml
|
||||
- 5565-jenkins-plugin-sanity.yml
|
||||
- 5566-additional-flags-nmap.yml
|
||||
- 5570-chroot-plugin-fix-default-inventory_hostname.yml
|
||||
- 5583-redhat_subscription-subscribe-parameters.yaml
|
||||
- 5601-unixy-callback-use-config-manager.yml
|
||||
- 5602-proxmox-tags.yml
|
||||
- 5605-ssh-config-add-host-key-algorithms.yaml
|
||||
- 5619-keycloak-improvements.yml
|
||||
- 5627-redhat_subscription-subscribe-parameters-2.yaml
|
||||
- 5628-fix-vmadm-off-by-one.yml
|
||||
- 5629-add-prepend-hash-option-for-channel-id.yml
|
||||
- 5632-vdo-Use-yaml-safe-load-instead-of-yaml-load.yml
|
||||
- 5640-fix-typo-proxmox-inventory.yml
|
||||
- 5647-cmd-runner-as-bool-false.yml
|
||||
- 6.1.0.yml
|
||||
- sap-removal.yml
|
||||
modules:
|
||||
- description: Manage project badges on GitLab Server
|
||||
name: gitlab_project_badge
|
||||
namespace: ''
|
||||
- description: Retrieve client secret via Keycloak API
|
||||
name: keycloak_clientsecret_info
|
||||
namespace: ''
|
||||
- description: Regenerate Keycloak client secret via Keycloak API
|
||||
name: keycloak_clientsecret_regenerate
|
||||
namespace: ''
|
||||
release_date: '2022-12-06'
|
||||
6.2.0:
|
||||
changes:
|
||||
bugfixes:
|
||||
- ansible_galaxy_install - set default to raise exception if command's return
|
||||
code is different from zero (https://github.com/ansible-collections/community.general/pull/5680).
|
||||
- ansible_galaxy_install - try ``C.UTF-8`` and then fall back to ``en_US.UTF-8``
|
||||
before failing (https://github.com/ansible-collections/community.general/pull/5680).
|
||||
- gitlab_group_variables - fix dropping variables accidentally when GitLab introduced
|
||||
new properties (https://github.com/ansible-collections/community.general/pull/5667).
|
||||
- gitlab_project_variables - fix dropping variables accidentally when GitLab
|
||||
introduced new properties (https://github.com/ansible-collections/community.general/pull/5667).
|
||||
- lxc_container - fix the arguments of the lxc command which broke the creation
|
||||
and cloning of containers (https://github.com/ansible-collections/community.general/issues/5578).
|
||||
- opkg - fix issue that ``force=reinstall`` would not reinstall an existing
|
||||
package (https://github.com/ansible-collections/community.general/pull/5705).
|
||||
- proxmox_disk - fixed possible issues with redundant ``vmid`` parameter (https://github.com/ansible-collections/community.general/issues/5492,
|
||||
https://github.com/ansible-collections/community.general/pull/5672).
|
||||
- proxmox_nic - fixed possible issues with redundant ``vmid`` parameter (https://github.com/ansible-collections/community.general/issues/5492,
|
||||
https://github.com/ansible-collections/community.general/pull/5672).
|
||||
- unixy callback plugin - fix typo introduced when updating to use Ansible's
|
||||
configuration manager for handling options (https://github.com/ansible-collections/community.general/issues/5600).
|
||||
deprecated_features:
|
||||
- manageiq_policies - deprecate ``state=list`` in favour of using ``community.general.manageiq_policies_info``
|
||||
(https://github.com/ansible-collections/community.general/pull/5721).
|
||||
- rax - module relies on deprecated library ``pyrax``. Unless maintainers step
|
||||
up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_cbs - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_cbs_attachments - module relies on deprecated library ``pyrax``. Unless
|
||||
maintainers step up to work on the module, it will be marked as deprecated
|
||||
in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_cdb - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_cdb_database - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_cdb_user - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_clb - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_clb_nodes - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_clb_ssl - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_dns - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_dns_record - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_facts - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_files - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_files_objects - module relies on deprecated library ``pyrax``. Unless
|
||||
maintainers step up to work on the module, it will be marked as deprecated
|
||||
in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_identity - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_keypair - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_meta - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_mon_alarm - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_mon_check - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_mon_entity - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_mon_notification - module relies on deprecated library ``pyrax``. Unless
|
||||
maintainers step up to work on the module, it will be marked as deprecated
|
||||
in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_mon_notification_plan - module relies on deprecated library ``pyrax``.
|
||||
Unless maintainers step up to work on the module, it will be marked as deprecated
|
||||
in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_network - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_queue - module relies on deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_scaling_group - module relies on deprecated library ``pyrax``. Unless
|
||||
maintainers step up to work on the module, it will be marked as deprecated
|
||||
in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_scaling_policy - module relies on deprecated library ``pyrax``. Unless
|
||||
maintainers step up to work on the module, it will be marked as deprecated
|
||||
in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
minor_changes:
|
||||
- opkg - allow installing a package in a certain version (https://github.com/ansible-collections/community.general/pull/5688).
|
||||
- proxmox - added new module parameter ``tags`` for use with PVE 7+ (https://github.com/ansible-collections/community.general/pull/5714).
|
||||
- puppet - refactored module to use ``CmdRunner`` for executing ``puppet`` (https://github.com/ansible-collections/community.general/pull/5612).
|
||||
- redhat_subscription - add a ``server_proxy_scheme`` parameter to configure
|
||||
the scheme for the proxy server (https://github.com/ansible-collections/community.general/pull/5662).
|
||||
- ssh_config - refactor code to module util to fix sanity check (https://github.com/ansible-collections/community.general/pull/5720).
|
||||
- sudoers - adds ``host`` parameter for setting hostname restrictions in sudoers
|
||||
rules (https://github.com/ansible-collections/community.general/issues/5702).
|
||||
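To illustrate the new ``sudoers`` ``host`` restriction mentioned above, here is a hedged sketch of a task. The ``name``, ``user``, and ``commands`` parameters are the module's usual options and are assumed here rather than quoted from this changelog; the user, command, and host are placeholders.

```yaml
- name: Allow alice to restart one service, but only on a specific host
  community.general.sudoers:
    name: alice-restart-webservice          # name of the generated sudoers rule file (assumed)
    user: alice
    commands: /usr/bin/systemctl restart webservice
    host: web01                             # new hostname restriction described above
```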
release_summary: Regular bugfix and feature release.
|
||||
fragments:
|
||||
- 5612-puppet-cmd-runner.yml
|
||||
- 5659-fix-lxc_container-command.yml
|
||||
- 5662-redhat_subscription-server_proxy_scheme.yaml
|
||||
- 5666-gitlab-variables.yml
|
||||
- 5672-proxmox.yml
|
||||
- 5680-ansible_galaxy_install-fx-locale.yaml
|
||||
- 5688-opkg-module-install-certain-version.yml
|
||||
- 5703-sudoers-host-support.yml
|
||||
- 5705-opkg-fix-force-reinstall.yml
|
||||
- 5714-proxmox-lxc-tag-support.yml
|
||||
- 5720-ssh_config-plugin-sanity.yml
|
||||
- 5721-manageiq-policies-deprecate-list-state.yaml
|
||||
- 5733-rax-deprecation-notice.yml
|
||||
- 5744-unixy-callback-fix-config-manager-typo.yml
|
||||
- 6.2.0.yml
|
||||
release_date: '2023-01-04'
|
||||
6.3.0:
|
||||
changes:
|
||||
breaking_changes:
|
||||
- 'ModuleHelper module utils - when the module sets output variables named ``msg``,
|
||||
``exception``, ``output``, ``vars``, or ``changed``, the actual output will
|
||||
prefix those names with ``_`` (underscore symbol) only when they clash with
|
||||
output variables generated by ModuleHelper itself, which only occurs when
|
||||
handling exceptions. Please note that this breaking change does not require
|
||||
a new major release, since before this release it was not possible to add
|
||||
such variables to the output `due to a bug <https://github.com/ansible-collections/community.general/pull/5755>`__
|
||||
(https://github.com/ansible-collections/community.general/pull/5765).
|
||||
|
||||
'
|
||||
bugfixes:
|
||||
- ModuleHelper - fix bug when adjusting the name of reserved output variables
|
||||
(https://github.com/ansible-collections/community.general/pull/5755).
|
||||
- alternatives - support subcommands on Fedora 37, which uses ``follower`` instead
|
||||
of ``slave`` (https://github.com/ansible-collections/community.general/pull/5794).
|
||||
- bitwarden lookup plugin - clarify what to do if the bitwarden vault is not
|
||||
unlocked (https://github.com/ansible-collections/community.general/pull/5811).
|
||||
- dig lookup plugin - correctly handle DNSKEY record type's ``algorithm`` field
|
||||
(https://github.com/ansible-collections/community.general/pull/5914).
|
||||
- gem - fix force parameter not being passed to gem command when uninstalling
|
||||
(https://github.com/ansible-collections/community.general/pull/5822).
|
||||
- gem - fix hang due to interactive prompt for confirmation on specific version
|
||||
uninstall (https://github.com/ansible-collections/community.general/pull/5751).
|
||||
- gitlab_deploy_key - also update ``title`` and not just ``can_push`` (https://github.com/ansible-collections/community.general/pull/5888).
|
||||
- keycloak_user_federation - fixes federation creation issue. When a new federation
|
||||
was created and at the same time a default / standard mapper was also changed
|
||||
/ updated, the creation process failed because a variable unintentionally set to ``None`` led to a
|
||||
malformed URL request (https://github.com/ansible-collections/community.general/pull/5750).
|
||||
- 'keycloak_user_federation - fixes idempotency detection issues. In some cases
|
||||
the module could fail to properly detect already existing user federations
|
||||
because of a buggy, seemingly superfluous extra query parameter (https://github.com/ansible-collections/community.general/pull/5732).
|
||||
|
||||
'
|
||||
- loganalytics callback plugin - adjust type of callback to ``notification``,
|
||||
it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- logdna callback plugin - adjust type of callback to ``notification``, it was
|
||||
incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- logstash callback plugin - adjust type of callback to ``notification``, it
|
||||
was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- nsupdate - fix zone lookup. The SOA record for an existing zone is returned
|
||||
as an answer RR and not as an authority RR (https://github.com/ansible-collections/community.general/issues/5817,
|
||||
https://github.com/ansible-collections/community.general/pull/5818).
|
||||
- proxmox_disk - fixed issue with read timeout on import action (https://github.com/ansible-collections/community.general/pull/5803).
|
||||
- redfish_utils - removed basic auth HTTP header when performing a GET on the
|
||||
service root resource and when performing a POST to the session collection
|
||||
(https://github.com/ansible-collections/community.general/issues/5886).
|
||||
- splunk callback plugin - adjust type of callback to ``notification``, it was
|
||||
incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- sumologic callback plugin - adjust type of callback to ``notification``, it
|
||||
was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- syslog_json callback plugin - adjust type of callback to ``notification``,
|
||||
it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- terraform - fix ``current`` workspace never getting appended to the ``all``
|
||||
key in the ``workspace_ctf`` object (https://github.com/ansible-collections/community.general/pull/5735).
|
||||
- terraform - fix ``terraform init`` failure when there are multiple workspaces
|
||||
on the remote backend and when ``default`` workspace is missing by setting
|
||||
``TF_WORKSPACE`` environmental variable to the value of ``workspace`` when
|
||||
used (https://github.com/ansible-collections/community.general/pull/5735).
|
||||
- terraform module - disable ANSI escape sequences during validation phase (https://github.com/ansible-collections/community.general/pull/5843).
|
||||
- xml - fixed a bug where empty ``children`` list would not be set (https://github.com/ansible-collections/community.general/pull/5808).
|
||||
deprecated_features:
|
||||
- consul - deprecate using parameters unused for ``state=absent`` (https://github.com/ansible-collections/community.general/pull/5772).
|
||||
- gitlab_runner - the default of the new option ``access_level_on_creation``
|
||||
will change from ``false`` to ``true`` in community.general 7.0.0. This will
|
||||
cause ``access_level`` to be used during runner registration as well, and
|
||||
not only during updates (https://github.com/ansible-collections/community.general/pull/5908).
|
||||
minor_changes:
|
||||
- apache2_module - add module argument ``warn_mpm_absent`` to control whether
|
||||
warnings are raised in some edge cases (https://github.com/ansible-collections/community.general/pull/5793).
|
||||
- bitwarden lookup plugin - can now retrieve secrets from custom fields (https://github.com/ansible-collections/community.general/pull/5694).
|
||||
- bitwarden lookup plugin - implement filtering results by ``collection_id``
|
||||
parameter (https://github.com/ansible-collections/community.general/issues/5849).
|
||||
- dig lookup plugin - support CAA record type (https://github.com/ansible-collections/community.general/pull/5913).
|
||||
- gitlab_project - add ``builds_access_level``, ``container_registry_access_level``
|
||||
and ``forking_access_level`` options (https://github.com/ansible-collections/community.general/pull/5706).
|
||||
- gitlab_runner - add new boolean option ``access_level_on_creation``. It controls
|
||||
whether the value of ``access_level`` is used for runner registration or not.
|
||||
The option ``access_level`` has been ignored on registration so far and was
|
||||
only used on updates (https://github.com/ansible-collections/community.general/issues/5907,
|
||||
https://github.com/ansible-collections/community.general/pull/5908).
|
||||
- ilo_redfish_utils module utils - change implementation of DNS Server IP and
|
||||
NTP Server IP update (https://github.com/ansible-collections/community.general/pull/5804).
|
||||
- ipa_group - allow adding and removing external users with the ``external_user``
|
||||
option (https://github.com/ansible-collections/community.general/pull/5897).
|
||||
- iptables_state - minor refactoring within the module (https://github.com/ansible-collections/community.general/pull/5844).
|
||||
- one_vm - add a new ``updateconf`` option which implements the ``one.vm.updateconf``
|
||||
API call (https://github.com/ansible-collections/community.general/pull/5812).
|
||||
- opkg - refactored module to use ``CmdRunner`` for executing ``opkg`` (https://github.com/ansible-collections/community.general/pull/5718).
|
||||
- redhat_subscription - adds ``token`` parameter for subscription-manager authentication
|
||||
using Red Hat API token (https://github.com/ansible-collections/community.general/pull/5725).
|
||||
- snap - minor refactor when executing module (https://github.com/ansible-collections/community.general/pull/5773).
|
||||
- snap_alias - refactored module to use ``CmdRunner`` to execute ``snap`` (https://github.com/ansible-collections/community.general/pull/5486).
|
||||
- sudoers - add ``setenv`` parameter to support passing environment variables
|
||||
via sudo (https://github.com/ansible-collections/community.general/pull/5883).
|
||||
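The ``bitwarden`` lookup improvements listed above (custom fields and ``collection_id`` filtering) could be combined roughly as sketched below. The item name, field name, and collection ID are placeholders, and the positional search term plus the ``field`` option are assumed from the plugin's usual interface rather than taken from this changelog.

```yaml
- name: Read a custom field from an item inside a specific Bitwarden collection
  ansible.builtin.debug:
    msg: "{{ lookup('community.general.bitwarden', 'My Server', field='api_key', collection_id=bw_collection) }}"
  vars:
    # Placeholder collection ID; 'My Server' and 'api_key' are illustrative values only.
    bw_collection: 00000000-0000-0000-0000-000000000000
```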
release_summary: Regular bugfix and feature release.
|
||||
fragments:
|
||||
- 5486-snap-alias-cmd-runner.yml
|
||||
- 5694-add-custom-fields-to-bitwarden.yml
|
||||
- 5706-add-builds-forks-container-registry.yml
|
||||
- 5718-opkg-refactor.yaml
|
||||
- 5725-redhat_subscription-add-red-hat-api-token.yml
|
||||
- 5732-bugfix-keycloak-userfed-idempotency.yml
|
||||
- 5735-terraform-init-fix-when-default-workspace-doesnt-exists.yaml
|
||||
- 5750-bugfixing-keycloak-usrfed-fail-when-update-default-mapper-simultaneously.yml
|
||||
- 5751-gem-fix-uninstall-hang.yml
|
||||
- 5755-mh-fix-output-conflict.yml
|
||||
- 5761-callback-types.yml
|
||||
- 5765-mh-lax-output-conflict.yml
|
||||
- 5772-consul-deprecate-params-when-absent.yml
|
||||
- 5773-snap-mh-execute.yml
|
||||
- 5793-apache2-module-npm-warnings.yml
|
||||
- 5794-alternatives-fedora37.yml
|
||||
- 5803-proxmox-read-timeout.yml
|
||||
- 5804-minor-changes-to-hpe-ilo-collection.yml
|
||||
- 5808-xml-children-parameter-does-not-exist.yml
|
||||
- 5811-clarify-bitwarden-error.yml
|
||||
- 5812-implement-updateconf-api-call.yml
|
||||
- 5818-nsupdate-fix-zone-lookup.yml
|
||||
- 5822-gem-uninstall-force.yml
|
||||
- 5843-terraform-validate-no-color.yml
|
||||
- 5844-iptables-state-refactor.yml
|
||||
- 5851-lookup-bitwarden-add-filter-by-collection-id-parameter.yml
|
||||
- 5883-sudoers-add-support-for-setenv-parameter.yml
|
||||
- 5886-redfish-correct-basic-auth-usage-on-session-creation.yml
|
||||
- 5888-update-key-title.yml
|
||||
- 5897-ipa_group-add-external-users.yml
|
||||
- 5907-fix-gitlab_runner-not-idempotent.yml
|
||||
- 5913-dig-caa.yml
|
||||
- 5914-dig-dnskey.yml
|
||||
- 6.3.0.yml
|
||||
modules:
|
||||
- description: Manages Out-Of-Band controllers using Open Composable API (OCAPI)
|
||||
name: ocapi_command
|
||||
namespace: ''
|
||||
- description: Manages Out-Of-Band controllers using Open Composable API (OCAPI)
|
||||
name: ocapi_info
|
||||
namespace: ''
|
||||
release_date: '2023-01-31'
|
||||
|
|
|
|||
|
|
@ -1,2 +0,0 @@
|
|||
minor_changes:
|
||||
- onepassword - support version 2 of the OnePassword CLI (https://github.com/ansible-collections/community.general/pull/4728)
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
bugfixes:
|
||||
- ldap_attrs - fix bug which caused a ``Bad search filter`` error. The error was occurring when the ldap attribute value contained special characters such as ``(`` or ``*`` (https://github.com/ansible-collections/community.general/issues/5434, https://github.com/ansible-collections/community.general/pull/5435).
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
minor_changes:
|
||||
- ldap_attrs - allow for DNs to have ``{x}`` prefix on first RDN (https://github.com/ansible-collections/community.general/issues/977, https://github.com/ansible-collections/community.general/pull/5450).
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
bugfixes:
|
||||
- iso_create - the module sometimes failed to add folders for Joliet and UDF formats (https://github.com/ansible-collections/community.general/issues/5275).
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
bugfixes:
|
||||
- snap - allow values in the ``options`` parameter to contain whitespaces (https://github.com/ansible-collections/community.general/pull/5475).
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
minor_changes:
|
||||
- ansible_galaxy_install - refactored module to use ``CmdRunner`` to execute ``ansible-galaxy`` (https://github.com/ansible-collections/community.general/pull/5477).
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
minor_changes:
|
||||
- hponcfg - refactored module to use ``CmdRunner`` to execute ``hponcfg`` (https://github.com/ansible-collections/community.general/pull/5483).
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
minor_changes:
|
||||
- mksysb - refactored module to use ``CmdRunner`` to execute ``mksysb`` (https://github.com/ansible-collections/community.general/pull/5484).
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
minor_changes:
|
||||
- cpanm - refactored module to use ``CmdRunner`` to execute ``cpanm`` (https://github.com/ansible-collections/community.general/pull/5485).
|
||||
|
|
@ -1,3 +0,0 @@
|
|||
release_summary: >-
|
||||
New major release of community.general with lots of bugfixes, new features, some removed deprecated features, and some other breaking changes.
|
||||
Please check the corresponding sections of the changelog for more details.
|
||||
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
namespace: community
|
||||
name: general
|
||||
version: 6.0.0
|
||||
version: 6.3.0
|
||||
readme: README.md
|
||||
authors:
|
||||
- Ansible (https://github.com/ansible)
|
||||
|
|
|
|||
|
|
@ -16,15 +16,15 @@ DOCUMENTATION = '''
|
|||
- cgroups
|
||||
short_description: Profiles maximum memory usage of tasks and full execution using cgroups
|
||||
description:
|
||||
- This is an ansible callback plugin that profiles maximum memory usage of ansible and individual tasks, and displays a recap at the end using cgroups
|
||||
- This is an ansible callback plugin that profiles maximum memory usage of ansible and individual tasks, and displays a recap at the end using cgroups.
|
||||
notes:
|
||||
- Requires ansible to be run from within a cgroup, such as with C(cgexec -g memory:ansible_profile ansible-playbook ...)
|
||||
- This cgroup should only be used by ansible to get accurate results
|
||||
- To create the cgroup, first use a command such as C(sudo cgcreate -a ec2-user:ec2-user -t ec2-user:ec2-user -g memory:ansible_profile)
|
||||
- Requires ansible to be run from within a cgroup, such as with C(cgexec -g memory:ansible_profile ansible-playbook ...).
|
||||
- This cgroup should only be used by ansible to get accurate results.
|
||||
- To create the cgroup, first use a command such as C(sudo cgcreate -a ec2-user:ec2-user -t ec2-user:ec2-user -g memory:ansible_profile).
|
||||
options:
|
||||
max_mem_file:
|
||||
required: true
|
||||
description: Path to cgroups C(memory.max_usage_in_bytes) file. Example C(/sys/fs/cgroup/memory/ansible_profile/memory.max_usage_in_bytes)
|
||||
description: Path to cgroups C(memory.max_usage_in_bytes) file. Example C(/sys/fs/cgroup/memory/ansible_profile/memory.max_usage_in_bytes).
|
||||
env:
|
||||
- name: CGROUP_MAX_MEM_FILE
|
||||
ini:
|
||||
|
|
@ -32,7 +32,7 @@ DOCUMENTATION = '''
|
|||
key: max_mem_file
|
||||
cur_mem_file:
|
||||
required: true
|
||||
description: Path to C(memory.usage_in_bytes) file. Example C(/sys/fs/cgroup/memory/ansible_profile/memory.usage_in_bytes)
|
||||
description: Path to C(memory.usage_in_bytes) file. Example C(/sys/fs/cgroup/memory/ansible_profile/memory.usage_in_bytes).
|
||||
env:
|
||||
- name: CGROUP_CUR_MEM_FILE
|
||||
ini:
|
||||
|
|
|
|||
|
|
@ -13,8 +13,8 @@ DOCUMENTATION = '''
|
|||
type: aggregate
|
||||
short_description: demo callback that adds play/task context
|
||||
description:
|
||||
- Displays some play and task context along with normal output
|
||||
- This is mostly for demo purposes
|
||||
- Displays some play and task context along with normal output.
|
||||
- This is mostly for demo purposes.
|
||||
requirements:
|
||||
- whitelist in configuration
|
||||
'''
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@ DOCUMENTATION = '''
|
|||
extends_documentation_fragment:
|
||||
- default_callback
|
||||
requirements:
|
||||
- set as stdout callback in ansible.cfg (stdout_callback = counter_enabled)
|
||||
- set as stdout callback in C(ansible.cfg) (C(stdout_callback = counter_enabled))
|
||||
'''
|
||||
|
||||
from ansible import constants as C
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ short_description: minimal stdout output
|
|||
extends_documentation_fragment:
|
||||
- default_callback
|
||||
description:
|
||||
- When in verbose mode it will act the same as the default callback
|
||||
- When in verbose mode it will act the same as the default callback.
|
||||
author:
|
||||
- Dag Wieers (@dagwieers)
|
||||
requirements:
|
||||
|
|
|
|||
|
|
@ -13,10 +13,10 @@ DOCUMENTATION = '''
|
|||
type: notification
|
||||
short_description: post task events to a jabber server
|
||||
description:
|
||||
- The chatty part of ChatOps with a Hipchat server as a target
|
||||
- The chatty part of ChatOps with a Hipchat server as a target.
|
||||
- This callback plugin sends status updates to a HipChat channel during playbook execution.
|
||||
requirements:
|
||||
- xmpp (python lib https://github.com/ArchipelProject/xmpppy)
|
||||
- xmpp (Python library U(https://github.com/ArchipelProject/xmpppy))
|
||||
options:
|
||||
server:
|
||||
description: connection info to jabber server
|
||||
|
|
|
|||
|
|
@ -13,10 +13,10 @@ DOCUMENTATION = '''
|
|||
type: notification
|
||||
short_description: write playbook output to log file
|
||||
description:
|
||||
- This callback writes playbook output to a file per host in the C(/var/log/ansible/hosts) directory
|
||||
- This callback writes playbook output to a file per host in the C(/var/log/ansible/hosts) directory.
|
||||
requirements:
|
||||
- Whitelist in configuration
|
||||
- A writeable /var/log/ansible/hosts directory by the user executing Ansible on the controller
|
||||
- A writeable C(/var/log/ansible/hosts) directory by the user executing Ansible on the controller
|
||||
options:
|
||||
log_folder:
|
||||
default: /var/log/ansible/hosts
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ __metaclass__ = type
|
|||
|
||||
DOCUMENTATION = '''
|
||||
name: loganalytics
|
||||
type: aggregate
|
||||
type: notification
|
||||
short_description: Posts task results to Azure Log Analytics
|
||||
author: "Cyrus Li (@zhcli) <cyrus1006@gmail.com>"
|
||||
description:
|
||||
|
|
@ -155,7 +155,7 @@ class AzureLogAnalyticsSource(object):
|
|||
|
||||
class CallbackModule(CallbackBase):
|
||||
CALLBACK_VERSION = 2.0
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'loganalytics'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
|
|
|||
|
|
@ -9,17 +9,17 @@ __metaclass__ = type
|
|||
DOCUMENTATION = '''
|
||||
author: Unknown (!UNKNOWN)
|
||||
name: logdna
|
||||
type: aggregate
|
||||
type: notification
|
||||
short_description: Sends playbook logs to LogDNA
|
||||
description:
|
||||
- This callback will report logs from playbook actions, tasks, and events to LogDNA (https://app.logdna.com)
|
||||
- This callback will report logs from playbook actions, tasks, and events to LogDNA (U(https://app.logdna.com)).
|
||||
requirements:
|
||||
- LogDNA Python Library (https://github.com/logdna/python)
|
||||
- LogDNA Python Library (U(https://github.com/logdna/python))
|
||||
- whitelisting in configuration
|
||||
options:
|
||||
conf_key:
|
||||
required: true
|
||||
description: LogDNA Ingestion Key
|
||||
description: LogDNA Ingestion Key.
|
||||
type: string
|
||||
env:
|
||||
- name: LOGDNA_INGESTION_KEY
|
||||
|
|
@ -28,7 +28,7 @@ DOCUMENTATION = '''
|
|||
key: conf_key
|
||||
plugin_ignore_errors:
|
||||
required: false
|
||||
description: Whether to ignore errors on failing or not
|
||||
description: Whether to ignore errors on failing or not.
|
||||
type: boolean
|
||||
env:
|
||||
- name: ANSIBLE_IGNORE_ERRORS
|
||||
|
|
@ -38,7 +38,7 @@ DOCUMENTATION = '''
|
|||
default: false
|
||||
conf_hostname:
|
||||
required: false
|
||||
description: Alternative Host Name; the current host name by default
|
||||
description: Alternative Host Name; the current host name by default.
|
||||
type: string
|
||||
env:
|
||||
- name: LOGDNA_HOSTNAME
|
||||
|
|
@ -47,7 +47,7 @@ DOCUMENTATION = '''
|
|||
key: conf_hostname
|
||||
conf_tags:
|
||||
required: false
|
||||
description: Tags
|
||||
description: Tags.
|
||||
type: string
|
||||
env:
|
||||
- name: LOGDNA_TAGS
|
||||
|
|
@ -111,7 +111,7 @@ def isJSONable(obj):
|
|||
class CallbackModule(CallbackBase):
|
||||
|
||||
CALLBACK_VERSION = 0.1
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'community.general.logdna'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
|
|
|||
|
|
@ -13,15 +13,15 @@ DOCUMENTATION = '''
|
|||
short_description: Sends events to Logentries
|
||||
description:
|
||||
- This callback plugin will generate JSON objects and send them to Logentries via TCP for auditing/debugging purposes.
|
||||
- Before 2.4, if you wanted to use an ini configuration, the file must be placed in the same directory as this plugin and named logentries.ini
|
||||
- Before 2.4, if you wanted to use an ini configuration, the file must be placed in the same directory as this plugin and named C(logentries.ini).
|
||||
- In 2.4 and above you can just put it in the main Ansible configuration file.
|
||||
requirements:
|
||||
- whitelisting in configuration
|
||||
- certifi (python library)
|
||||
- flatdict (python library), if you want to use the 'flatten' option
|
||||
- certifi (Python library)
|
||||
- flatdict (Python library), if you want to use the 'flatten' option
|
||||
options:
|
||||
api:
|
||||
description: URI to the Logentries API
|
||||
description: URI to the Logentries API.
|
||||
env:
|
||||
- name: LOGENTRIES_API
|
||||
default: data.logentries.com
|
||||
|
|
@ -29,7 +29,7 @@ DOCUMENTATION = '''
|
|||
- section: callback_logentries
|
||||
key: api
|
||||
port:
|
||||
description: HTTP port to use when connecting to the API
|
||||
description: HTTP port to use when connecting to the API.
|
||||
env:
|
||||
- name: LOGENTRIES_PORT
|
||||
default: 80
|
||||
|
|
@ -37,7 +37,7 @@ DOCUMENTATION = '''
|
|||
- section: callback_logentries
|
||||
key: port
|
||||
tls_port:
|
||||
description: Port to use when connecting to the API when TLS is enabled
|
||||
description: Port to use when connecting to the API when TLS is enabled.
|
||||
env:
|
||||
- name: LOGENTRIES_TLS_PORT
|
||||
default: 443
|
||||
|
|
@ -45,7 +45,7 @@ DOCUMENTATION = '''
|
|||
- section: callback_logentries
|
||||
key: tls_port
|
||||
token:
|
||||
description: The logentries "TCP token"
|
||||
description: The logentries C(TCP token).
|
||||
env:
|
||||
- name: LOGENTRIES_ANSIBLE_TOKEN
|
||||
required: true
|
||||
|
|
@ -54,7 +54,7 @@ DOCUMENTATION = '''
|
|||
key: token
|
||||
use_tls:
|
||||
description:
|
||||
- Toggle to decide whether to use TLS to encrypt the communications with the API server
|
||||
- Toggle to decide whether to use TLS to encrypt the communications with the API server.
|
||||
env:
|
||||
- name: LOGENTRIES_USE_TLS
|
||||
default: false
|
||||
|
|
@ -63,7 +63,7 @@ DOCUMENTATION = '''
|
|||
- section: callback_logentries
|
||||
key: use_tls
|
||||
flatten:
|
||||
description: flatten complex data structures into a single dictionary with complex keys
|
||||
description: Flatten complex data structures into a single dictionary with complex keys.
|
||||
type: boolean
|
||||
default: false
|
||||
env:
|
||||
|
|
|
|||
|
|
@ -13,13 +13,13 @@ DOCUMENTATION = r'''
|
|||
type: notification
|
||||
short_description: Sends events to Logstash
|
||||
description:
|
||||
- This callback will report facts and task events to Logstash https://www.elastic.co/products/logstash
|
||||
- This callback will report facts and task events to Logstash U(https://www.elastic.co/products/logstash).
|
||||
requirements:
|
||||
- whitelisting in configuration
|
||||
- logstash (python library)
|
||||
- logstash (Python library)
|
||||
options:
|
||||
server:
|
||||
description: Address of the Logstash server
|
||||
description: Address of the Logstash server.
|
||||
env:
|
||||
- name: LOGSTASH_SERVER
|
||||
ini:
|
||||
|
|
@ -28,7 +28,7 @@ DOCUMENTATION = r'''
|
|||
version_added: 1.0.0
|
||||
default: localhost
|
||||
port:
|
||||
description: Port on which logstash is listening
|
||||
description: Port on which logstash is listening.
|
||||
env:
|
||||
- name: LOGSTASH_PORT
|
||||
ini:
|
||||
|
|
@ -37,7 +37,7 @@ DOCUMENTATION = r'''
|
|||
version_added: 1.0.0
|
||||
default: 5000
|
||||
type:
|
||||
description: Message type
|
||||
description: Message type.
|
||||
env:
|
||||
- name: LOGSTASH_TYPE
|
||||
ini:
|
||||
|
|
@ -54,7 +54,7 @@ DOCUMENTATION = r'''
|
|||
env:
|
||||
- name: LOGSTASH_PRE_COMMAND
|
||||
format_version:
|
||||
description: Logging format
|
||||
description: Logging format.
|
||||
type: str
|
||||
version_added: 2.0.0
|
||||
ini:
|
||||
|
|
@ -113,7 +113,7 @@ from ansible.plugins.callback import CallbackBase
|
|||
class CallbackModule(CallbackBase):
|
||||
|
||||
CALLBACK_VERSION = 2.0
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'community.general.logstash'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ DOCUMENTATION = '''
|
|||
- set as main display callback
|
||||
short_description: Don't display stuff to screen
|
||||
description:
|
||||
- This callback prevents outputing events to screen
|
||||
- This callback prevents outputting events to screen.
|
||||
'''
|
||||
|
||||
from ansible.plugins.callback import CallbackBase
|
||||
|
|
|
|||
|
|
@ -14,12 +14,12 @@ DOCUMENTATION = '''
|
|||
type: notification
|
||||
requirements:
|
||||
- whitelisting in configuration
|
||||
- the '/usr/bin/say' command line program (standard on macOS) or 'espeak' command line program
|
||||
- the C(/usr/bin/say) command line program (standard on macOS) or C(espeak) command line program
|
||||
short_description: notify using software speech synthesizer
|
||||
description:
|
||||
- This plugin will use the 'say' or 'espeak' program to "speak" about play events.
|
||||
- This plugin will use the C(say) or C(espeak) program to "speak" about play events.
|
||||
notes:
|
||||
- In 2.8, this callback has been renamed from C(osx_say) into M(community.general.say).
|
||||
- In Ansible 2.8, this callback has been renamed from C(osx_say) into M(community.general.say).
|
||||
'''
|
||||
|
||||
import platform
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ DOCUMENTATION = '''
|
|||
options:
|
||||
nocolor:
|
||||
default: false
|
||||
description: This setting allows suppressing colorizing output
|
||||
description: This setting allows suppressing colorizing output.
|
||||
env:
|
||||
- name: ANSIBLE_NOCOLOR
|
||||
- name: ANSIBLE_SELECTIVE_DONT_COLORIZE
|
||||
|
|
|
|||
|
|
@ -18,11 +18,11 @@ DOCUMENTATION = '''
|
|||
short_description: Sends play events to a Slack channel
|
||||
description:
|
||||
- This is an ansible callback plugin that sends status updates to a Slack channel during playbook execution.
|
||||
- Before 2.4 only environment variables were available for configuring this plugin
|
||||
- Before Ansible 2.4 only environment variables were available for configuring this plugin.
|
||||
options:
|
||||
webhook_url:
|
||||
required: true
|
||||
description: Slack Webhook URL
|
||||
description: Slack Webhook URL.
|
||||
env:
|
||||
- name: SLACK_WEBHOOK_URL
|
||||
ini:
|
||||
|
|
@ -45,7 +45,7 @@ DOCUMENTATION = '''
|
|||
- section: callback_slack
|
||||
key: username
|
||||
validate_certs:
|
||||
description: validate the SSL certificate of the Slack server. (For HTTPS URLs)
|
||||
description: Validate the SSL certificate of the Slack server for HTTPS URLs.
|
||||
env:
|
||||
- name: SLACK_VALIDATE_CERTS
|
||||
ini:
|
||||
|
|
|
|||
|
|
@ -8,27 +8,27 @@ __metaclass__ = type
|
|||
|
||||
DOCUMENTATION = '''
|
||||
name: splunk
|
||||
type: aggregate
|
||||
type: notification
|
||||
short_description: Sends task result events to Splunk HTTP Event Collector
|
||||
author: "Stuart Hirst (!UNKNOWN) <support@convergingdata.com>"
|
||||
description:
|
||||
- This callback plugin will send task results as JSON formatted events to a Splunk HTTP collector.
|
||||
- The companion Splunk Monitoring & Diagnostics App is available here "https://splunkbase.splunk.com/app/4023/"
|
||||
- The companion Splunk Monitoring & Diagnostics App is available here U(https://splunkbase.splunk.com/app/4023/).
|
||||
- Credit to "Ryan Currah (@ryancurrah)" for original source upon which this is based.
|
||||
requirements:
|
||||
- Whitelisting this callback plugin
|
||||
- 'Create a HTTP Event Collector in Splunk'
|
||||
- 'Define the url and token in ansible.cfg'
|
||||
- 'Define the URL and token in C(ansible.cfg)'
|
||||
options:
|
||||
url:
|
||||
description: URL to the Splunk HTTP collector source
|
||||
description: URL to the Splunk HTTP collector source.
|
||||
env:
|
||||
- name: SPLUNK_URL
|
||||
ini:
|
||||
- section: callback_splunk
|
||||
key: url
|
||||
authtoken:
|
||||
description: Token to authenticate the connection to the Splunk HTTP collector
|
||||
description: Token to authenticate the connection to the Splunk HTTP collector.
|
||||
env:
|
||||
- name: SPLUNK_AUTHTOKEN
|
||||
ini:
|
||||
|
|
@ -48,7 +48,7 @@ DOCUMENTATION = '''
|
|||
version_added: '1.0.0'
|
||||
include_milliseconds:
|
||||
description: Whether to include milliseconds as part of the generated timestamp field in the event
|
||||
sent to the Splunk HTTP collector
|
||||
sent to the Splunk HTTP collector.
|
||||
env:
|
||||
- name: SPLUNK_INCLUDE_MILLISECONDS
|
||||
ini:
|
||||
|
|
@ -165,7 +165,7 @@ class SplunkHTTPCollectorSource(object):
|
|||
|
||||
class CallbackModule(CallbackBase):
|
||||
CALLBACK_VERSION = 2.0
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'community.general.splunk'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
|
|
|||
|
|
@ -8,18 +8,18 @@ __metaclass__ = type
|
|||
|
||||
DOCUMENTATION = '''
|
||||
name: sumologic
|
||||
type: aggregate
|
||||
type: notification
|
||||
short_description: Sends task result events to Sumologic
|
||||
author: "Ryan Currah (@ryancurrah)"
|
||||
description:
|
||||
- This callback plugin will send task results as JSON formatted events to a Sumologic HTTP collector source
|
||||
- This callback plugin will send task results as JSON formatted events to a Sumologic HTTP collector source.
|
||||
requirements:
|
||||
- Whitelisting this callback plugin
|
||||
- 'Create a HTTP collector source in Sumologic and specify a custom timestamp format of C(yyyy-MM-dd HH:mm:ss ZZZZ) and a custom timestamp locator
|
||||
of C("timestamp": "(.*)")'
|
||||
options:
|
||||
url:
|
||||
description: URL to the Sumologic HTTP collector source
|
||||
description: URL to the Sumologic HTTP collector source.
|
||||
env:
|
||||
- name: SUMOLOGIC_URL
|
||||
ini:
|
||||
|
|
@ -28,7 +28,7 @@ options:
|
|||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
examples: >
|
||||
examples: |
|
||||
To enable, add this to your ansible.cfg file in the defaults block
|
||||
[defaults]
|
||||
callback_whitelist = community.general.sumologic
|
||||
|
|
@ -111,7 +111,7 @@ class SumologicHTTPCollectorSource(object):
|
|||
|
||||
class CallbackModule(CallbackBase):
|
||||
CALLBACK_VERSION = 2.0
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'community.general.sumologic'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
|
|
|||
|
|
@ -15,11 +15,11 @@ DOCUMENTATION = '''
|
|||
- whitelist in configuration
|
||||
short_description: sends JSON events to syslog
|
||||
description:
|
||||
- This plugin logs ansible-playbook and ansible runs to a syslog server in JSON format
|
||||
- Before Ansible 2.9 only environment variables were available for configuration
|
||||
- This plugin logs ansible-playbook and ansible runs to a syslog server in JSON format.
|
||||
- Before Ansible 2.9 only environment variables were available for configuration.
|
||||
options:
|
||||
server:
|
||||
description: syslog server that will receive the event
|
||||
description: Syslog server that will receive the event.
|
||||
env:
|
||||
- name: SYSLOG_SERVER
|
||||
default: localhost
|
||||
|
|
@ -27,7 +27,7 @@ DOCUMENTATION = '''
|
|||
- section: callback_syslog_json
|
||||
key: syslog_server
|
||||
port:
|
||||
description: port on which the syslog server is listening
|
||||
description: Port on which the syslog server is listening.
|
||||
env:
|
||||
- name: SYSLOG_PORT
|
||||
default: 514
|
||||
|
|
@ -35,7 +35,7 @@ DOCUMENTATION = '''
|
|||
- section: callback_syslog_json
|
||||
key: syslog_port
|
||||
facility:
|
||||
description: syslog facility to log as
|
||||
description: Syslog facility to log as.
|
||||
env:
|
||||
- name: SYSLOG_FACILITY
|
||||
default: user
|
||||
|
|
@ -71,7 +71,7 @@ class CallbackModule(CallbackBase):
|
|||
"""
|
||||
|
||||
CALLBACK_VERSION = 2.0
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'community.general.syslog_json'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
|
|
|||
|
|
@ -63,7 +63,7 @@ class CallbackModule(CallbackModule_default):
|
|||
|
||||
def _preprocess_result(self, result):
|
||||
self.delegated_vars = result._result.get('_ansible_delegated_vars', None)
|
||||
self._handle_exception(result._result, use_stderr=self.display_failed_stderr)
|
||||
self._handle_exception(result._result, use_stderr=self.get_option('display_failed_stderr'))
|
||||
self._handle_warnings(result._result)
|
||||
|
||||
def _process_result_output(self, result, msg):
|
||||
|
|
@ -109,7 +109,7 @@ class CallbackModule(CallbackModule_default):
|
|||
self._display.display(msg)
|
||||
|
||||
def v2_runner_on_skipped(self, result, ignore_errors=False):
|
||||
if self.display_skipped_hosts:
|
||||
if self.get_option('display_skipped_hosts'):
|
||||
self._preprocess_result(result)
|
||||
display_color = C.COLOR_SKIP
|
||||
msg = "skipped"
|
||||
|
|
@ -128,7 +128,7 @@ class CallbackModule(CallbackModule_default):
|
|||
msg += " | item: %s" % (item_value,)
|
||||
|
||||
task_result = self._process_result_output(result, msg)
|
||||
self._display.display(" " + task_result, display_color, stderr=self.display_failed_stderr)
|
||||
self._display.display(" " + task_result, display_color, stderr=self.get_option('display_failed_stderr'))
|
||||
|
||||
def v2_runner_on_ok(self, result, msg="ok", display_color=C.COLOR_OK):
|
||||
self._preprocess_result(result)
|
||||
|
|
@ -142,7 +142,7 @@ class CallbackModule(CallbackModule_default):
|
|||
display_color = C.COLOR_CHANGED
|
||||
task_result = self._process_result_output(result, msg)
|
||||
self._display.display(" " + task_result, display_color)
|
||||
elif self.display_ok_hosts:
|
||||
elif self.get_option('display_ok_hosts'):
|
||||
task_result = self._process_result_output(result, msg)
|
||||
self._display.display(" " + task_result, display_color)
|
||||
|
||||
|
|
@ -162,7 +162,7 @@ class CallbackModule(CallbackModule_default):
|
|||
display_color = C.COLOR_UNREACHABLE
|
||||
task_result = self._process_result_output(result, msg)
|
||||
|
||||
self._display.display(" " + task_result, display_color, stderr=self.display_failed_stderr)
|
||||
self._display.display(" " + task_result, display_color, stderr=self.get_option('display_failed_stderr'))
|
||||
|
||||
def v2_on_file_diff(self, result):
|
||||
if result._task.loop and 'results' in result._result:
|
||||
|
|
@ -205,7 +205,7 @@ class CallbackModule(CallbackModule_default):
|
|||
colorize(u'ignored', t['ignored'], None)),
|
||||
log_only=True
|
||||
)
|
||||
if stats.custom and self.show_custom_stats:
|
||||
if stats.custom and self.get_option('show_custom_stats'):
|
||||
self._display.banner("CUSTOM STATS: ")
|
||||
# per host
|
||||
# TODO: come up with 'pretty format'
|
||||
|
|
|
|||
|
|
@@ -11,7 +11,7 @@ DOCUMENTATION = '''
    author: Unknown (!UNKNOWN)
    name: yaml
    type: stdout
-   short_description: yaml-ized Ansible screen output
+   short_description: YAML-ized Ansible screen output
    description:
      - Ansible output that can be quite a bit easier to read than the
        default JSON formatting.
|
|
|
|||
|
|
@ -22,6 +22,7 @@ DOCUMENTATION = '''
|
|||
- The path of the chroot you want to access.
|
||||
default: inventory_hostname
|
||||
vars:
|
||||
- name: inventory_hostname
|
||||
- name: ansible_host
|
||||
executable:
|
||||
description:
|
||||
|
|
|
|||
|
|
@ -20,9 +20,13 @@ attributes:
|
|||
description: Will return details on what has changed (or possibly needs changing in C(check_mode)), when in diff mode.
|
||||
'''
|
||||
|
||||
# platform:
|
||||
# description: Target OS/families that can be operated against.
|
||||
# support: N/A
|
||||
PLATFORM = r'''
|
||||
options: {}
|
||||
attributes:
|
||||
platform:
|
||||
description: Target OS/families that can be operated against.
|
||||
support: N/A
|
||||
'''
|
||||
|
||||
# Should be used together with the standard fragment
|
||||
INFO_MODULE = r'''
|
||||
|
|
|
|||
|
|
@ -60,7 +60,7 @@ options:
|
|||
sasl_class:
|
||||
description:
|
||||
- The class to use for SASL authentication.
|
||||
- possible choices are C(external), C(gssapi).
|
||||
- Possible choices are C(external), C(gssapi).
|
||||
type: str
|
||||
choices: ['external', 'gssapi']
|
||||
default: external
|
||||
|
|
|
|||
|
|
@ -26,6 +26,7 @@ DOCUMENTATION = '''
|
|||
description:
|
||||
- The correct parser for the input data.
|
||||
- For example C(ifconfig).
|
||||
- "Note: use underscores instead of dashes (if any) in the parser module name."
|
||||
- See U(https://github.com/kellyjonbrazil/jc#parsers) for the latest list of parsers.
|
||||
type: string
|
||||
required: true
|
||||
|
|
|
|||
|
|
@ -55,6 +55,11 @@ DOCUMENTATION = r'''
|
|||
type: str
|
||||
default: none
|
||||
choices: [ 'STOPPED', 'STARTING', 'RUNNING', 'none' ]
|
||||
project:
|
||||
description: Filter the instance according to the given project.
|
||||
type: str
|
||||
default: default
|
||||
version_added: 6.2.0
|
||||
type_filter:
|
||||
description:
|
||||
- Filter the instances by type C(virtual-machine), C(container) or C(both).
|
||||
|
|
@ -140,6 +145,9 @@ groupby:
|
|||
vlan666:
|
||||
type: vlanid
|
||||
attribute: 666
|
||||
projectInternals:
|
||||
type: project
|
||||
attribute: internals
|
||||
'''
|
||||
|
||||
import binascii
|
||||
|
|
@ -153,6 +161,7 @@ from ansible.module_utils.common.text.converters import to_native, to_text
|
|||
from ansible.module_utils.common.dict_transformations import dict_merge
|
||||
from ansible.module_utils.six import raise_from
|
||||
from ansible.errors import AnsibleError, AnsibleParserError
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||
from ansible_collections.community.general.plugins.module_utils.lxd import LXDClient, LXDClientException
|
||||
|
||||
try:
|
||||
|
|
@ -330,7 +339,15 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
# "status_code": 200,
|
||||
# "type": "sync"
|
||||
# }
|
||||
instances = self.socket.do('GET', '/1.0/instances')
|
||||
url = '/1.0/instances'
|
||||
if self.project:
|
||||
url = url + '?{0}'.format(urlencode(dict(project=self.project)))
|
||||
|
||||
instances = self.socket.do('GET', url)
|
||||
|
||||
if self.project:
|
||||
return [m.split('/')[3].split('?')[0] for m in instances['metadata']]
|
||||
|
||||
return [m.split('/')[3] for m in instances['metadata']]
|
||||
|
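For reference, a small self-contained sketch (outside the plugin, using the standard-library urllib instead of Ansible's six shim) of the project handling introduced above: the configured project becomes a query parameter on the /1.0/instances call, and the instance names LXD then returns carry that query string, which has to be stripped off again.

from urllib.parse import urlencode

def build_instances_url(project=None):
    # Same construction as in the plugin: append ?project=... only when a project is set.
    url = '/1.0/instances'
    if project:
        url = url + '?{0}'.format(urlencode(dict(project=project)))
    return url

def extract_names(metadata, project=None):
    # Entries look like '/1.0/instances/<name>' or '/1.0/instances/<name>?project=<p>'.
    if project:
        return [m.split('/')[3].split('?')[0] for m in metadata]
    return [m.split('/')[3] for m in metadata]

print(build_instances_url('internals'))                                       # /1.0/instances?project=internals
print(extract_names(['/1.0/instances/vm1?project=internals'], 'internals'))   # ['vm1']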
||||
def _get_config(self, branch, name):
|
||||
|
|
@ -351,9 +368,11 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
dict(config): Config of the instance"""
|
||||
config = {}
|
||||
if isinstance(branch, (tuple, list)):
|
||||
config[name] = {branch[1]: self.socket.do('GET', '/1.0/{0}/{1}/{2}'.format(to_native(branch[0]), to_native(name), to_native(branch[1])))}
|
||||
config[name] = {branch[1]: self.socket.do(
|
||||
'GET', '/1.0/{0}/{1}/{2}?{3}'.format(to_native(branch[0]), to_native(name), to_native(branch[1]), urlencode(dict(project=self.project))))}
|
||||
else:
|
||||
config[name] = {branch: self.socket.do('GET', '/1.0/{0}/{1}'.format(to_native(branch), to_native(name)))}
|
||||
config[name] = {branch: self.socket.do(
|
||||
'GET', '/1.0/{0}/{1}?{2}'.format(to_native(branch), to_native(name), urlencode(dict(project=self.project))))}
|
||||
return config
|
||||
|
||||
def get_instance_data(self, names):
|
||||
|
|
@ -583,6 +602,8 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
self._set_data_entry(instance_name, 'network_interfaces', self.extract_network_information_from_instance_config(instance_name))
|
||||
self._set_data_entry(instance_name, 'preferred_interface', self.get_prefered_instance_network_interface(instance_name))
|
||||
self._set_data_entry(instance_name, 'vlan_ids', self.get_instance_vlans(instance_name))
|
||||
self._set_data_entry(instance_name, 'project', self._get_data_entry(
|
||||
'instances/{0}/instances/metadata/project'.format(instance_name)))
|
||||
|
||||
def build_inventory_network(self, instance_name):
|
||||
"""Add the network interfaces of the instance to the inventory
|
||||
|
|
@ -686,6 +707,8 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
# add VLAN_ID information
|
||||
if self._get_data_entry('inventory/{0}/vlan_ids'.format(instance_name)):
|
||||
self.inventory.set_variable(instance_name, 'ansible_lxd_vlan_ids', self._get_data_entry('inventory/{0}/vlan_ids'.format(instance_name)))
|
||||
# add project
|
||||
self.inventory.set_variable(instance_name, 'ansible_lxd_project', self._get_data_entry('inventory/{0}/project'.format(instance_name)))
|
||||
|
||||
def build_inventory_groups_location(self, group_name):
|
||||
"""create group by attribute: location
|
||||
|
|
@ -761,6 +784,28 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
# Ignore invalid IP addresses returned by lxd
|
||||
pass
|
||||
|
||||
def build_inventory_groups_project(self, group_name):
|
||||
"""create group by attribute: project
|
||||
|
||||
Args:
|
||||
str(group_name): Group name
|
||||
Kwargs:
|
||||
None
|
||||
Raises:
|
||||
None
|
||||
Returns:
|
||||
None"""
|
||||
# maybe we just want to expand one group
|
||||
if group_name not in self.inventory.groups:
|
||||
self.inventory.add_group(group_name)
|
||||
|
||||
gen_instances = [
|
||||
instance_name for instance_name in self.inventory.hosts
|
||||
if 'ansible_lxd_project' in self.inventory.get_host(instance_name).get_vars()]
|
||||
for instance_name in gen_instances:
|
||||
if self.groupby[group_name].get('attribute').lower() == self.inventory.get_host(instance_name).get_vars().get('ansible_lxd_project'):
|
||||
self.inventory.add_child(group_name, instance_name)
|
||||
|
||||
def build_inventory_groups_os(self, group_name):
|
||||
"""create group by attribute: os
|
||||
|
||||
|
|
@ -899,6 +944,7 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
* 'profile'
|
||||
* 'vlanid'
|
||||
* 'type'
|
||||
* 'project'
|
||||
|
||||
Args:
|
||||
str(group_name): Group name
|
||||
|
|
@ -926,6 +972,8 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
self.build_inventory_groups_vlanid(group_name)
|
||||
elif self.groupby[group_name].get('type') == 'type':
|
||||
self.build_inventory_groups_type(group_name)
|
||||
elif self.groupby[group_name].get('type') == 'project':
|
||||
self.build_inventory_groups_project(group_name)
|
||||
else:
|
||||
raise AnsibleParserError('Unknown group type: {0}'.format(to_native(group_name)))
|
||||
|
||||
|
|
@ -1032,6 +1080,7 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
try:
|
||||
self.client_key = self.get_option('client_key')
|
||||
self.client_cert = self.get_option('client_cert')
|
||||
self.project = self.get_option('project')
|
||||
self.debug = self.DEBUG
|
||||
self.data = {} # store for inventory-data
|
||||
self.groupby = self.get_option('groupby')
|
||||
|
|
|
|||
|
|
@ -46,6 +46,25 @@ DOCUMENTATION = '''
|
|||
description: use IPv6 type addresses
|
||||
type: boolean
|
||||
default: true
|
||||
udp_scan:
|
||||
description:
|
||||
- Scan via UDP.
|
||||
- Depending on your system you might need I(sudo=true) for this to work.
|
||||
type: boolean
|
||||
default: false
|
||||
version_added: 6.1.0
|
||||
icmp_timestamp:
|
||||
description:
|
||||
- Scan via ICMP Timestamp (C(-PP)).
|
||||
- Depending on your system you might need I(sudo=true) for this to work.
|
||||
type: boolean
|
||||
default: false
|
||||
version_added: 6.1.0
|
||||
dns_resolve:
|
||||
description: Whether to always (C(true)) or never (C(false)) do DNS resolution.
|
||||
type: boolean
|
||||
default: false
|
||||
version_added: 6.1.0
|
||||
notes:
|
||||
- At least one of ipv4 or ipv6 is required to be True, both can be True, but they cannot both be False.
|
||||
- 'TODO: add OS fingerprinting'
|
||||
|
|
@ -166,6 +185,15 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
|||
cmd.append('--exclude')
|
||||
cmd.append(','.join(self._options['exclude']))
|
||||
|
||||
if self._options['dns_resolve']:
|
||||
cmd.append('-n')
|
||||
|
||||
if self._options['udp_scan']:
|
||||
cmd.append('-sU')
|
||||
|
||||
if self._options['icmp_timestamp']:
|
||||
cmd.append('-PP')
|
||||
|
||||
cmd.append(self._options['address'])
|
||||
try:
|
||||
# execute
|
||||
|
|
|
|||
|
|
@@ -408,7 +408,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
                stripped_value = value.strip()
                if stripped_value:
                    parsed_key = key + "_parsed"
-                   properties[parsed_key] = [tag.strip() for tag in stripped_value.split(",")]
+                   properties[parsed_key] = [tag.strip() for tag in stripped_value.replace(',', ';').split(";")]

        # The first field in the agent string tells you whether the agent is enabled
        # the rest of the comma separated string is extra config for the agent.
@@ -615,7 +615,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
        for o in ('url', 'user', 'password', 'token_id', 'token_secret'):
            v = self.get_option(o)
            if self.templar.is_template(v):
-               v = self.templar.template(v, disable_looups=False)
+               v = self.templar.template(v, disable_lookups=False)
            setattr(self, 'proxmox_%s' % o, v)

        # some more cleanup and validation
||||
|
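A minimal stand-alone illustration of the tag-splitting change above (the helper name is made up, it is not part of the plugin): Proxmox may return tags separated by commas or semicolons, so commas are normalised to semicolons before splitting.

def parse_tags(value):
    stripped_value = value.strip()
    if not stripped_value:
        return []
    return [tag.strip() for tag in stripped_value.replace(',', ';').split(';')]

print(parse_tags("web, db"))   # ['web', 'db']
print(parse_tags("web;db"))    # ['web', 'db']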
|
|
|||
|
|
@ -28,8 +28,12 @@ DOCUMENTATION = """
|
|||
default: name
|
||||
version_added: 5.7.0
|
||||
field:
|
||||
description: Field to fetch; leave unset to fetch whole response.
|
||||
description: Field to fetch. Leave unset to fetch whole response.
|
||||
type: str
|
||||
collection_id:
|
||||
description: Collection ID to filter results by collection. Leave unset to skip filtering.
|
||||
type: str
|
||||
version_added: 6.3.0
|
||||
"""
|
||||
|
||||
EXAMPLES = """
|
||||
|
|
@ -43,10 +47,20 @@ EXAMPLES = """
|
|||
msg: >-
|
||||
{{ lookup('community.general.bitwarden', 'bafba515-af11-47e6-abe3-af1200cd18b2', search='id', field='password') }}
|
||||
|
||||
- name: "Get 'password' from Bitwarden record named 'a_test' from collection"
|
||||
ansible.builtin.debug:
|
||||
msg: >-
|
||||
{{ lookup('community.general.bitwarden', 'a_test', field='password', collection_id='bafba515-af11-47e6-abe3-af1200cd18b2') }}
|
||||
|
||||
- name: "Get full Bitwarden record named 'a_test'"
|
||||
ansible.builtin.debug:
|
||||
msg: >-
|
||||
{{ lookup('community.general.bitwarden', 'a_test') }}
|
||||
|
||||
- name: "Get custom field 'api_key' from Bitwarden record named 'a_test'"
|
||||
ansible.builtin.debug:
|
||||
msg: >-
|
||||
{{ lookup('community.general.bitwarden', 'a_test', field='api_key') }}
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
|
|
@ -78,7 +92,7 @@ class Bitwarden(object):
|
|||
return self._cli_path
|
||||
|
||||
@property
|
||||
def logged_in(self):
|
||||
def unlocked(self):
|
||||
out, err = self._run(['status'], stdin="")
|
||||
decoded = AnsibleJSONDecoder().raw_decode(out)[0]
|
||||
return decoded['status'] == 'unlocked'
|
||||
|
|
@ -91,10 +105,17 @@ class Bitwarden(object):
|
|||
raise BitwardenException(err)
|
||||
return to_text(out, errors='surrogate_or_strict'), to_text(err, errors='surrogate_or_strict')
|
||||
|
||||
def _get_matches(self, search_value, search_field):
|
||||
def _get_matches(self, search_value, search_field, collection_id):
|
||||
"""Return matching records whose search_field is equal to key.
|
||||
"""
|
||||
out, err = self._run(['list', 'items', '--search', search_value])
|
||||
|
||||
# Prepare set of params for Bitwarden CLI
|
||||
params = ['list', 'items', '--search', search_value]
|
||||
|
||||
if collection_id:
|
||||
params.extend(['--collectionid', collection_id])
|
||||
|
||||
out, err = self._run(params)
|
||||
|
||||
# This includes things that matched in different fields.
|
||||
initial_matches = AnsibleJSONDecoder().raw_decode(out)[0]
|
||||
|
|
@ -102,17 +123,27 @@ class Bitwarden(object):
|
|||
# Filter to only include results from the right field.
|
||||
return [item for item in initial_matches if item[search_field] == search_value]
|
||||
|
||||
def get_field(self, field, search_value, search_field="name"):
|
||||
"""Return a list of the specified field for records whose search_field match search_value.
|
||||
def get_field(self, field, search_value, search_field="name", collection_id=None):
|
||||
"""Return a list of the specified field for records whose search_field match search_value
|
||||
and filtered by collection if collection has been provided.
|
||||
|
||||
If field is None, return the whole record for each match.
|
||||
"""
|
||||
matches = self._get_matches(search_value, search_field)
|
||||
matches = self._get_matches(search_value, search_field, collection_id)
|
||||
|
||||
if field:
|
||||
if field in ['autofillOnPageLoad', 'password', 'passwordRevisionDate', 'totp', 'uris', 'username']:
|
||||
return [match['login'][field] for match in matches]
|
||||
|
||||
return matches
|
||||
elif not field:
|
||||
return matches
|
||||
else:
|
||||
custom_field_matches = []
|
||||
for match in matches:
|
||||
for custom_field in match['fields']:
|
||||
if custom_field['name'] == field:
|
||||
custom_field_matches.append(custom_field['value'])
|
||||
if matches and not custom_field_matches:
|
||||
raise AnsibleError("Custom field {field} does not exist in {search_value}".format(field=field, search_value=search_value))
|
||||
return custom_field_matches
|
||||
|
||||
|
||||
class LookupModule(LookupBase):
|
||||
|
|
@ -121,10 +152,11 @@ class LookupModule(LookupBase):
|
|||
self.set_options(var_options=variables, direct=kwargs)
|
||||
field = self.get_option('field')
|
||||
search_field = self.get_option('search')
|
||||
if not _bitwarden.logged_in:
|
||||
raise AnsibleError("Not logged into Bitwarden. Run 'bw login'.")
|
||||
collection_id = self.get_option('collection_id')
|
||||
if not _bitwarden.unlocked:
|
||||
raise AnsibleError("Bitwarden Vault locked. Run 'bw unlock'.")
|
||||
|
||||
return [_bitwarden.get_field(field, term, search_field) for term in terms]
|
||||
return [_bitwarden.get_field(field, term, search_field, collection_id) for term in terms]
|
||||
|
||||
|
||||
_bitwarden = Bitwarden()
|
||||
|
|
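As an aside, a hedged sketch (a hypothetical helper that only mirrors the _get_matches() change above) of how the new collection_id option narrows the bw CLI invocation:

def build_bw_params(search_value, collection_id=None):
    # Mirrors _get_matches(): the optional collection id becomes --collectionid.
    params = ['list', 'items', '--search', search_value]
    if collection_id:
        params.extend(['--collectionid', collection_id])
    return params

print(build_bw_params('a_test'))
print(build_bw_params('a_test', 'bafba515-af11-47e6-abe3-af1200cd18b2'))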
|
|||
|
|
@ -125,8 +125,16 @@ from ansible.errors import AnsibleLookupError
|
|||
from ansible.module_utils.common._collections_compat import Mapping, Sequence
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.plugins.lookup import LookupBase
|
||||
from ansible.release import __version__ as ansible_version
|
||||
from ansible.template import Templar
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
|
||||
|
||||
|
||||
# Whether Templar has a cache, which can be controlled by Templar.template()'s cache option.
|
||||
# The cache was removed for ansible-core 2.14 (https://github.com/ansible/ansible/pull/78419)
|
||||
_TEMPLAR_HAS_TEMPLATE_CACHE = LooseVersion(ansible_version) < LooseVersion('2.14.0')
|
||||
|
||||
|
||||
class LookupModule(LookupBase):
|
||||
def __evaluate(self, expression, templar, variables):
|
||||
|
|
@ -136,7 +144,10 @@ class LookupModule(LookupBase):
|
|||
``variables`` are the variables to use.
|
||||
"""
|
||||
templar.available_variables = variables or {}
|
||||
return templar.template("{0}{1}{2}".format("{{", expression, "}}"), cache=False)
|
||||
expression = "{0}{1}{2}".format("{{", expression, "}}")
|
||||
if _TEMPLAR_HAS_TEMPLATE_CACHE:
|
||||
return templar.template(expression, cache=False)
|
||||
return templar.template(expression)
|
||||
|
||||
def __process(self, result, terms, index, current, templar, variables):
|
||||
"""Fills ``result`` list with evaluated items.
|
||||
|
|
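A rough stand-alone sketch of the version gate used above; the real code uses the collection's vendored LooseVersion, here a plain tuple comparison stands in for it, and the Templar instance is assumed to be supplied by the caller.

from ansible.release import __version__ as ansible_version

# ansible-core 2.14 removed Templar.template()'s cache parameter, so the kwarg
# may only be passed on older cores.
_core = tuple(int(x) for x in ansible_version.split('.')[:2])
_TEMPLAR_HAS_TEMPLATE_CACHE = _core < (2, 14)

def evaluate(templar, expression, variables=None):
    templar.available_variables = variables or {}
    expression = "{{" + expression + "}}"
    if _TEMPLAR_HAS_TEMPLATE_CACHE:
        return templar.template(expression, cache=False)
    return templar.template(expression)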
|
|||
|
|
@ -35,9 +35,10 @@ DOCUMENTATION = '''
|
|||
description:
|
||||
- Record type to query.
|
||||
- C(DLV) has been removed in community.general 6.0.0.
|
||||
- C(CAA) has been added in community.general 6.3.0.
|
||||
type: str
|
||||
default: 'A'
|
||||
choices: [A, ALL, AAAA, CNAME, DNAME, DNSKEY, DS, HINFO, LOC, MX, NAPTR, NS, NSEC3PARAM, PTR, RP, RRSIG, SOA, SPF, SRV, SSHFP, TLSA, TXT]
|
||||
choices: [A, ALL, AAAA, CAA, CNAME, DNAME, DNSKEY, DS, HINFO, LOC, MX, NAPTR, NS, NSEC3PARAM, PTR, RP, RRSIG, SOA, SPF, SRV, SSHFP, TLSA, TXT]
|
||||
flat:
|
||||
description: If 0 each record is returned as a dictionary, otherwise a string.
|
||||
type: int
|
||||
|
|
@ -129,6 +130,12 @@ RETURN = """
|
|||
AAAA:
|
||||
description:
|
||||
- address
|
||||
CAA:
|
||||
description:
|
||||
- flags
|
||||
- tag
|
||||
- value
|
||||
version_added: 6.3.0
|
||||
CNAME:
|
||||
description:
|
||||
- target
|
||||
|
|
@ -198,7 +205,7 @@ try:
|
|||
import dns.resolver
|
||||
import dns.reversename
|
||||
import dns.rdataclass
|
||||
from dns.rdatatype import (A, AAAA, CNAME, DNAME, DNSKEY, DS, HINFO, LOC,
|
||||
from dns.rdatatype import (A, AAAA, CAA, CNAME, DNAME, DNSKEY, DS, HINFO, LOC,
|
||||
MX, NAPTR, NS, NSEC3PARAM, PTR, RP, SOA, SPF, SRV, SSHFP, TLSA, TXT)
|
||||
HAVE_DNS = True
|
||||
except ImportError:
|
||||
|
|
@ -218,6 +225,7 @@ def make_rdata_dict(rdata):
|
|||
supported_types = {
|
||||
A: ['address'],
|
||||
AAAA: ['address'],
|
||||
CAA: ['flags', 'tag', 'value'],
|
||||
CNAME: ['target'],
|
||||
DNAME: ['target'],
|
||||
DNSKEY: ['flags', 'algorithm', 'protocol', 'key'],
|
||||
|
|
@ -230,7 +238,7 @@ def make_rdata_dict(rdata):
|
|||
NSEC3PARAM: ['algorithm', 'flags', 'iterations', 'salt'],
|
||||
PTR: ['target'],
|
||||
RP: ['mbox', 'txt'],
|
||||
# RRSIG: ['algorithm', 'labels', 'original_ttl', 'expiration', 'inception', 'signature'],
|
||||
# RRSIG: ['type_covered', 'algorithm', 'labels', 'original_ttl', 'expiration', 'inception', 'key_tag', 'signer', 'signature'],
|
||||
SOA: ['mname', 'rname', 'serial', 'refresh', 'retry', 'expire', 'minimum'],
|
||||
SPF: ['strings'],
|
||||
SRV: ['priority', 'weight', 'port', 'target'],
|
||||
|
|
@ -251,6 +259,8 @@ def make_rdata_dict(rdata):
|
|||
|
||||
if rdata.rdtype == DS and f == 'digest':
|
||||
val = dns.rdata._hexify(rdata.digest).replace(' ', '')
|
||||
if rdata.rdtype == DNSKEY and f == 'algorithm':
|
||||
val = int(val)
|
||||
if rdata.rdtype == DNSKEY and f == 'key':
|
||||
val = dns.rdata._base64ify(rdata.key).replace(' ', '')
|
||||
if rdata.rdtype == NSEC3PARAM and f == 'salt':
|
||||
|
|
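For orientation, a tiny dnspython snippet (independent of the lookup plugin, assuming dnspython 2.x and an example domain) showing the three CAA fields the plugin now maps:

import dns.resolver

# Each CAA rdata carries the flags/tag/value triple that make_rdata_dict() extracts.
for rdata in dns.resolver.resolve('example.com', 'CAA'):
    print({'flags': rdata.flags, 'tag': rdata.tag, 'value': rdata.value})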
|
|||
|
|
@ -32,7 +32,7 @@ DOCUMENTATION = '''
|
|||
section:
|
||||
description: Item section containing the field to retrieve (case-insensitive). If absent will return first match from any section.
|
||||
domain:
|
||||
description: Domain of 1Password. Default is U(1password.com).
|
||||
description: Domain of 1Password.
|
||||
version_added: 3.2.0
|
||||
default: '1password.com'
|
||||
type: str
|
||||
|
|
|
|||
|
|
@ -30,6 +30,11 @@ DOCUMENTATION = '''
|
|||
description: Item section containing the field to retrieve (case-insensitive). If absent will return first match from any section.
|
||||
subdomain:
|
||||
description: The 1Password subdomain to authenticate against.
|
||||
domain:
|
||||
description: Domain of 1Password.
|
||||
version_added: 6.0.0
|
||||
default: '1password.com'
|
||||
type: str
|
||||
username:
|
||||
description: The username used to sign in.
|
||||
secret_key:
|
||||
|
|
|
|||
|
|
@@ -88,9 +88,10 @@ class FormatError(CmdRunnerException):


class _ArgFormat(object):
-   def __init__(self, func, ignore_none=None):
+   def __init__(self, func, ignore_none=None, ignore_missing_value=False):
        self.func = func
        self.ignore_none = ignore_none
+       self.ignore_missing_value = ignore_missing_value

    def __call__(self, value, ctx_ignore_none):
        ignore_none = self.ignore_none if self.ignore_none is not None else ctx_ignore_none

@@ -102,8 +103,13 @@ class _ArgFormat(object):

class _Format(object):
    @staticmethod
-   def as_bool(args):
-       return _ArgFormat(lambda value: _ensure_list(args) if value else [])
+   def as_bool(args_true, args_false=None, ignore_none=None):
+       if args_false is not None:
+           if ignore_none is None:
+               ignore_none = False
+       else:
+           args_false = []
+       return _ArgFormat(lambda value: _ensure_list(args_true) if value else _ensure_list(args_false), ignore_none=ignore_none)

    @staticmethod
    def as_bool_not(args):

@@ -127,7 +133,7 @@ class _Format(object):

    @staticmethod
    def as_fixed(args):
-       return _ArgFormat(lambda value: _ensure_list(args), ignore_none=False)
+       return _ArgFormat(lambda value: _ensure_list(args), ignore_none=False, ignore_missing_value=True)

    @staticmethod
    def as_func(func, ignore_none=None):
@ -135,14 +141,15 @@ class _Format(object):
|
|||
|
||||
@staticmethod
|
||||
def as_map(_map, default=None, ignore_none=None):
|
||||
if default is None:
|
||||
default = []
|
||||
return _ArgFormat(lambda value: _ensure_list(_map.get(value, default)), ignore_none=ignore_none)
|
||||
|
||||
@staticmethod
|
||||
def as_default_type(_type, arg="", ignore_none=None):
|
||||
fmt = _Format
|
||||
if _type == "dict":
|
||||
return fmt.as_func(lambda d: ["--{0}={1}".format(*a) for a in iteritems(d)],
|
||||
ignore_none=ignore_none)
|
||||
return fmt.as_func(lambda d: ["--{0}={1}".format(*a) for a in iteritems(d)], ignore_none=ignore_none)
|
||||
if _type == "list":
|
||||
return fmt.as_func(lambda value: ["--{0}".format(x) for x in value], ignore_none=ignore_none)
|
||||
if _type == "bool":
|
||||
|
|
@ -261,10 +268,13 @@ class _CmdRunnerContext(object):
|
|||
for arg_name in self.args_order:
|
||||
value = None
|
||||
try:
|
||||
value = named_args[arg_name]
|
||||
if arg_name in named_args:
|
||||
value = named_args[arg_name]
|
||||
elif not runner.arg_formats[arg_name].ignore_missing_value:
|
||||
raise MissingArgumentValue(self.args_order, arg_name)
|
||||
self.cmd.extend(runner.arg_formats[arg_name](value, ctx_ignore_none=self.ignore_value_none))
|
||||
except KeyError:
|
||||
raise MissingArgumentValue(self.args_order, arg_name)
|
||||
except MissingArgumentValue:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise FormatError(arg_name, value, runner.arg_formats[arg_name], e)
|
||||
|
||||
|
|
|
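Taken together, a hedged usage sketch of the reworked formats above ('sometool' and its flags are invented for illustration): as_bool() can now emit one flag for True and a different one for False, and an as_fixed() argument no longer needs a value to be supplied at run time.

from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, cmd_runner_fmt

def make_runner(module):
    return CmdRunner(
        module,
        command='sometool',  # hypothetical command
        arg_formats=dict(
            version=cmd_runner_fmt.as_fixed('--version'),           # fixed flag, no value required
            force=cmd_runner_fmt.as_bool('--force', '--no-force'),   # True -> --force, False -> --no-force
        ),
    )

# runner = make_runner(module)
# with runner('version') as ctx:
#     rc, out, err = ctx.run()   # no 'version=' argument has to be passed any more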
|||
plugins/module_utils/deps.py (new file, 90 lines)
@@ -0,0 +1,90 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# (c) 2022, Alexei Znamensky <russoz@gmail.com>
|
||||
# Copyright (c) 2022, Ansible Project
|
||||
# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause)
|
||||
# SPDX-License-Identifier: BSD-2-Clause
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
import traceback
|
||||
from contextlib import contextmanager
|
||||
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible.module_utils.basic import missing_required_lib
|
||||
|
||||
|
||||
_deps = dict()
|
||||
|
||||
|
||||
class _Dependency(object):
|
||||
_states = ["pending", "failure", "success"]
|
||||
|
||||
def __init__(self, name, reason=None, url=None, msg=None):
|
||||
self.name = name
|
||||
self.reason = reason
|
||||
self.url = url
|
||||
self.msg = msg
|
||||
|
||||
self.state = 0
|
||||
self.trace = None
|
||||
self.exc = None
|
||||
|
||||
def succeed(self):
|
||||
self.state = 2
|
||||
|
||||
def fail(self, exc, trace):
|
||||
self.state = 1
|
||||
self.exc = exc
|
||||
self.trace = trace
|
||||
|
||||
@property
|
||||
def message(self):
|
||||
if self.msg:
|
||||
return to_native(self.msg)
|
||||
else:
|
||||
return missing_required_lib(self.name, reason=self.reason, url=self.url)
|
||||
|
||||
@property
|
||||
def failed(self):
|
||||
return self.state == 1
|
||||
|
||||
def verify(self, module):
|
||||
if self.failed:
|
||||
module.fail_json(msg=self.message, exception=self.trace)
|
||||
|
||||
def __str__(self):
|
||||
return "<dependency: {0} [{1}]>".format(self.name, self._states[self.state])
|
||||
|
||||
|
||||
@contextmanager
|
||||
def declare(name, *args, **kwargs):
|
||||
dep = _Dependency(name, *args, **kwargs)
|
||||
try:
|
||||
yield dep
|
||||
except Exception as e:
|
||||
dep.fail(e, traceback.format_exc())
|
||||
else:
|
||||
dep.succeed()
|
||||
finally:
|
||||
_deps[name] = dep
|
||||
|
||||
|
||||
def validate(module, spec=None):
|
||||
dep_names = sorted(_deps)
|
||||
|
||||
if spec is not None:
|
||||
if spec.startswith("-"):
|
||||
spec_split = spec[1:].split(":")
|
||||
for d in spec_split:
|
||||
dep_names.remove(d)
|
||||
else:
|
||||
spec_split = spec[1:].split(":")
|
||||
dep_names = []
|
||||
for d in spec_split:
|
||||
_deps[d] # ensure it exists
|
||||
dep_names.append(d)
|
||||
|
||||
for dep in dep_names:
|
||||
_deps[dep].verify(module)
|
||||
|
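A short, hedged example of how a module is expected to consume the new helper ('requests' is just a placeholder dependency): imports are attempted inside deps.declare() blocks at import time, and deps.validate(module) later fails the module with the usual missing_required_lib() message if any of them did not import.

from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils import deps

with deps.declare("requests", reason="required to call the remote API"):
    import requests

def main():
    module = AnsibleModule(argument_spec={})
    deps.validate(module)  # calls fail_json() here if the import above failed
    module.exit_json(status=requests.get("https://example.com").status_code)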
|
@ -6,7 +6,14 @@
|
|||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, cmd_runner_fmt as fmt
|
||||
from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, cmd_runner_fmt
|
||||
|
||||
|
||||
_state_map = {
|
||||
"present": "--set",
|
||||
"absent": "--unset",
|
||||
"get": "--get",
|
||||
}
|
||||
|
||||
|
||||
def gconftool2_runner(module, **kwargs):
|
||||
|
|
@ -14,14 +21,12 @@ def gconftool2_runner(module, **kwargs):
|
|||
module,
|
||||
command='gconftool-2',
|
||||
arg_formats=dict(
|
||||
key=fmt.as_list(),
|
||||
value_type=fmt.as_opt_val("--type"),
|
||||
value=fmt.as_list(),
|
||||
direct=fmt.as_bool("--direct"),
|
||||
config_source=fmt.as_opt_val("--config-source"),
|
||||
get=fmt.as_bool("--get"),
|
||||
set_arg=fmt.as_bool("--set"),
|
||||
unset=fmt.as_bool("--unset"),
|
||||
state=cmd_runner_fmt.as_map(_state_map),
|
||||
key=cmd_runner_fmt.as_list(),
|
||||
value_type=cmd_runner_fmt.as_opt_val("--type"),
|
||||
value=cmd_runner_fmt.as_list(),
|
||||
direct=cmd_runner_fmt.as_bool("--direct"),
|
||||
config_source=cmd_runner_fmt.as_opt_val("--config-source"),
|
||||
),
|
||||
**kwargs
|
||||
)
|
||||
|
|
|
|||
|
|
@ -110,3 +110,14 @@ def gitlab_authentication(module):
|
|||
GitLab remove Session API now that private tokens are removed from user API endpoints since version 10.2." % to_native(e))
|
||||
|
||||
return gitlab_instance
|
||||
|
||||
|
||||
def filter_returned_variables(gitlab_variables):
|
||||
# pop properties we don't know
|
||||
existing_variables = [dict(x.attributes) for x in gitlab_variables]
|
||||
KNOWN = ['key', 'value', 'masked', 'protected', 'variable_type', 'environment_scope']
|
||||
for item in existing_variables:
|
||||
for key in list(item.keys()):
|
||||
if key not in KNOWN:
|
||||
item.pop(key)
|
||||
return existing_variables
|
||||
|
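In plain terms, filter_returned_variables() keeps only the documented variable attributes; a minimal illustration with made-up data:

KNOWN = ['key', 'value', 'masked', 'protected', 'variable_type', 'environment_scope']

def keep_known(attrs):
    return {k: v for k, v in attrs.items() if k in KNOWN}

raw = {'key': 'TOKEN', 'value': 'x', 'masked': True, 'protected': False,
       'variable_type': 'env_var', 'environment_scope': '*', 'project_id': 42}
print(keep_known(raw))   # 'project_id' is dropped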
|
|
|||
|
|
@ -58,6 +58,8 @@ URL_CLIENT_USER_ROLEMAPPINGS = "{url}/admin/realms/{realm}/users/{id}/role-mappi
|
|||
URL_CLIENT_USER_ROLEMAPPINGS_AVAILABLE = "{url}/admin/realms/{realm}/users/{id}/role-mappings/clients/{client}/available"
|
||||
URL_CLIENT_USER_ROLEMAPPINGS_COMPOSITE = "{url}/admin/realms/{realm}/users/{id}/role-mappings/clients/{client}/composite"
|
||||
|
||||
URL_CLIENTSECRET = "{url}/admin/realms/{realm}/clients/{id}/client-secret"
|
||||
|
||||
URL_AUTHENTICATION_FLOWS = "{url}/admin/realms/{realm}/authentication/flows"
|
||||
URL_AUTHENTICATION_FLOW = "{url}/admin/realms/{realm}/authentication/flows/{id}"
|
||||
URL_AUTHENTICATION_FLOW_COPY = "{url}/admin/realms/{realm}/authentication/flows/{copyfrom}/copy"
|
||||
|
|
@ -606,7 +608,7 @@ class KeycloakAPI(object):
|
|||
"""
|
||||
available_rolemappings_url = URL_CLIENT_GROUP_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=gid, client=cid)
|
||||
try:
|
||||
open_url(available_rolemappings_url, method="DELETE", http_agent=self.http_agent, headers=self.restheaders,
|
||||
open_url(available_rolemappings_url, method="DELETE", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep),
|
||||
validate_certs=self.validate_certs, timeout=self.connection_timeout)
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not delete available rolemappings for client %s in group %s, realm %s: %s"
|
||||
|
|
@ -1160,6 +1162,52 @@ class KeycloakAPI(object):
|
|||
self.module.fail_json(msg='Could not update protocolmappers for clientscope %s in realm %s: %s'
|
||||
% (mapper_rep, realm, str(e)))
|
||||
|
||||
def create_clientsecret(self, id, realm="master"):
|
||||
""" Generate a new client secret by id
|
||||
|
||||
:param id: id (not clientId) of client to be queried
|
||||
:param realm: client from this realm
|
||||
:return: dict of credential representation
|
||||
"""
|
||||
clientsecret_url = URL_CLIENTSECRET.format(url=self.baseurl, realm=realm, id=id)
|
||||
|
||||
try:
|
||||
return json.loads(to_native(open_url(clientsecret_url, method='POST', headers=self.restheaders, timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
|
||||
except HTTPError as e:
|
||||
if e.code == 404:
|
||||
return None
|
||||
else:
|
||||
self.module.fail_json(msg='Could not obtain clientsecret of client %s for realm %s: %s'
|
||||
% (id, realm, str(e)))
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg='Could not obtain clientsecret of client %s for realm %s: %s'
|
||||
% (id, realm, str(e)))
|
||||
|
||||
def get_clientsecret(self, id, realm="master"):
|
||||
""" Obtain client secret by id
|
||||
|
||||
:param id: id (not clientId) of client to be queried
|
||||
:param realm: client from this realm
|
||||
:return: dict of credential representation
|
||||
"""
|
||||
clientsecret_url = URL_CLIENTSECRET.format(url=self.baseurl, realm=realm, id=id)
|
||||
|
||||
try:
|
||||
return json.loads(to_native(open_url(clientsecret_url, method='GET', headers=self.restheaders, timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
|
||||
except HTTPError as e:
|
||||
if e.code == 404:
|
||||
return None
|
||||
else:
|
||||
self.module.fail_json(msg='Could not obtain clientsecret of client %s for realm %s: %s'
|
||||
% (id, realm, str(e)))
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg='Could not obtain clientsecret of client %s for realm %s: %s'
|
||||
% (id, realm, str(e)))
|
||||
|
||||
def get_groups(self, realm="master"):
|
||||
""" Fetch the name and ID of all groups on the Keycloak server.
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,77 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2022, John Cant <a.johncant@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak import \
|
||||
keycloak_argument_spec
|
||||
|
||||
|
||||
def keycloak_clientsecret_module():
|
||||
"""
|
||||
Returns an AnsibleModule definition for modules that interact with a client
|
||||
secret.
|
||||
|
||||
:return: argument_spec dict
|
||||
"""
|
||||
argument_spec = keycloak_argument_spec()
|
||||
|
||||
meta_args = dict(
|
||||
realm=dict(default='master'),
|
||||
id=dict(type='str'),
|
||||
client_id=dict(type='str', aliases=['clientId']),
|
||||
)
|
||||
|
||||
argument_spec.update(meta_args)
|
||||
|
||||
module = AnsibleModule(
|
||||
argument_spec=argument_spec,
|
||||
supports_check_mode=True,
|
||||
required_one_of=([['id', 'client_id'],
|
||||
['token', 'auth_realm', 'auth_username', 'auth_password']]),
|
||||
required_together=([['auth_realm', 'auth_username', 'auth_password']]),
|
||||
mutually_exclusive=[
|
||||
['token', 'auth_realm'],
|
||||
['token', 'auth_username'],
|
||||
['token', 'auth_password']
|
||||
])
|
||||
|
||||
return module
|
||||
|
||||
|
||||
def keycloak_clientsecret_module_resolve_params(module, kc):
|
||||
"""
|
||||
Given an AnsibleModule definition for keycloak_clientsecret_*, and a
|
||||
KeycloakAPI client, resolve the params needed to interact with the Keycloak
|
||||
client secret, looking up the client by clientId if necessary via an API
|
||||
call.
|
||||
|
||||
:return: tuple of id, realm
|
||||
"""
|
||||
|
||||
realm = module.params.get('realm')
|
||||
id = module.params.get('id')
|
||||
client_id = module.params.get('client_id')
|
||||
|
||||
# only lookup the client_id if id isn't provided.
|
||||
# in the case that both are provided, prefer the ID, since it's one
|
||||
# less lookup.
|
||||
if id is None:
|
||||
# Due to the required_one_of spec, client_id is guaranteed to not be None
|
||||
client = kc.get_client_by_clientid(client_id, realm=realm)
|
||||
|
||||
if client is None:
|
||||
module.fail_json(
|
||||
msg='Client does not exist {client_id}'.format(client_id=client_id)
|
||||
)
|
||||
|
||||
id = client['id']
|
||||
|
||||
return id, realm
|
||||
|
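A hedged sketch of how these two helpers are meant to be wired together inside a keycloak_clientsecret_* module (error handling and no_log concerns are omitted, and the exact return key is illustrative):

from ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak import (
    KeycloakAPI, get_token)
from ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak_clientsecret import (
    keycloak_clientsecret_module, keycloak_clientsecret_module_resolve_params)

def main():
    module = keycloak_clientsecret_module()
    kc = KeycloakAPI(module, get_token(module.params))
    id, realm = keycloak_clientsecret_module_resolve_params(module, kc)
    clientsecret = kc.get_clientsecret(id=id, realm=realm)
    module.exit_json(changed=False, clientsecret=clientsecret)

if __name__ == '__main__':
    main()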
|
@ -85,17 +85,16 @@ class iLORedfishUtils(RedfishUtils):
|
|||
|
||||
datetime_uri = self.manager_uri + "DateTime"
|
||||
|
||||
response = self.get_request(self.root_uri + datetime_uri)
|
||||
if not response['ret']:
|
||||
return response
|
||||
listofips = mgr_attributes['mgr_attr_value'].split(" ")
|
||||
if len(listofips) > 2:
|
||||
return {'ret': False, 'changed': False, 'msg': "More than 2 NTP Servers mentioned"}
|
||||
|
||||
data = response['data']
|
||||
ntp_list = []
|
||||
for ips in listofips:
|
||||
ntp_list.append(ips)
|
||||
|
||||
ntp_list = data[setkey]
|
||||
if len(ntp_list) == 2:
|
||||
ntp_list.pop(0)
|
||||
|
||||
ntp_list.append(mgr_attributes['mgr_attr_value'])
|
||||
while len(ntp_list) < 2:
|
||||
ntp_list.append("0.0.0.0")
|
||||
|
||||
payload = {setkey: ntp_list}
|
||||
|
||||
|
|
@ -137,18 +136,16 @@ class iLORedfishUtils(RedfishUtils):
|
|||
nic_info = self.get_manager_ethernet_uri()
|
||||
uri = nic_info["nic_addr"]
|
||||
|
||||
response = self.get_request(self.root_uri + uri)
|
||||
if not response['ret']:
|
||||
return response
|
||||
listofips = attr['mgr_attr_value'].split(" ")
|
||||
if len(listofips) > 3:
|
||||
return {'ret': False, 'changed': False, 'msg': "More than 3 DNS Servers mentioned"}
|
||||
|
||||
data = response['data']
|
||||
dns_list = []
|
||||
for ips in listofips:
|
||||
dns_list.append(ips)
|
||||
|
||||
dns_list = data["Oem"]["Hpe"]["IPv4"][key]
|
||||
|
||||
if len(dns_list) == 3:
|
||||
dns_list.pop(0)
|
||||
|
||||
dns_list.append(attr['mgr_attr_value'])
|
||||
while len(dns_list) < 3:
|
||||
dns_list.append("0.0.0.0")
|
||||
|
||||
payload = {
|
||||
"Oem": {
|
||||
|
|
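Behaviourally, the rewritten NTP/DNS handling above treats iLO's current server list as a fixed-size queue; a small stand-alone sketch of that logic (two slots for NTP, three for DNS):

def rotate_servers(current, new_value, size):
    servers = list(current)
    if len(servers) == size:
        servers.pop(0)             # drop the oldest entry when the list is full
    servers.append(new_value)
    while len(servers) < size:
        servers.append("0.0.0.0")  # pad to the expected length
    return servers

print(rotate_servers(["1.1.1.1", "2.2.2.2"], "3.3.3.3", 2))  # ['2.2.2.2', '3.3.3.3']
print(rotate_servers([], "3.3.3.3", 3))                      # ['3.3.3.3', '0.0.0.0', '0.0.0.0']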
|
|||
plugins/module_utils/jenkins.py (new file, 35 lines)
@@ -0,0 +1,35 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright (c) 2022, Alexei Znamensky <russoz@gmail.com>
|
||||
#
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
|
||||
def download_updates_file(updates_expiration):
|
||||
updates_filename = 'jenkins-plugin-cache.json'
|
||||
updates_dir = os.path.expanduser('~/.ansible/tmp')
|
||||
updates_file = os.path.join(updates_dir, updates_filename)
|
||||
download_updates = True
|
||||
|
||||
# Make sure the destination directory exists
|
||||
if not os.path.isdir(updates_dir):
|
||||
os.makedirs(updates_dir, 0o700)
|
||||
|
||||
# Check if we need to download new updates file
|
||||
if os.path.isfile(updates_file):
|
||||
# Get timestamp when the file was changed last time
|
||||
ts_file = os.stat(updates_file).st_mtime
|
||||
ts_now = time.time()
|
||||
|
||||
if ts_now - ts_file < updates_expiration:
|
||||
download_updates = False
|
||||
|
||||
return updates_file, download_updates
|
||||
|
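A hedged usage example for the helper above (the expiration value is arbitrary): callers get back the cache path plus a flag telling them whether the Jenkins update-center file should be re-downloaded.

from ansible_collections.community.general.plugins.module_utils.jenkins import download_updates_file

updates_file, need_download = download_updates_file(updates_expiration=86400)
if need_download:
    print("cache at %s is older than a day; refresh it" % updates_file)
else:
    print("reusing cached %s" % updates_file)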
|
@ -8,8 +8,10 @@ from __future__ import (absolute_import, division, print_function)
|
|||
__metaclass__ = type
|
||||
|
||||
|
||||
import os
|
||||
import socket
|
||||
import ssl
|
||||
import json
|
||||
|
||||
from ansible.module_utils.urls import generic_urlparse
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlparse
|
||||
|
|
@ -20,8 +22,6 @@ from ansible.module_utils.common.text.converters import to_text
|
|||
HTTPConnection = http_client.HTTPConnection
|
||||
HTTPSConnection = http_client.HTTPSConnection
|
||||
|
||||
import json
|
||||
|
||||
|
||||
class UnixHTTPConnection(HTTPConnection):
|
||||
def __init__(self, path):
|
||||
|
|
@ -124,3 +124,11 @@ class LXDClient(object):
|
|||
if err is None:
|
||||
err = resp_json.get('error', None)
|
||||
return err
|
||||
|
||||
|
||||
def default_key_file():
|
||||
return os.path.expanduser('~/.config/lxc/client.key')
|
||||
|
||||
|
||||
def default_cert_file():
|
||||
return os.path.expanduser('~/.config/lxc/client.crt')
|
||||
|
|
|
|||
|
|
@ -37,8 +37,17 @@ def cause_changes(on_success=None, on_failure=None):
|
|||
|
||||
|
||||
def module_fails_on_exception(func):
|
||||
conflict_list = ('msg', 'exception', 'output', 'vars', 'changed')
|
||||
|
||||
@wraps(func)
|
||||
def wrapper(self, *args, **kwargs):
|
||||
def fix_var_conflicts(output):
|
||||
result = dict([
|
||||
(k if k not in conflict_list else "_" + k, v)
|
||||
for k, v in output.items()
|
||||
])
|
||||
return result
|
||||
|
||||
try:
|
||||
func(self, *args, **kwargs)
|
||||
except SystemExit:
|
||||
|
|
@ -46,12 +55,16 @@ def module_fails_on_exception(func):
|
|||
except ModuleHelperException as e:
|
||||
if e.update_output:
|
||||
self.update_output(e.update_output)
|
||||
# patchy solution to resolve conflict with output variables
|
||||
output = fix_var_conflicts(self.output)
|
||||
self.module.fail_json(msg=e.msg, exception=traceback.format_exc(),
|
||||
output=self.output, vars=self.vars.output(), **self.output)
|
||||
output=self.output, vars=self.vars.output(), **output)
|
||||
except Exception as e:
|
||||
# patchy solution to resolve conflict with output variables
|
||||
output = fix_var_conflicts(self.output)
|
||||
msg = "Module failed with exception: {0}".format(str(e).strip())
|
||||
self.module.fail_json(msg=msg, exception=traceback.format_exc(),
|
||||
output=self.output, vars=self.vars.output(), **self.output)
|
||||
output=self.output, vars=self.vars.output(), **output)
|
||||
return wrapper
|
||||
|
||||
|
||||
|
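The renaming performed by fix_var_conflicts() above, shown in isolation: output keys that would collide with fail_json()'s own keyword arguments get an underscore prefix.

conflict_list = ('msg', 'exception', 'output', 'vars', 'changed')

def fix_var_conflicts(output):
    return {("_" + k if k in conflict_list else k): v for k, v in output.items()}

print(fix_var_conflicts({'msg': 'already set by the module', 'rc': 0}))
# {'_msg': 'already set by the module', 'rc': 0}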
|
|
|||
|
|
@ -18,7 +18,6 @@ from ansible_collections.community.general.plugins.module_utils.mh.mixins.deprec
|
|||
|
||||
|
||||
class ModuleHelper(DeprecateAttrsMixin, VarsMixin, DependencyMixin, ModuleHelperBase):
|
||||
_output_conflict_list = ('msg', 'exception', 'output', 'vars', 'changed')
|
||||
facts_name = None
|
||||
output_params = ()
|
||||
diff_params = ()
|
||||
|
|
@ -60,10 +59,6 @@ class ModuleHelper(DeprecateAttrsMixin, VarsMixin, DependencyMixin, ModuleHelper
|
|||
vars_diff = self.vars.diff() or {}
|
||||
result['diff'] = dict_merge(dict(diff), vars_diff)
|
||||
|
||||
for varname in result:
|
||||
if varname in self._output_conflict_list:
|
||||
result["_" + varname] = result[varname]
|
||||
del result[varname]
|
||||
return result
|
||||
|
||||
|
||||
|
|
|
|||
plugins/module_utils/ocapi_utils.py (new file, 502 lines)
@@ -0,0 +1,502 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2022 Western Digital Corporation
|
||||
# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
import json
|
||||
import os
|
||||
import uuid
|
||||
|
||||
from ansible.module_utils.urls import open_url
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlparse
|
||||
|
||||
|
||||
GET_HEADERS = {'accept': 'application/json'}
|
||||
PUT_HEADERS = {'content-type': 'application/json', 'accept': 'application/json'}
|
||||
POST_HEADERS = {'content-type': 'application/json', 'accept': 'application/json'}
|
||||
DELETE_HEADERS = {'accept': 'application/json'}
|
||||
|
||||
HEALTH_OK = 5
|
||||
|
||||
|
||||
class OcapiUtils(object):
|
||||
|
||||
def __init__(self, creds, base_uri, proxy_slot_number, timeout, module):
|
||||
self.root_uri = base_uri
|
||||
self.proxy_slot_number = proxy_slot_number
|
||||
self.creds = creds
|
||||
self.timeout = timeout
|
||||
self.module = module
|
||||
|
||||
def _auth_params(self):
|
||||
"""
|
||||
Return tuple of required authentication params based on the username and password.
|
||||
|
||||
:return: tuple of username, password
|
||||
"""
|
||||
username = self.creds['user']
|
||||
password = self.creds['pswd']
|
||||
force_basic_auth = True
|
||||
return username, password, force_basic_auth
|
||||
|
||||
def get_request(self, uri):
|
||||
req_headers = dict(GET_HEADERS)
|
||||
username, password, basic_auth = self._auth_params()
|
||||
try:
|
||||
resp = open_url(uri, method="GET", headers=req_headers,
|
||||
url_username=username, url_password=password,
|
||||
force_basic_auth=basic_auth, validate_certs=False,
|
||||
follow_redirects='all',
|
||||
use_proxy=True, timeout=self.timeout)
|
||||
data = json.loads(to_native(resp.read()))
|
||||
headers = dict((k.lower(), v) for (k, v) in resp.info().items())
|
||||
except HTTPError as e:
|
||||
return {'ret': False,
|
||||
'msg': "HTTP Error %s on GET request to '%s'"
|
||||
% (e.code, uri),
|
||||
'status': e.code}
|
||||
except URLError as e:
|
||||
return {'ret': False, 'msg': "URL Error on GET request to '%s': '%s'"
|
||||
% (uri, e.reason)}
|
||||
# Almost all errors should be caught above, but just in case
|
||||
except Exception as e:
|
||||
return {'ret': False,
|
||||
'msg': "Failed GET request to '%s': '%s'" % (uri, to_text(e))}
|
||||
return {'ret': True, 'data': data, 'headers': headers}
|
    def delete_request(self, uri, etag=None):
        req_headers = dict(DELETE_HEADERS)
        if etag is not None:
            req_headers['If-Match'] = etag
        username, password, basic_auth = self._auth_params()
        try:
            resp = open_url(uri, method="DELETE", headers=req_headers,
                            url_username=username, url_password=password,
                            force_basic_auth=basic_auth, validate_certs=False,
                            follow_redirects='all',
                            use_proxy=True, timeout=self.timeout)
            if resp.status != 204:
                data = json.loads(to_native(resp.read()))
            else:
                data = ""
            headers = dict((k.lower(), v) for (k, v) in resp.info().items())
        except HTTPError as e:
            return {'ret': False,
                    'msg': "HTTP Error %s on DELETE request to '%s'"
                           % (e.code, uri),
                    'status': e.code}
        except URLError as e:
            return {'ret': False, 'msg': "URL Error on DELETE request to '%s': '%s'"
                                         % (uri, e.reason)}
        # Almost all errors should be caught above, but just in case
        except Exception as e:
            return {'ret': False,
                    'msg': "Failed DELETE request to '%s': '%s'" % (uri, to_text(e))}
        return {'ret': True, 'data': data, 'headers': headers}

    def put_request(self, uri, payload, etag=None):
        req_headers = dict(PUT_HEADERS)
        if etag is not None:
            req_headers['If-Match'] = etag
        username, password, basic_auth = self._auth_params()
        try:
            resp = open_url(uri, data=json.dumps(payload),
                            headers=req_headers, method="PUT",
                            url_username=username, url_password=password,
                            force_basic_auth=basic_auth, validate_certs=False,
                            follow_redirects='all',
                            use_proxy=True, timeout=self.timeout)
            headers = dict((k.lower(), v) for (k, v) in resp.info().items())
        except HTTPError as e:
            return {'ret': False,
                    'msg': "HTTP Error %s on PUT request to '%s'"
                           % (e.code, uri),
                    'status': e.code}
        except URLError as e:
            return {'ret': False, 'msg': "URL Error on PUT request to '%s': '%s'"
                                         % (uri, e.reason)}
        # Almost all errors should be caught above, but just in case
        except Exception as e:
            return {'ret': False,
                    'msg': "Failed PUT request to '%s': '%s'" % (uri, to_text(e))}
        return {'ret': True, 'headers': headers, 'resp': resp}

    def post_request(self, uri, payload, content_type="application/json", timeout=None):
        req_headers = dict(POST_HEADERS)
        if content_type != "application/json":
            req_headers["content-type"] = content_type
        username, password, basic_auth = self._auth_params()
        if content_type == "application/json":
            request_data = json.dumps(payload)
        else:
            request_data = payload
        try:
            resp = open_url(uri, data=request_data,
                            headers=req_headers, method="POST",
                            url_username=username, url_password=password,
                            force_basic_auth=basic_auth, validate_certs=False,
                            follow_redirects='all',
                            use_proxy=True, timeout=self.timeout if timeout is None else timeout)
            headers = dict((k.lower(), v) for (k, v) in resp.info().items())
        except HTTPError as e:
            return {'ret': False,
                    'msg': "HTTP Error %s on POST request to '%s'"
                           % (e.code, uri),
                    'status': e.code}
        except URLError as e:
            return {'ret': False, 'msg': "URL Error on POST request to '%s': '%s'"
                                         % (uri, e.reason)}
        # Almost all errors should be caught above, but just in case
        except Exception as e:
            return {'ret': False,
                    'msg': "Failed POST request to '%s': '%s'" % (uri, to_text(e))}
        return {'ret': True, 'headers': headers, 'resp': resp}

    def get_uri_with_slot_number_query_param(self, uri):
        """Return the URI with proxy slot number added as a query param, if there is one.

        If a proxy slot number is provided, to access it, we must append it as a query parameter.
        This method returns the given URI with the slotnumber query param added, if there is one.
        If there is not a proxy slot number, it just returns the URI as it was passed in.
        """
        if self.proxy_slot_number is not None:
            parsed_url = urlparse(uri)
            return parsed_url._replace(query="slotnumber=" + str(self.proxy_slot_number)).geturl()
        else:
            return uri
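
A rough illustration of the slot-number rewrite above; the base URI and slot value are invented, and the real method reads self.proxy_slot_number via Ansible's urlparse shim rather than urllib directly:

from urllib.parse import urlparse

uri = "https://device.example.com/Storage"   # hypothetical base URI
rewritten = urlparse(uri)._replace(query="slotnumber=3").geturl()
print(rewritten)  # https://device.example.com/Storage?slotnumber=3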
    def manage_system_power(self, command):
        """Process a command to manage the system power.

        :param str command: The Ansible command being processed.
        """
        if command == "PowerGracefulRestart":
            resource_uri = self.root_uri
            resource_uri = self.get_uri_with_slot_number_query_param(resource_uri)

            # Get the resource so that we have the Etag
            response = self.get_request(resource_uri)
            if 'etag' not in response['headers']:
                return {'ret': False, 'msg': 'Etag not found in response.'}
            etag = response['headers']['etag']
            if response['ret'] is False:
                return response

            # Issue the PUT to do the reboot (unless we are in check mode)
            if self.module.check_mode:
                return {
                    'ret': True,
                    'changed': True,
                    'msg': 'Update not performed in check mode.'
                }
            payload = {'Reboot': True}
            response = self.put_request(resource_uri, payload, etag)
            if response['ret'] is False:
                return response
        elif command.startswith("PowerMode"):
            return self.manage_power_mode(command)
        else:
            return {'ret': False, 'msg': 'Invalid command: ' + command}

        return {'ret': True}

    def manage_chassis_indicator_led(self, command):
        """Process a command to manage the chassis indicator LED.

        :param string command: The Ansible command being processed.
        """
        return self.manage_indicator_led(command, self.root_uri)

    def manage_indicator_led(self, command, resource_uri=None):
        """Process a command to manage an indicator LED.

        :param string command: The Ansible command being processed.
        :param string resource_uri: URI of the resource whose indicator LED is being managed.
        """
        key = "IndicatorLED"
        if resource_uri is None:
            resource_uri = self.root_uri
        resource_uri = self.get_uri_with_slot_number_query_param(resource_uri)

        payloads = {
            'IndicatorLedOn': {
                'ID': 2
            },
            'IndicatorLedOff': {
                'ID': 4
            }
        }

        response = self.get_request(resource_uri)
        if 'etag' not in response['headers']:
            return {'ret': False, 'msg': 'Etag not found in response.'}
        etag = response['headers']['etag']
        if response['ret'] is False:
            return response
        data = response['data']
        if key not in data:
            return {'ret': False, 'msg': "Key %s not found" % key}
        if 'ID' not in data[key]:
            return {'ret': False, 'msg': 'IndicatorLED for resource has no ID.'}

        if command in payloads.keys():
            # See if the LED is already set as requested.
            current_led_status = data[key]['ID']
            if current_led_status == payloads[command]['ID']:
                return {'ret': True, 'changed': False}

            # Set the LED (unless we are in check mode)
            if self.module.check_mode:
                return {
                    'ret': True,
                    'changed': True,
                    'msg': 'Update not performed in check mode.'
                }
            payload = {'IndicatorLED': payloads[command]}
            response = self.put_request(resource_uri, payload, etag)
            if response['ret'] is False:
                return response
        else:
            return {'ret': False, 'msg': 'Invalid command'}

        return {'ret': True}

    def manage_power_mode(self, command):
        key = "PowerState"
        resource_uri = self.get_uri_with_slot_number_query_param(self.root_uri)

        payloads = {
            "PowerModeNormal": 2,
            "PowerModeLow": 4
        }

        response = self.get_request(resource_uri)
        if 'etag' not in response['headers']:
            return {'ret': False, 'msg': 'Etag not found in response.'}
        etag = response['headers']['etag']
        if response['ret'] is False:
            return response
        data = response['data']
        if key not in data:
            return {'ret': False, 'msg': "Key %s not found" % key}
        if 'ID' not in data[key]:
            return {'ret': False, 'msg': 'PowerState for resource has no ID.'}

        if command in payloads.keys():
            # See if the PowerState is already set as requested.
            current_power_state = data[key]['ID']
            if current_power_state == payloads[command]:
                return {'ret': True, 'changed': False}

            # Set the Power State (unless we are in check mode)
            if self.module.check_mode:
                return {
                    'ret': True,
                    'changed': True,
                    'msg': 'Update not performed in check mode.'
                }
            payload = {'PowerState': {"ID": payloads[command]}}
            response = self.put_request(resource_uri, payload, etag)
            if response['ret'] is False:
                return response
        else:
            return {'ret': False, 'msg': 'Invalid command: ' + command}

        return {'ret': True}

    def prepare_multipart_firmware_upload(self, filename):
        """Prepare a multipart/form-data body for OCAPI firmware upload.

        :arg filename: The name of the file to upload.
        :returns: tuple of (content_type, body) where ``content_type`` is
            the ``multipart/form-data`` ``Content-Type`` header including
            ``boundary`` and ``body`` is the prepared bytestring body

        Prepares the body to include "FirmwareFile" field with the contents of the file.
        Because some OCAPI targets do not support Base-64 encoding for multipart/form-data,
        this method sends the file as binary.
        """
        boundary = str(uuid.uuid4())  # Generate a random boundary
        body = "--" + boundary + '\r\n'
        body += 'Content-Disposition: form-data; name="FirmwareFile"; filename="%s"\r\n' % to_native(os.path.basename(filename))
        body += 'Content-Type: application/octet-stream\r\n\r\n'
        body_bytes = bytearray(body, 'utf-8')
        with open(filename, 'rb') as f:
            body_bytes += f.read()
        body_bytes += bytearray("\r\n--%s--" % boundary, 'utf-8')
        return ("multipart/form-data; boundary=%s" % boundary,
                body_bytes)
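
A minimal standalone sketch of the same multipart framing (file name and payload bytes below are made up), useful for checking the boundary layout the method builds:

import uuid

boundary = str(uuid.uuid4())
body = bytearray(
    '--%s\r\n'
    'Content-Disposition: form-data; name="FirmwareFile"; filename="fw.bin"\r\n'
    'Content-Type: application/octet-stream\r\n\r\n' % boundary, 'utf-8')
body += b'\x00\x01\x02'                              # stand-in for the firmware image bytes
body += bytearray('\r\n--%s--' % boundary, 'utf-8')
content_type = 'multipart/form-data; boundary=%s' % boundary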
    def upload_firmware_image(self, update_image_path):
        """Perform Firmware Upload to the OCAPI storage device.

        :param str update_image_path: The path/filename of the firmware image, on the local filesystem.
        """
        if not (os.path.exists(update_image_path) and os.path.isfile(update_image_path)):
            return {'ret': False, 'msg': 'File does not exist.'}
        url = self.root_uri + "OperatingSystem"
        url = self.get_uri_with_slot_number_query_param(url)
        content_type, b_form_data = self.prepare_multipart_firmware_upload(update_image_path)

        # Post the firmware (unless we are in check mode)
        if self.module.check_mode:
            return {
                'ret': True,
                'changed': True,
                'msg': 'Update not performed in check mode.'
            }
        result = self.post_request(url, b_form_data, content_type=content_type, timeout=300)
        if result['ret'] is False:
            return result
        return {'ret': True}

    def update_firmware_image(self):
        """Perform a Firmware Update on the OCAPI storage device."""
        resource_uri = self.root_uri
        resource_uri = self.get_uri_with_slot_number_query_param(resource_uri)
        # We have to do a GET to obtain the Etag. It's required on the PUT.
        response = self.get_request(resource_uri)
        if response['ret'] is False:
            return response
        if 'etag' not in response['headers']:
            return {'ret': False, 'msg': 'Etag not found in response.'}
        etag = response['headers']['etag']

        # Issue the PUT (unless we are in check mode)
        if self.module.check_mode:
            return {
                'ret': True,
                'changed': True,
                'msg': 'Update not performed in check mode.'
            }
        payload = {'FirmwareUpdate': True}
        response = self.put_request(resource_uri, payload, etag)
        if response['ret'] is False:
            return response

        return {'ret': True, 'jobUri': response["headers"]["location"]}

    def activate_firmware_image(self):
        """Perform a Firmware Activate on the OCAPI storage device."""
        resource_uri = self.root_uri
        resource_uri = self.get_uri_with_slot_number_query_param(resource_uri)
        # We have to do a GET to obtain the Etag. It's required on the PUT.
        response = self.get_request(resource_uri)
        if 'etag' not in response['headers']:
            return {'ret': False, 'msg': 'Etag not found in response.'}
        etag = response['headers']['etag']
        if response['ret'] is False:
            return response

        # Issue the PUT (unless we are in check mode)
        if self.module.check_mode:
            return {
                'ret': True,
                'changed': True,
                'msg': 'Update not performed in check mode.'
            }
        payload = {'FirmwareActivate': True}
        response = self.put_request(resource_uri, payload, etag)
        if response['ret'] is False:
            return response

        return {'ret': True, 'jobUri': response["headers"]["location"]}

    def get_job_status(self, job_uri):
        """Get the status of a job.

        :param str job_uri: The URI of the job's status monitor.
        """
        job_uri = self.get_uri_with_slot_number_query_param(job_uri)
        response = self.get_request(job_uri)
        if response['ret'] is False:
            if response.get('status') == 404:
                # Job not found -- assume 0%
                return {
                    "ret": True,
                    "percentComplete": 0,
                    "operationStatus": "Not Available",
                    "operationStatusId": 1,
                    "operationHealth": None,
                    "operationHealthId": None,
                    "details": "Job does not exist.",
                    "jobExists": False
                }
            else:
                return response
        details = response["data"]["Status"].get("Details")
        if type(details) is str:
            details = [details]
        health_list = response["data"]["Status"]["Health"]
        return_value = {
            "ret": True,
            "percentComplete": response["data"]["PercentComplete"],
            "operationStatus": response["data"]["Status"]["State"]["Name"],
            "operationStatusId": response["data"]["Status"]["State"]["ID"],
            "operationHealth": health_list[0]["Name"] if len(health_list) > 0 else None,
            "operationHealthId": health_list[0]["ID"] if len(health_list) > 0 else None,
            "details": details,
            "jobExists": True
        }
        return return_value

    def delete_job(self, job_uri):
        """Delete the OCAPI job referenced by the specified job_uri."""
        job_uri = self.get_uri_with_slot_number_query_param(job_uri)
        # We have to do a GET to obtain the Etag. It's required on the DELETE.
        response = self.get_request(job_uri)

        if response['ret'] is True:
            if 'etag' not in response['headers']:
                return {'ret': False, 'msg': 'Etag not found in response.'}
            else:
                etag = response['headers']['etag']

            if response['data']['PercentComplete'] != 100:
                return {
                    'ret': False,
                    'changed': False,
                    'msg': 'Cannot delete job because it is in progress.'
                }

        if response['ret'] is False:
            if response['status'] == 404:
                return {
                    'ret': True,
                    'changed': False,
                    'msg': 'Job already deleted.'
                }
            return response
        if self.module.check_mode:
            return {
                'ret': True,
                'changed': True,
                'msg': 'Update not performed in check mode.'
            }

        # Do the DELETE (unless we are in check mode)
        response = self.delete_request(job_uri, etag)
        if response['ret'] is False:
            if response['status'] == 404:
                return {
                    'ret': True,
                    'changed': False
                }
            elif response['status'] == 409:
                return {
                    'ret': False,
                    'changed': False,
                    'msg': 'Cannot delete job because it is in progress.'
                }
            return response
        return {
            'ret': True,
            'changed': True
        }
@@ -26,6 +26,36 @@ except ImportError:
    HAS_PYONE = False


# A helper function to mitigate https://github.com/OpenNebula/one/issues/6064.
# It allows for easily handling lists like "NIC" or "DISK" in the JSON-like template representation.
# There are either lists of dictionaries (length > 1) or just dictionaries.
def flatten(to_flatten, extract=False):
    """Flattens nested lists (with optional value extraction)."""
    def recurse(to_flatten):
        return sum(map(recurse, to_flatten), []) if isinstance(to_flatten, list) else [to_flatten]
    value = recurse(to_flatten)
    if extract and len(value) == 1:
        return value[0]
    return value


# A helper function to mitigate https://github.com/OpenNebula/one/issues/6064.
# It renders JSON-like template representation into OpenNebula's template syntax (string).
def render(to_render):
    """Converts dictionary to OpenNebula template."""
    def recurse(to_render):
        for key, value in sorted(to_render.items()):
            if isinstance(value, dict):
                yield '{0:}=[{1:}]'.format(key, ','.join(recurse(value)))
                continue
            if isinstance(value, list):
                for item in value:
                    yield '{0:}=[{1:}]'.format(key, ','.join(recurse(item)))
                continue
            yield '{0:}="{1:}"'.format(key, value)
    return '\n'.join(recurse(to_render))
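
The two helpers are easiest to see on a tiny invented template; this sketch assumes only the functions defined above:

template = {"NAME": "vm1", "DISK": [{"IMAGE": "os"}, {"IMAGE": "data"}]}
print(render(template))
# DISK=[IMAGE="os"]
# DISK=[IMAGE="data"]
# NAME="vm1"
print(flatten([[{"IMAGE": "os"}], {"IMAGE": "data"}]))   # [{'IMAGE': 'os'}, {'IMAGE': 'data'}]
print(flatten({"IMAGE": "os"}, extract=True))            # {'IMAGE': 'os'}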


class OpenNebulaModule:
    """
    Base class for all OpenNebula Ansible Modules.
plugins/module_utils/puppet.py (new file, 114 lines)
@@ -0,0 +1,114 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2022, Alexei Znamensky <russoz@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import absolute_import, division, print_function
__metaclass__ = type


import os

from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, cmd_runner_fmt


_PUPPET_PATH_PREFIX = ["/opt/puppetlabs/bin"]


def get_facter_dir():
    if os.getuid() == 0:
        return '/etc/facter/facts.d'
    else:
        return os.path.expanduser('~/.facter/facts.d')


def _puppet_cmd(module):
    return module.get_bin_path("puppet", False, _PUPPET_PATH_PREFIX)


# If the `timeout` CLI command feature is removed,
# Then we could add this as a fixed param to `puppet_runner`
def ensure_agent_enabled(module):
    runner = CmdRunner(
        module,
        command="puppet",
        path_prefix=_PUPPET_PATH_PREFIX,
        arg_formats=dict(
            _agent_disabled=cmd_runner_fmt.as_fixed(['config', 'print', 'agent_disabled_lockfile']),
        ),
        check_rc=False,
    )

    rc, stdout, stderr = runner("_agent_disabled").run()
    if os.path.exists(stdout.strip()):
        module.fail_json(
            msg="Puppet agent is administratively disabled.",
            disabled=True)
    elif rc != 0:
        module.fail_json(
            msg="Puppet agent state could not be determined.")


def puppet_runner(module):

    # Keeping backward compatibility, allow for running with the `timeout` CLI command.
    # If this can be replaced with ansible `timeout` parameter in playbook,
    # then this function could be removed.
    def _prepare_base_cmd():
        _tout_cmd = module.get_bin_path("timeout", False)
        if _tout_cmd:
            cmd = ["timeout", "-s", "9", module.params["timeout"], _puppet_cmd(module)]
        else:
            cmd = ["puppet"]
        return cmd

    def noop_func(v):
        _noop = cmd_runner_fmt.as_map({
            True: "--noop",
            False: "--no-noop",
        })
        return _noop(module.check_mode or v)

    _logdest_map = {
        "syslog": ["--logdest", "syslog"],
        "all": ["--logdest", "syslog", "--logdest", "console"],
    }

    @cmd_runner_fmt.unpack_args
    def execute_func(execute, manifest):
        if execute:
            return ["--execute", execute]
        else:
            return [manifest]

    runner = CmdRunner(
        module,
        command=_prepare_base_cmd(),
        path_prefix=_PUPPET_PATH_PREFIX,
        arg_formats=dict(
            _agent_fixed=cmd_runner_fmt.as_fixed([
                "agent", "--onetime", "--no-daemonize", "--no-usecacheonfailure",
                "--no-splay", "--detailed-exitcodes", "--verbose", "--color", "0",
            ]),
            _apply_fixed=cmd_runner_fmt.as_fixed(["apply", "--detailed-exitcodes"]),
            puppetmaster=cmd_runner_fmt.as_opt_val("--server"),
            show_diff=cmd_runner_fmt.as_bool("--show-diff"),
            confdir=cmd_runner_fmt.as_opt_val("--confdir"),
            environment=cmd_runner_fmt.as_opt_val("--environment"),
            tags=cmd_runner_fmt.as_func(lambda v: ["--tags", ",".join(v)]),
            certname=cmd_runner_fmt.as_opt_eq_val("--certname"),
            noop=cmd_runner_fmt.as_func(noop_func),
            use_srv_records=cmd_runner_fmt.as_map({
                True: "--usr_srv_records",
                False: "--no-usr_srv_records",
            }),
            logdest=cmd_runner_fmt.as_map(_logdest_map, default=[]),
            modulepath=cmd_runner_fmt.as_opt_eq_val("--modulepath"),
            _execute=cmd_runner_fmt.as_func(execute_func),
            summarize=cmd_runner_fmt.as_bool("--summarize"),
            debug=cmd_runner_fmt.as_bool("--debug"),
            verbose=cmd_runner_fmt.as_bool("--verbose"),
        ),
        check_rc=False,
    )
    return runner
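
A hedged sketch of how a module might drive the runner above; the argument-order string and parameter values are invented, but the context-manager pattern is the usual CmdRunner one from this collection:

runner = puppet_runner(module)
with runner("_agent_fixed puppetmaster noop logdest") as ctx:
    rc, stdout, stderr = ctx.run(noop=False)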
@@ -314,3 +314,21 @@ def setup_rax_module(module, rax_module, region_required=True):
                             (region, ','.join(rax_module.regions)))

    return rax_module


def rax_scaling_group_personality_file(module, files):
    if not files:
        return []

    results = []
    for rpath, lpath in files.items():
        lpath = os.path.expanduser(lpath)
        try:
            with open(lpath, 'r') as f:
                results.append({
                    'path': rpath,
                    'contents': f.read(),
                })
        except Exception as e:
            module.fail_json(msg='Failed to load %s: %s' % (lpath, str(e)))
    return results
@@ -19,6 +19,8 @@ POST_HEADERS = {'content-type': 'application/json', 'accept': 'application/json',
                'OData-Version': '4.0'}
PATCH_HEADERS = {'content-type': 'application/json', 'accept': 'application/json',
                 'OData-Version': '4.0'}
PUT_HEADERS = {'content-type': 'application/json', 'accept': 'application/json',
               'OData-Version': '4.0'}
DELETE_HEADERS = {'accept': 'application/json', 'OData-Version': '4.0'}

FAIL_MSG = 'Issuing a data modification command without specifying the '\

@@ -36,6 +38,8 @@ class RedfishUtils(object):
        self.timeout = timeout
        self.module = module
        self.service_root = '/redfish/v1/'
        self.session_service_uri = '/redfish/v1/SessionService'
        self.sessions_uri = '/redfish/v1/SessionService/Sessions'
        self.resource_id = resource_id
        self.data_modification = data_modification
        self.strip_etag_quotes = strip_etag_quotes

@@ -123,6 +127,10 @@ class RedfishUtils(object):
        req_headers = dict(GET_HEADERS)
        username, password, basic_auth = self._auth_params(req_headers)
        try:
            # Service root is an unauthenticated resource; remove credentials
            # in case the caller will be using sessions later.
            if uri == (self.root_uri + self.service_root):
                basic_auth = False
            resp = open_url(uri, method="GET", headers=req_headers,
                            url_username=username, url_password=password,
                            force_basic_auth=basic_auth, validate_certs=False,

@@ -143,18 +151,28 @@ class RedfishUtils(object):
        except Exception as e:
            return {'ret': False,
                    'msg': "Failed GET request to '%s': '%s'" % (uri, to_text(e))}
        return {'ret': True, 'data': data, 'headers': headers}
        return {'ret': True, 'data': data, 'headers': headers, 'resp': resp}

    def post_request(self, uri, pyld):
        req_headers = dict(POST_HEADERS)
        username, password, basic_auth = self._auth_params(req_headers)
        try:
            # When performing a POST to the session collection, credentials are
            # provided in the request body. Do not provide the basic auth
            # header since this can cause conflicts with some services
            if self.sessions_uri is not None and uri == (self.root_uri + self.sessions_uri):
                basic_auth = False
            resp = open_url(uri, data=json.dumps(pyld),
                            headers=req_headers, method="POST",
                            url_username=username, url_password=password,
                            force_basic_auth=basic_auth, validate_certs=False,
                            follow_redirects='all',
                            use_proxy=True, timeout=self.timeout)
            try:
                data = json.loads(to_native(resp.read()))
            except Exception as e:
                # No response data; this is okay in many cases
                data = None
            headers = dict((k.lower(), v) for (k, v) in resp.info().items())
        except HTTPError as e:
            msg = self._get_extended_message(e)

@@ -169,7 +187,7 @@ class RedfishUtils(object):
        except Exception as e:
            return {'ret': False,
                    'msg': "Failed POST request to '%s': '%s'" % (uri, to_text(e))}
        return {'ret': True, 'headers': headers, 'resp': resp}
        return {'ret': True, 'data': data, 'headers': headers, 'resp': resp}

    def patch_request(self, uri, pyld, check_pyld=False):
        req_headers = dict(PATCH_HEADERS)

@@ -219,6 +237,41 @@ class RedfishUtils(object):
                    'msg': "Failed PATCH request to '%s': '%s'" % (uri, to_text(e))}
        return {'ret': True, 'changed': True, 'resp': resp, 'msg': 'Modified %s' % uri}

    def put_request(self, uri, pyld):
        req_headers = dict(PUT_HEADERS)
        r = self.get_request(uri)
        if r['ret']:
            # Get etag from etag header or @odata.etag property
            etag = r['headers'].get('etag')
            if not etag:
                etag = r['data'].get('@odata.etag')
            if etag:
                if self.strip_etag_quotes:
                    etag = etag.strip('"')
                req_headers['If-Match'] = etag
        username, password, basic_auth = self._auth_params(req_headers)
        try:
            resp = open_url(uri, data=json.dumps(pyld),
                            headers=req_headers, method="PUT",
                            url_username=username, url_password=password,
                            force_basic_auth=basic_auth, validate_certs=False,
                            follow_redirects='all',
                            use_proxy=True, timeout=self.timeout)
        except HTTPError as e:
            msg = self._get_extended_message(e)
            return {'ret': False,
                    'msg': "HTTP Error %s on PUT request to '%s', extended message: '%s'"
                           % (e.code, uri, msg),
                    'status': e.code}
        except URLError as e:
            return {'ret': False, 'msg': "URL Error on PUT request to '%s': '%s'"
                                         % (uri, e.reason)}
        # Almost all errors should be caught above, but just in case
        except Exception as e:
            return {'ret': False,
                    'msg': "Failed PUT request to '%s': '%s'" % (uri, to_text(e))}
        return {'ret': True, 'resp': resp}

    def delete_request(self, uri, pyld=None):
        req_headers = dict(DELETE_HEADERS)
        username, password, basic_auth = self._auth_params(req_headers)

@@ -321,23 +374,23 @@ class RedfishUtils(object):
        return {'ret': True}

    def _find_sessionservice_resource(self):
        # Get the service root
        response = self.get_request(self.root_uri + self.service_root)
        if response['ret'] is False:
            return response
        data = response['data']
        if 'SessionService' not in data:

        # Check for the session service and session collection. Well-known
        # defaults are provided in the constructor, but services that predate
        # Redfish 1.6.0 might contain different values.
        self.session_service_uri = data.get('SessionService', {}).get('@odata.id')
        self.sessions_uri = data.get('Links', {}).get('Sessions', {}).get('@odata.id')

        # If one isn't found, return an error
        if self.session_service_uri is None:
            return {'ret': False, 'msg': "SessionService resource not found"}
        else:
            session_service = data["SessionService"]["@odata.id"]
            self.session_service_uri = session_service
            response = self.get_request(self.root_uri + session_service)
            if response['ret'] is False:
                return response
            data = response['data']
            sessions = data['Sessions']['@odata.id']
            if sessions[-1:] == '/':
                sessions = sessions[:-1]
            self.sessions_uri = sessions
        if self.sessions_uri is None:
            return {'ret': False, 'msg': "SessionCollection resource not found"}
        return {'ret': True}

    def _get_resource_uri_by_id(self, uris, id_prop):

@@ -1384,11 +1437,82 @@ class RedfishUtils(object):
        else:
            return self._software_inventory(self.software_uri)

    def _operation_results(self, response, data, handle=None):
        """
        Builds the results for an operation from task, job, or action response.

        :param response: HTTP response object
        :param data: HTTP response data
        :param handle: The task or job handle that was last used
        :return: dict containing operation results
        """

        operation_results = {'status': None, 'messages': [], 'handle': None, 'ret': True,
                             'resets_requested': []}

        if response.status == 204:
            # No content; successful, but nothing to return
            # Use the Redfish "Completed" enum from TaskState for the operation status
            operation_results['status'] = 'Completed'
        else:
            # Parse the response body for details

            # Determine the next handle, if any
            operation_results['handle'] = handle
            if response.status == 202:
                # Task generated; get the task monitor URI
                operation_results['handle'] = response.getheader('Location', handle)

            # Pull out the status and messages based on the body format
            if data is not None:
                response_type = data.get('@odata.type', '')
                if response_type.startswith('#Task.') or response_type.startswith('#Job.'):
                    # Task and Job have similar enough structures to treat the same
                    operation_results['status'] = data.get('TaskState', data.get('JobState'))
                    operation_results['messages'] = data.get('Messages', [])
                else:
                    # Error response body, which is a bit of a misnomer since it's used in successful action responses
                    operation_results['status'] = 'Completed'
                    if response.status >= 400:
                        operation_results['status'] = 'Exception'
                    operation_results['messages'] = data.get('error', {}).get('@Message.ExtendedInfo', [])
            else:
                # No response body (or malformed); build based on status code
                operation_results['status'] = 'Completed'
                if response.status == 202:
                    operation_results['status'] = 'New'
                elif response.status >= 400:
                    operation_results['status'] = 'Exception'

            # Clear out the handle if the operation is complete
            if operation_results['status'] in ['Completed', 'Cancelled', 'Exception', 'Killed']:
                operation_results['handle'] = None

            # Scan the messages to see if next steps are needed
            for message in operation_results['messages']:
                message_id = message['MessageId']

                if message_id.startswith('Update.1.') and message_id.endswith('.OperationTransitionedToJob'):
                    # Operation rerouted to a job; update the status and handle
                    operation_results['status'] = 'New'
                    operation_results['handle'] = message['MessageArgs'][0]
                    operation_results['resets_requested'] = []
                    # No need to process other messages in this case
                    break

                if message_id.startswith('Base.1.') and message_id.endswith('.ResetRequired'):
                    # A reset to some device is needed to continue the update
                    reset = {'uri': message['MessageArgs'][0], 'type': message['MessageArgs'][1]}
                    operation_results['resets_requested'].append(reset)

        return operation_results

    def simple_update(self, update_opts):
        image_uri = update_opts.get('update_image_uri')
        protocol = update_opts.get('update_protocol')
        targets = update_opts.get('update_targets')
        creds = update_opts.get('update_creds')
        apply_time = update_opts.get('update_apply_time')

        if not image_uri:
            return {'ret': False, 'msg':

@@ -1439,11 +1563,65 @@ class RedfishUtils(object):
            payload["Username"] = creds.get('username')
        if creds.get('password'):
            payload["Password"] = creds.get('password')
        if apply_time:
            payload["@Redfish.OperationApplyTime"] = apply_time
        response = self.post_request(self.root_uri + update_uri, payload)
        if response['ret'] is False:
            return response
        return {'ret': True, 'changed': True,
                'msg': "SimpleUpdate requested"}
                'msg': "SimpleUpdate requested",
                'update_status': self._operation_results(response['resp'], response['data'])}

    def get_update_status(self, update_handle):
        """
        Gets the status of an update operation.

        :param handle: The task or job handle tracking the update
        :return: dict containing the response of the update status
        """

        if not update_handle:
            return {'ret': False, 'msg': 'Must provide a handle tracking the update.'}

        # Get the task or job tracking the update
        response = self.get_request(self.root_uri + update_handle)
        if response['ret'] is False:
            return response

        # Inspect the response to build the update status
        return self._operation_results(response['resp'], response['data'], update_handle)

    def perform_requested_update_operations(self, update_handle):
        """
        Performs requested operations to allow the update to continue.

        :param handle: The task or job handle tracking the update
        :return: dict containing the result of the operations
        """

        # Get the current update status
        update_status = self.get_update_status(update_handle)
        if update_status['ret'] is False:
            return update_status

        changed = False

        # Perform any requested updates
        for reset in update_status['resets_requested']:
            resp = self.post_request(self.root_uri + reset['uri'], {'ResetType': reset['type']})
            if resp['ret'] is False:
                # Override the 'changed' indicator since other resets may have
                # been successful
                resp['changed'] = changed
                return resp
            changed = True

        msg = 'No operations required for the update'
        if changed:
            # Will need to consider finetuning this message if the scope of the
            # requested operations grow over time
            msg = 'One or more components reset to continue the update'
        return {'ret': True, 'changed': changed, 'msg': msg}
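
A rough polling loop a module could layer on the two helpers above; the sleep interval and variable names are invented, and `time` would need to be imported:

status = self.get_update_status(handle)
while status['ret'] is True and status['handle']:
    self.perform_requested_update_operations(status['handle'])
    time.sleep(30)
    status = self.get_update_status(status['handle'])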

    def get_bios_attributes(self, systems_uri):
        result = {}
@@ -84,6 +84,10 @@ def parse_pagination_link(header):


def filter_sensitive_attributes(container, attributes):
    '''
    WARNING: This function is effectively private, **do not use it**!
    It will be removed or renamed once changing its name no longer triggers a pylint bug.
    '''
    for attr in attributes:
        container[attr] = "SENSITIVE_VALUE"
plugins/module_utils/ssh.py (new file, 21 lines)
@@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Björn Andersson
# Copyright (c) 2021, Ansible Project
# Copyright (c) 2021, Abhijeet Kasurde <akasurde@redhat.com>
# Copyright (c) 2022, Alexei Znamensky <russoz@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import absolute_import, division, print_function
__metaclass__ = type


import os


def determine_config_file(user, config_file):
    if user:
        config_file = os.path.join(os.path.expanduser('~%s' % user), '.ssh', 'config')
    elif config_file is None:
        config_file = '/etc/ssh/ssh_config'
    return config_file
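
The three branches above, on invented inputs:

determine_config_file('deploy', None)       # '/home/deploy/.ssh/config' (per-user file)
determine_config_file(None, '/tmp/sshcfg')  # '/tmp/sshcfg' (explicit path kept)
determine_config_file(None, None)           # '/etc/ssh/ssh_config' (system default)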
@@ -27,7 +27,7 @@ __metaclass__ = type
DOCUMENTATION = '''
---
module: ali_instance
short_description: Create, Start, Stop, Restart or Terminate an Instance in ECS. Add or Remove Instance to/from a Security Group.
short_description: Create, Start, Stop, Restart or Terminate an Instance in ECS; Add or Remove Instance to/from a Security Group
description:
  - Create, start, stop, restart, modify or terminate ecs instances.
  - Add or remove ecs instances to/from security group.

@@ -27,7 +27,7 @@ __metaclass__ = type
DOCUMENTATION = '''
---
module: ali_instance_info
short_description: Gather information on instances of Alibaba Cloud ECS.
short_description: Gather information on instances of Alibaba Cloud ECS
description:
  - This module fetches data from the Open API in Alicloud.
    The module must be called from within the ECS instance itself.
@@ -60,6 +60,8 @@ options:
    description:
      - A list of subcommands.
      - Each subcommand needs a name, a link and a path parameter.
      - Subcommands are also named 'slaves' or 'followers', depending on the version
        of alternatives.
    type: list
    elements: dict
    aliases: ['slaves']

@@ -310,10 +312,10 @@ class AlternativesModule(object):
        current_mode_regex = re.compile(r'\s-\s(?:status\sis\s)?(\w*)(?:\smode|.)$', re.MULTILINE)
        current_path_regex = re.compile(r'^\s*link currently points to (.*)$', re.MULTILINE)
        current_link_regex = re.compile(r'^\s*link \w+ is (.*)$', re.MULTILINE)
        subcmd_path_link_regex = re.compile(r'^\s*slave (\S+) is (.*)$', re.MULTILINE)
        subcmd_path_link_regex = re.compile(r'^\s*(?:slave|follower) (\S+) is (.*)$', re.MULTILINE)

        alternative_regex = re.compile(r'^(\/.*)\s-\s(?:family\s\S+\s)?priority\s(\d+)((?:\s+slave.*)*)', re.MULTILINE)
        subcmd_regex = re.compile(r'^\s+slave (.*): (.*)$', re.MULTILINE)
        alternative_regex = re.compile(r'^(\/.*)\s-\s(?:family\s\S+\s)?priority\s(\d+)((?:\s+(?:slave|follower).*)*)', re.MULTILINE)
        subcmd_regex = re.compile(r'^\s+(?:slave|follower) (.*): (.*)$', re.MULTILINE)

        match = current_mode_regex.search(display_output)
        if not match:
@@ -20,6 +20,10 @@ notes:
  - >
    B(Ansible 2.9/2.10): The C(ansible-galaxy) command changed significantly between Ansible 2.9 and
    ansible-base 2.10 (later ansible-core 2.11). See comments in the parameters.
  - >
    The module will try and run using the C(C.UTF-8) locale.
    If that fails, it will try C(en_US.UTF-8).
    If that one also fails, the module will fail.
requirements:
  - Ansible 2.9, ansible-base 2.10, or ansible-core 2.11 or newer
options:

@@ -185,7 +189,7 @@ RETURN = """
import re

from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, cmd_runner_fmt as fmt
from ansible_collections.community.general.plugins.module_utils.module_helper import ModuleHelper
from ansible_collections.community.general.plugins.module_utils.module_helper import ModuleHelper, ModuleHelperException


class AnsibleGalaxyInstall(ModuleHelper):

@@ -226,11 +230,17 @@ class AnsibleGalaxyInstall(ModuleHelper):
        version=fmt.as_bool("--version"),
        name=fmt.as_list(),
    )
    force_lang = "en_US.UTF-8"
    check_rc = True

    def _make_runner(self, lang):
        return CmdRunner(self.module, command=self.command, arg_formats=self.command_args_formats, force_lang=lang, check_rc=True)

    def _get_ansible_galaxy_version(self):
        class UnsupportedLocale(ModuleHelperException):
            pass

        def process(rc, out, err):
            if (rc != 0 and "unsupported locale setting" in err) or (rc == 0 and "cannot change locale" in err):
                raise UnsupportedLocale(msg=err)
            line = out.splitlines()[0]
            match = self._RE_GALAXY_VERSION.match(line)
            if not match:

@@ -239,12 +249,18 @@ class AnsibleGalaxyInstall(ModuleHelper):
            version = tuple(int(x) for x in version.split('.')[:3])
            return version

        with self.runner("version", check_rc=True, output_process=process) as ctx:
            return ctx.run(version=True)
        try:
            runner = self._make_runner("C.UTF-8")
            with runner("version", check_rc=False, output_process=process) as ctx:
                return runner, ctx.run(version=True)
        except UnsupportedLocale as e:
            runner = self._make_runner("en_US.UTF-8")
            with runner("version", check_rc=True, output_process=process) as ctx:
                return runner, ctx.run(version=True)

    def __init_module__(self):
        self.runner = CmdRunner(self.module, command=self.command, arg_formats=self.command_args_formats, force_lang=self.force_lang)
        self.ansible_version = self._get_ansible_galaxy_version()
        # self.runner = CmdRunner(self.module, command=self.command, arg_formats=self.command_args_formats, force_lang=self.force_lang)
        self.runner, self.ansible_version = self._get_ansible_galaxy_version()
        if self.ansible_version < (2, 11) and not self.vars.ack_min_ansiblecore211:
            self.module.deprecate(
                "Support for Ansible 2.9 and ansible-base 2.10 is being deprecated. "

@@ -339,11 +355,12 @@ class AnsibleGalaxyInstall(ModuleHelper):
        self._setup210plus()
        with self.runner("type galaxy_cmd force no_deps dest requirements_file name", output_process=process) as ctx:
            ctx.run(galaxy_cmd="install")
            if self.verbosity > 2:
                self.vars.set("run_info", ctx.run_info)


def main():
    galaxy = AnsibleGalaxyInstall()
    galaxy.run()
    AnsibleGalaxyInstall.execute()


if __name__ == '__main__':
@@ -16,7 +16,7 @@ author:
  - Christian Berendt (@berendt)
  - Ralf Hertel (@n0trax)
  - Robin Roth (@robinro)
short_description: Enables/disables a module of the Apache2 webserver.
short_description: Enables/disables a module of the Apache2 webserver
description:
  - Enables or disables a specified module of the Apache2 webserver.
options:

@@ -49,6 +49,12 @@ options:
      - Ignore configuration checks about inconsistent module configuration. Especially for mpm_* modules.
    type: bool
    default: false
  warn_mpm_absent:
    description:
      - Control the behavior of the warning process for MPM modules.
    type: bool
    default: true
    version_added: 6.3.0
requirements: ["a2enmod","a2dismod"]
notes:
  - This does not work on RedHat-based distributions. It does work on Debian- and SuSE-based distributions.

@@ -78,6 +84,18 @@ EXAMPLES = '''
    name: mpm_worker
    ignore_configcheck: true

- name: Disable mpm_event, enable mpm_prefork and ignore warnings about missing mpm module
  community.general.apache2_module:
    name: "{{ item.module }}"
    state: "{{ item.state }}"
    warn_mpm_absent: false
    ignore_configcheck: true
  loop:
    - module: mpm_event
      state: absent
    - module: mpm_prefork
      state: present

- name: Enable dump_io module, which is identified as dumpio_module inside apache2
  community.general.apache2_module:
    state: present

@@ -140,10 +158,11 @@ def _module_is_enabled(module):
        error_msg = "Error executing %s: %s" % (control_binary, stderr)
        if module.params['ignore_configcheck']:
            if 'AH00534' in stderr and 'mpm_' in module.params['name']:
                module.warnings.append(
                    "No MPM module loaded! apache2 reload AND other module actions"
                    " will fail if no MPM module is loaded immediately."
                )
                if module.params['warn_mpm_absent']:
                    module.warnings.append(
                        "No MPM module loaded! apache2 reload AND other module actions"
                        " will fail if no MPM module is loaded immediately."
                    )
            else:
                module.warnings.append(error_msg)
            return False

@@ -249,6 +268,7 @@ def main():
            force=dict(type='bool', default=False),
            state=dict(default='present', choices=['absent', 'present']),
            ignore_configcheck=dict(type='bool', default=False),
            warn_mpm_absent=dict(type='bool', default=True),
        ),
        supports_check_mode=True,
    )
@@ -14,7 +14,7 @@ __metaclass__ = type
DOCUMENTATION = '''
---
module: apt_rpm
short_description: apt_rpm package manager
short_description: APT-RPM package manager
description:
  - Manages packages with I(apt-rpm). Both low-level (I(rpm)) and high-level (I(apt-get)) package manager binaries required.
options:

@@ -12,7 +12,7 @@ __metaclass__ = type
DOCUMENTATION = r'''
---
module: beadm
short_description: Manage ZFS boot environments on FreeBSD/Solaris/illumos systems.
short_description: Manage ZFS boot environments on FreeBSD/Solaris/illumos systems
description:
  - Create, delete or activate ZFS boot environments.
  - Mount and unmount ZFS boot environments.

@@ -12,7 +12,7 @@ __metaclass__ = type
DOCUMENTATION = '''
---
module: circonus_annotation
short_description: create an annotation in circonus
short_description: Create an annotation in circonus
description:
  - Create an annotation event with a given category, title and description. Optionally start, end or durations can be provided
author: "Nick Harring (@NickatEpic)"

@@ -11,7 +11,7 @@ __metaclass__ = type

DOCUMENTATION = '''
module: clc_aa_policy
short_description: Create or Delete Anti Affinity Policies at CenturyLink Cloud.
short_description: Create or Delete Anti Affinity Policies at CenturyLink Cloud
description:
  - An Ansible module to Create or Delete Anti Affinity Policies at CenturyLink Cloud.
options:

@@ -12,7 +12,7 @@ __metaclass__ = type

DOCUMENTATION = '''
module: clc_alert_policy
short_description: Create or Delete Alert Policies at CenturyLink Cloud.
short_description: Create or Delete Alert Policies at CenturyLink Cloud
description:
  - An Ansible module to Create or Delete Alert Policies at CenturyLink Cloud.
options:

@@ -11,7 +11,7 @@ __metaclass__ = type

DOCUMENTATION = '''
module: clc_blueprint_package
short_description: deploys a blue print package on a set of servers in CenturyLink Cloud.
short_description: Deploys a blue print package on a set of servers in CenturyLink Cloud
description:
  - An Ansible module to deploy blue print package on a set of servers in CenturyLink Cloud.
options:

@@ -12,7 +12,7 @@ __metaclass__ = type

DOCUMENTATION = '''
module: clc_loadbalancer
short_description: Create, Delete shared loadbalancers in CenturyLink Cloud.
short_description: Create, Delete shared loadbalancers in CenturyLink Cloud
description:
  - An Ansible module to Create, Delete shared loadbalancers in CenturyLink Cloud.
options:

@@ -11,7 +11,7 @@ __metaclass__ = type

DOCUMENTATION = '''
module: clc_modify_server
short_description: modify servers in CenturyLink Cloud.
short_description: Modify servers in CenturyLink Cloud
description:
  - An Ansible module to modify servers in CenturyLink Cloud.
options:

@@ -11,7 +11,7 @@ __metaclass__ = type

DOCUMENTATION = '''
module: clc_publicip
short_description: Add and Delete public ips on servers in CenturyLink Cloud.
short_description: Add and Delete public ips on servers in CenturyLink Cloud
description:
  - An Ansible module to add or delete public ip addresses on an existing server or servers in CenturyLink Cloud.
options:

@@ -11,7 +11,7 @@ __metaclass__ = type

DOCUMENTATION = '''
module: clc_server
short_description: Create, Delete, Start and Stop servers in CenturyLink Cloud.
short_description: Create, Delete, Start and Stop servers in CenturyLink Cloud
description:
  - An Ansible module to Create, Delete, Start and Stop servers in CenturyLink Cloud.
options:

@@ -11,7 +11,7 @@ __metaclass__ = type

DOCUMENTATION = '''
module: clc_server_snapshot
short_description: Create, Delete and Restore server snapshots in CenturyLink Cloud.
short_description: Create, Delete and Restore server snapshots in CenturyLink Cloud
description:
  - An Ansible module to Create, Delete and Restore server snapshots in CenturyLink Cloud.
options:

@@ -11,7 +11,7 @@ __metaclass__ = type
DOCUMENTATION = '''
---
module: cloud_init_data_facts
short_description: Retrieve facts of cloud-init.
short_description: Retrieve facts of cloud-init
description:
  - Gathers facts by reading the status.json and result.json of cloud-init.
author: René Moser (@resmo)
|||
|
|
@ -11,7 +11,7 @@ __metaclass__ = type
|
|||
|
||||
DOCUMENTATION = '''
|
||||
module: consul
|
||||
short_description: "Add, modify & delete services within a consul cluster."
|
||||
short_description: Add, modify & delete services within a consul cluster
|
||||
description:
|
||||
- Registers services and checks for an agent with a consul cluster.
|
||||
A service is some process running on the agent node that should be advertised by
|
||||
|
|
@ -23,7 +23,7 @@ description:
|
|||
by Consul from the Service name and id respectively by appending 'service:'
|
||||
Node level checks require a I(check_name) and optionally a I(check_id)."
|
||||
- Currently, there is no complete way to retrieve the script, interval or ttl
|
||||
metadata for a registered check. Without this metadata it is not possible to
|
||||
metadata for a registered check. Without this metadata it is not possible to
|
||||
tell if the data supplied with ansible represents a change to a check. As a
|
||||
result this does not attempt to determine changes and will always report a
|
||||
changed occurred. An API method is planned to supply this metadata so at that
|
||||
|
|
@ -37,7 +37,7 @@ options:
|
|||
state:
|
||||
type: str
|
||||
description:
|
||||
- register or deregister the consul service, defaults to present
|
||||
- Register or deregister the consul service, defaults to present.
|
||||
default: present
|
||||
choices: ['present', 'absent']
|
||||
service_name:
|
||||
|
|
@ -45,30 +45,30 @@ options:
|
|||
description:
|
||||
- Unique name for the service on a node, must be unique per node,
|
||||
required if registering a service. May be omitted if registering
|
||||
a node level check
|
||||
a node level check.
|
||||
service_id:
|
||||
type: str
|
||||
description:
|
||||
- the ID for the service, must be unique per node. If I(state=absent),
|
||||
- The ID for the service, must be unique per node. If I(state=absent),
|
||||
defaults to the service name if supplied.
|
||||
host:
|
||||
type: str
|
||||
description:
|
||||
- host of the consul agent defaults to localhost
|
||||
- Host of the consul agent defaults to localhost.
|
||||
default: localhost
|
||||
port:
|
||||
type: int
|
||||
description:
|
||||
- the port on which the consul agent is running
|
||||
- The port on which the consul agent is running.
|
||||
default: 8500
|
||||
scheme:
|
||||
type: str
|
||||
description:
|
||||
- the protocol scheme on which the consul agent is running
|
||||
- The protocol scheme on which the consul agent is running.
|
||||
default: http
|
||||
validate_certs:
|
||||
description:
|
||||
- whether to verify the TLS certificate of the consul agent
|
||||
- Whether to verify the TLS certificate of the consul agent.
|
||||
type: bool
|
||||
default: true
|
||||
notes:
|
||||
|
|
@ -78,12 +78,12 @@ options:
|
|||
service_port:
|
||||
type: int
|
||||
description:
|
||||
- the port on which the service is listening. Can optionally be supplied for
|
||||
registration of a service, i.e. if I(service_name) or I(service_id) is set
|
||||
- The port on which the service is listening. Can optionally be supplied for
|
||||
registration of a service, i.e. if I(service_name) or I(service_id) is set.
|
||||
service_address:
|
||||
type: str
|
||||
description:
|
||||
- the address to advertise that the service will be listening on.
|
||||
- The address to advertise that the service will be listening on.
|
||||
This value will be passed as the I(address) parameter to Consul's
|
||||
C(/v1/agent/service/register) API method, so refer to the Consul API
|
||||
documentation for further details.
|
||||
|
|
@ -91,63 +91,68 @@ options:
|
|||
type: list
|
||||
elements: str
|
||||
description:
|
||||
- tags that will be attached to the service registration.
|
||||
- Tags that will be attached to the service registration.
|
||||
script:
|
||||
type: str
|
||||
description:
|
||||
- the script/command that will be run periodically to check the health
|
||||
of the service. Scripts require I(interval) and vice versa.
|
||||
- The script/command that will be run periodically to check the health of the service.
|
||||
- Requires I(interval) to be provided.
|
||||
interval:
|
||||
type: str
|
||||
description:
|
||||
- the interval at which the service check will be run. This is a number
|
||||
with a s or m suffix to signify the units of seconds or minutes e.g
|
||||
C(15s) or C(1m). If no suffix is supplied, m will be used by default e.g.
|
||||
C(1) will be C(1m). Required if the I(script) parameter is specified.
|
||||
- The interval at which the service check will be run.
|
||||
This is a number with a C(s) or C(m) suffix to signify the units of seconds or minutes e.g C(15s) or C(1m).
|
||||
If no suffix is supplied C(s) will be used by default, e.g. C(10) will be C(10s).
|
||||
- Required if one of the parameters I(script), I(http), or I(tcp) is specified.
|
||||
check_id:
|
||||
type: str
|
||||
description:
|
||||
- an ID for the service check. If I(state=absent), defaults to
|
||||
- An ID for the service check. If I(state=absent), defaults to
|
||||
I(check_name). Ignored if part of a service definition.
|
||||
check_name:
|
||||
type: str
|
||||
description:
|
||||
- a name for the service check. Required if standalone, ignored if
|
||||
- Name for the service check. Required if standalone, ignored if
|
||||
part of service definition.
|
||||
ttl:
|
||||
type: str
|
||||
description:
|
||||
- checks can be registered with a ttl instead of a I(script) and I(interval)
|
||||
- Checks can be registered with a ttl instead of a I(script) and I(interval)
|
||||
this means that the service will check in with the agent before the
|
||||
ttl expires. If it doesn't the check will be considered failed.
|
||||
Required if registering a check and the script an interval are missing
|
||||
Similar to the interval this is a number with a s or m suffix to
|
||||
signify the units of seconds or minutes e.g C(15s) or C(1m). If no suffix
|
||||
is supplied, C(m) will be used by default e.g. C(1) will be C(1m)
|
||||
Similar to the interval this is a number with a C(s) or C(m) suffix to
|
||||
signify the units of seconds or minutes e.g C(15s) or C(1m).
|
||||
If no suffix is supplied C(s) will be used by default, e.g. C(10) will be C(10s).
|
||||
tcp:
|
||||
type: str
|
||||
description:
|
||||
- Checks can be registered with a TCP port. This means that consul
|
||||
will check if the connection attempt to that port is successful (that is, the port is currently accepting connections).
|
||||
The format is C(host:port), for example C(localhost:80).
|
||||
I(interval) must also be provided with this option.
|
||||
- Requires I(interval) to be provided.
|
||||
version_added: '1.3.0'
|
||||
http:
|
||||
type: str
|
||||
description:
|
||||
- checks can be registered with an HTTP endpoint. This means that consul
|
||||
- Checks can be registered with an HTTP endpoint. This means that consul
|
||||
will check that the http endpoint returns a successful HTTP status.
|
||||
I(interval) must also be provided with this option.
|
||||
- Requires I(interval) to be provided.
|
||||
timeout:
|
||||
type: str
|
||||
description:
|
||||
- A custom HTTP check timeout. The consul default is 10 seconds.
|
||||
Similar to the interval this is a number with a C(s) or C(m) suffix to
|
||||
signify the units of seconds or minutes, e.g. C(15s) or C(1m).
|
||||
If no suffix is supplied C(s) will be used by default, e.g. C(10) will be C(10s).
|
||||
token:
|
||||
type: str
|
||||
description:
|
||||
- the token key identifying an ACL rule set. May be required to register services.
|
||||
- The token key identifying an ACL rule set. May be required to register services.
|
||||
ack_params_state_absent:
|
||||
type: bool
|
||||
description:
|
||||
- Disable deprecation warning when using parameters incompatible with I(state=absent).
|
||||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
|
@@ -583,7 +588,8 @@ def main():
http=dict(type='str'),
timeout=dict(type='str'),
tags=dict(type='list', elements='str'),
token=dict(no_log=True)
token=dict(no_log=True),
ack_params_state_absent=dict(type='bool'),
),
required_if=[
('state', 'present', ['service_name']),

@@ -591,14 +597,29 @@ def main():
],
supports_check_mode=False,
)
p = module.params

test_dependencies(module)
if p['state'] == 'absent' and any(p[x] for x in ['script', 'ttl', 'tcp', 'http', 'interval']) and not p['ack_params_state_absent']:
module.deprecate(
"The use of parameters 'script', 'ttl', 'tcp', 'http', 'interval' along with 'state=absent' is deprecated. "
"In community.general 8.0.0 their use will become an error. "
"To suppress this deprecation notice, set parameter ack_params_state_absent=true.",
version="8.0.0",
collection_name="community.general",
)
# When reaching c.g 8.0.0:
# - Replace the deprecation with a fail_json(), remove the "ack_params_state_absent" condition from the "if"
# - Add mutually_exclusive for ('script', 'ttl', 'tcp', 'http'), then remove that validation from parse_check()
# - Add required_by {'script': 'interval', 'http': 'interval', 'tcp': 'interval'}, then remove checks for 'interval' in ConsulCheck.__init__()
# - Deprecate the parameter ack_params_state_absent

try:
register_with_consul(module)
except SystemExit:
raise
except ConnectionError as e:
module.fail_json(msg='Could not connect to consul agent at %s:%s, error was %s' % (
module.params['host'], module.params['port'], str(e)))
module.fail_json(msg='Could not connect to consul agent at %s:%s, error was %s' % (p['host'], p['port'], str(e)))
except Exception as e:
module.fail_json(msg=str(e))
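The TODO comments in the hunk above describe moving this validation into AnsibleModule itself once community.general 8.0.0 is reached. A rough sketch of how those declarative constraints could look, with a deliberately trimmed argument spec (illustrative only, not the final code):

from ansible.module_utils.basic import AnsibleModule

module = AnsibleModule(
    argument_spec=dict(
        state=dict(type='str', default='present', choices=['present', 'absent']),
        service_name=dict(type='str'),
        script=dict(type='str'),
        interval=dict(type='str'),
        ttl=dict(type='str'),
        tcp=dict(type='str'),
        http=dict(type='str'),
    ),
    # At most one way of defining a check may be given at a time.
    mutually_exclusive=[('script', 'ttl', 'tcp', 'http')],
    # A script/tcp/http check only makes sense together with an interval.
    required_by={'script': 'interval', 'tcp': 'interval', 'http': 'interval'},
    required_if=[('state', 'present', ['service_name'])],
    supports_check_mode=False,
)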
@@ -13,7 +13,7 @@ __metaclass__ = type
DOCUMENTATION = '''
---
module: cpanm
short_description: Manages Perl library dependencies.
short_description: Manages Perl library dependencies
description:
- Manage Perl library dependencies using cpanminus.
options:

@@ -15,7 +15,7 @@ DOCUMENTATION = '''
---
module: deploy_helper
author: "Ramon de la Fuente (@ramondelafuente)"
short_description: Manages some of the steps common in deploying projects.
short_description: Manages some of the steps common in deploying projects
description:
- The Deploy Helper manages some of the steps common in deploying software.
It creates a folder structure, manages a symlink for the current release

@@ -13,7 +13,7 @@ __metaclass__ = type
DOCUMENTATION = '''
---
module: dimensiondata_vlan
short_description: Manage a VLAN in a Cloud Control network domain.
short_description: Manage a VLAN in a Cloud Control network domain
extends_documentation_fragment:
- community.general.dimensiondata
- community.general.dimensiondata_wait

@@ -230,18 +230,11 @@ dnsimple_record_info:
type: str
'''

import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import missing_required_lib
from ansible_collections.community.general.plugins.module_utils import deps

try:
with deps.declare("requests"):
from requests import Request, Session
except ImportError:
HAS_REQUESTS = False
REQUESTS_IMPORT_ERROR = traceback.format_exc()
else:
HAS_REQUESTS = True
REQUESTS_IMPORT_ERROR = None


def build_url(account, key, is_sandbox):

@@ -310,10 +303,7 @@ def main():
params['api_key'],
params['sandbox'])

if not HAS_REQUESTS:
module.exit_json(
msg=missing_required_lib('requests'),
exception=REQUESTS_IMPORT_ERROR)
deps.validate(module)

# At minimum we need account and key
if params['account_id'] and params['api_key']:
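The dnsimple hunks above swap the hand-rolled HAS_REQUESTS/traceback bookkeeping for the collection's deps helper: the optional import is wrapped in deps.declare() and checked later with deps.validate(module). A stripped-down sketch of the same pattern for a hypothetical module (the module name and options are illustrative, not taken from this diff):

from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils import deps

# A failed import is recorded by deps instead of raising immediately.
with deps.declare("requests"):
    import requests


def main():
    module = AnsibleModule(argument_spec=dict(url=dict(type='str', required=True)))
    # Fails the module with a "missing required library" message if the import above failed.
    deps.validate(module)
    response = requests.get(module.params['url'])
    module.exit_json(changed=False, status=response.status_code)


if __name__ == '__main__':
    main()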
@@ -12,7 +12,7 @@ __metaclass__ = type
DOCUMENTATION = '''
---
module: dnsmadeeasy
short_description: Interface with dnsmadeeasy.com (a DNS hosting service).
short_description: Interface with dnsmadeeasy.com (a DNS hosting service)
description:
- >
Manages DNS records via the v2 REST API of the DNS Made Easy service. It handles records only; there is no manipulation of domains or

@@ -12,7 +12,7 @@ __metaclass__ = type
DOCUMENTATION = '''
---
module: etcd3
short_description: "Set or delete key value pairs from an etcd3 cluster"
short_description: Set or delete key value pairs from an etcd3 cluster
requirements:
- etcd3
description:

@@ -83,75 +83,17 @@ RETURN = '''
...
'''

from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils.module_helper import StateModuleHelper
from ansible_collections.community.general.plugins.module_utils.gconftool2 import gconftool2_runner


class GConf2Preference(object):
def __init__(self, ansible, key, value_type, value,
direct=False, config_source=""):
self.ansible = ansible
self.key = key
self.value_type = value_type
self.value = value
self.config_source = config_source
self.direct = direct

def value_already_set(self):
return False

def call(self, call_type, fail_onerr=True):
""" Helper function to perform gconftool-2 operations """
config_source = []
direct = []
changed = False
out = ''

# If the configuration source is different from the default, create
# the argument
if self.config_source is not None and len(self.config_source) > 0:
config_source = ["--config-source", self.config_source]

# If direct is true, create the argument
if self.direct:
direct = ["--direct"]

# Execute the call
cmd = ["gconftool-2"]
try:
# If the call is "get", then we don't need as many parameters and
# we can ignore some
if call_type == 'get':
self.ansible.deprecate(
msg="State 'get' is deprecated. Please use the module community.general.gconftool2_info instead",
version="8.0.0", collection_name="community.general"
)
cmd.extend(["--get", self.key])
# Otherwise, we will use all relevant parameters
elif call_type == 'set':
cmd.extend(direct)
cmd.extend(config_source)
cmd.extend(["--type", self.value_type, "--{3}".format(call_type), self.key, self.value])
elif call_type == 'unset':
cmd.extend(["--unset", self.key])

# Start external command
rc, out, err = self.ansible.run_command(cmd)

if err and fail_onerr:
self.ansible.fail_json(msg='gconftool-2 failed with '
'error: %s' % (str(err)))
else:
changed = True

except OSError as exception:
self.ansible.fail_json(msg='gconftool-2 failed with exception: '
'%s' % exception)
return changed, out.rstrip()


def main():
# Setup the Ansible module
module = AnsibleModule(
class GConftool(StateModuleHelper):
change_params = 'value',
diff_params = 'value',
output_params = ('key', 'value_type')
facts_params = ('key', 'value_type')
facts_name = 'gconftool2'
module = dict(
argument_spec=dict(
key=dict(type='str', required=True, no_log=False),
value_type=dict(type='str', choices=['bool', 'float', 'int', 'string']),

@@ -160,75 +102,54 @@ def main():
direct=dict(type='bool', default=False),
config_source=dict(type='str'),
),
supports_check_mode=True
required_if=[
('state', 'present', ['value', 'value_type']),
('state', 'absent', ['value']),
('direct', True, ['config_source']),
],
supports_check_mode=True,
)

state_values = {"present": "set", "absent": "unset", "get": "get"}
def __init_module__(self):
self.runner = gconftool2_runner(self.module, check_rc=True)
if self.vars.state != "get":
if not self.vars.direct and self.vars.config_source is not None:
self.module.fail_json(msg='If the "config_source" is specified then "direct" must be "true"')

# Assign module values to dictionary values
key = module.params['key']
value_type = module.params['value_type']
if module.params['value'].lower() == "true":
value = "true"
elif module.params['value'] == "false":
value = "false"
else:
value = module.params['value']
self.vars.set('previous_value', self._get(), fact=True)
self.vars.set('value_type', self.vars.value_type)
self.vars.set_meta('value', initial_value=self.vars.previous_value)
self.vars.set('playbook_value', self.vars.value, fact=True)

state = state_values[module.params['state']]
direct = module.params['direct']
config_source = module.params['config_source']
def _make_process(self, fail_on_err):
def process(rc, out, err):
if err and fail_on_err:
self.ansible.fail_json(msg='gconftool-2 failed with error: %s' % (str(err)))
self.vars.value = out.rstrip()
return self.vars.value
return process

# Initialize some variables for later
change = False
new_value = ''
def _get(self):
return self.runner("state key", output_process=self._make_process(False)).run(state="get")

if state != "get":
if value is None or value == "":
module.fail_json(msg='State %s requires "value" to be set'
% str(state))
elif value_type is None or value_type == "":
module.fail_json(msg='State %s requires "value_type" to be set'
% str(state))
def state_get(self):
self.deprecate(
msg="State 'get' is deprecated. Please use the module community.general.gconftool2_info instead",
version="8.0.0", collection_name="community.general"
)

if direct and config_source is None:
module.fail_json(msg='If "direct" is "true" then the ' +
'"config_source" must be specified')
elif not direct and config_source is not None:
module.fail_json(msg='If the "config_source" is specified ' +
'then "direct" must be "true"')
def state_absent(self):
with self.runner("state key", output_process=self._make_process(False)) as ctx:
ctx.run()
self.vars.set('new_value', None, fact=True)

# Create a gconf2 preference
gconf_pref = GConf2Preference(module, key, value_type,
value, direct, config_source)
# Now we get the current value, if not found don't fail
dummy, current_value = gconf_pref.call("get", fail_onerr=False)
def state_present(self):
with self.runner("direct config_source value_type state key value", output_process=self._make_process(True)) as ctx:
self.vars.set('new_value', ctx.run(), fact=True)

# Check if the current value equals the value we want to set. If not, make
# a change
if current_value != value:
# If check mode, we know a change would have occurred.
if module.check_mode:
# So we will set the change to True
change = True
# And set the new_value to the value that would have been set
new_value = value
# If not check mode make the change.
else:
change, new_value = gconf_pref.call(state)
# If the value we want to set is the same as the current_value, we will
# set the new_value to the current_value for reporting
else:
new_value = current_value

facts = dict(gconftool2={'changed': change,
'key': key,
'value_type': value_type,
'new_value': new_value,
'previous_value': current_value,
'playbook_value': module.params['value']})

module.exit_json(changed=change, ansible_facts=facts)
def main():
GConftool.execute()


if __name__ == '__main__':
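The gconftool2 rewrite above ports the module to StateModuleHelper from the collection's module_utils, where the value of the state option selects a state_<value>() method instead of a hand-written if/else chain, and results are tracked through self.vars. A bare-bones sketch of that dispatch style for a hypothetical module (not the gconftool2 code itself):

from ansible_collections.community.general.plugins.module_utils.module_helper import StateModuleHelper


class Greeter(StateModuleHelper):
    # Hypothetical example: the helper routes execution to state_present()
    # or state_absent() based on the value of the 'state' option.
    module = dict(
        argument_spec=dict(
            name=dict(type='str', required=True),
            state=dict(type='str', default='present', choices=['present', 'absent']),
        ),
        supports_check_mode=True,
    )

    def state_present(self):
        self.vars.set('greeting', 'hello %s' % self.vars.name)

    def state_absent(self):
        self.vars.set('greeting', None)


def main():
    Greeter.execute()


if __name__ == '__main__':
    main()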
@@ -65,8 +65,8 @@ class GConftoolInfo(ModuleHelper):
self.runner = gconftool2_runner(self.module, check_rc=True)

def __run__(self):
with self.runner.context(args_order=["get", "key"]) as ctx:
rc, out, err = ctx.run(get=True)
with self.runner.context(args_order=["state", "key"]) as ctx:
rc, out, err = ctx.run(state="get")
self.vars.value = None if err and not out else out.rstrip()

@@ -105,7 +105,7 @@ options:
required: false
force:
description:
- Force gem to install, bypassing dependency checks.
- Force gem to (un-)install, bypassing dependency checks.
required: false
default: false
type: bool

@@ -234,7 +234,9 @@ def uninstall(module):
cmd.extend(['--version', module.params['version']])
else:
cmd.append('--all')
cmd.append('--executable')
cmd.append('--executable')
if module.params['force']:
cmd.append('--force')
cmd.append(module.params['name'])
module.run_command(cmd, environ_update=environ, check_rc=True)
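With the gem change above, I(force=true) now also applies to removal: when no version is pinned, the module ends up running roughly C(gem uninstall --all --executable --force <name>). A small reconstruction of that command assembly outside the module, with a hypothetical helper name:

def build_gem_uninstall_cmd(name, version=None, force=False, gem_binary='gem'):
    # Mirrors the logic in the hunk above: pin a version when given,
    # otherwise remove all installed versions plus their executables.
    cmd = [gem_binary, 'uninstall']
    if version:
        cmd.extend(['--version', version])
    else:
        cmd.append('--all')
        cmd.append('--executable')
    if force:
        cmd.append('--force')
    cmd.append(name)
    return cmd


print(build_gem_uninstall_cmd('rake', force=True))
# ['gem', 'uninstall', '--all', '--executable', '--force', 'rake']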
@@ -13,7 +13,7 @@ DOCUMENTATION = '''
---
module: github_deploy_key
author: "Ali (@bincyber)"
short_description: Manages deploy keys for GitHub repositories.
short_description: Manages deploy keys for GitHub repositories
description:
- "Adds or removes deploy keys for GitHub repositories. Supports authentication using username and password,
username and password and 2-factor authentication code (OTP), OAuth2 token, or personal access token. Admin

@@ -12,7 +12,7 @@ __metaclass__ = type

DOCUMENTATION = '''
module: github_issue
short_description: View GitHub issue.
short_description: View GitHub issue
description:
- View GitHub issue for a given repository and organization.
options:

@@ -11,7 +11,7 @@ __metaclass__ = type

DOCUMENTATION = '''
module: github_key
short_description: Manage GitHub access keys.
short_description: Manage GitHub access keys
description:
- Creates, removes, or updates GitHub access keys.
options:

@@ -108,17 +108,8 @@ EXAMPLES = '''
'''

RETURN = '''
create_release:
description:
- Version of the created release
- "For Ansible version 2.5 and later, if specified release version already exists, then State is unchanged"
- "For Ansible versions prior to 2.5, if specified release version already exists, then State is skipped"
type: str
returned: success
sample: 1.1.0

latest_release:
description: Version of the latest release
tag:
description: Version of the created/latest release.
type: str
returned: success
sample: 1.1.0

@@ -13,7 +13,7 @@ __metaclass__ = type

DOCUMENTATION = '''
module: gitlab_deploy_key
short_description: Manages GitLab project deploy keys.
short_description: Manages GitLab project deploy keys
description:
- Adds, updates and removes project deploy keys
author:

@@ -151,6 +151,7 @@ class GitLabDeployKey(object):
changed = True
else:
changed, deploy_key = self.update_deploy_key(self.deploy_key_object, {
'title': key_title,
'can_push': options['can_push']})

self.deploy_key_object = deploy_key

@@ -165,7 +165,7 @@ from ansible.module_utils.six import string_types
from ansible.module_utils.six import integer_types

from ansible_collections.community.general.plugins.module_utils.gitlab import (
auth_argument_spec, gitlab_authentication, ensure_gitlab_package
auth_argument_spec, gitlab_authentication, ensure_gitlab_package, filter_returned_variables
)


@@ -296,11 +296,7 @@ def native_python_main(this_gitlab, purge, requested_variables, state, module):
before = [x.attributes for x in gitlab_keys]

gitlab_keys = this_gitlab.list_all_group_variables()
existing_variables = [x.attributes for x in gitlab_keys]

# preprocessing:filter out and enrich before compare
for item in existing_variables:
item.pop('group_id')
existing_variables = filter_returned_variables(gitlab_keys)

for item in requested_variables:
item['key'] = item.pop('name')

@@ -331,9 +327,7 @@ def native_python_main(this_gitlab, purge, requested_variables, state, module):
if purge:
# refetch and filter
gitlab_keys = this_gitlab.list_all_group_variables()
existing_variables = [x.attributes for x in gitlab_keys]
for item in existing_variables:
item.pop('group_id')
existing_variables = filter_returned_variables(gitlab_keys)

remove = [x for x in existing_variables if x not in requested_variables]
for item in remove:
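The gitlab_group_variable hunks above replace the repeated attribute-filtering loops with a shared filter_returned_variables() helper imported from the collection's gitlab module_utils. A rough sketch of what such a helper does, modeled on the removed inline code; the exact set of dropped fields is an assumption, not taken from this diff:

def filter_returned_variables(gitlab_variables):
    # Assumed behaviour: take the raw attribute dicts and drop identifiers
    # (here group_id) that should not take part in the comparison.
    # The real helper may trim additional fields.
    filtered = []
    for variable in gitlab_variables:
        attributes = dict(variable.attributes)
        attributes.pop('group_id', None)
        filtered.append(attributes)
    return filtered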
Some files were not shown because too many files have changed in this diff.