Mirror of https://github.com/ansible-collections/community.general.git, synced 2025-10-24 04:54:00 -07:00

Compare commits

204 commits
Commits in this comparison (abbreviated SHA1):

1a81760d47 c3db6343e5 e87c2c9eb4 90a1743acf eb6ef5ae2e 89dd500159 9bab144d06 31eddc0ffe
bc64c4035e 3ed65a0a37 93008fd41c 1462ed0b4a 139fcdba88 e9b8692025 3d8049190c 54025a2efc
94015c2096 b1a711633b 79d15d526a 3aeaab2708 470f4e8c02 f7f79defab d4d1c847cf 044c706d64
e22667b72f 4fc0c9a6d8 93650233e4 a41d1851a5 bd4da7f2c0 bf853f6f35 5dc34829c4 061b861211
e4ce977079 4776ee20e3 fdfcd15960 b934e06569 5589bcb659 41af1c3693 3802d54922 ff4aff0bef
86b19a2bf4 1fc53eea22 3ecbadf694 2181c2b090 0c295d4f61 db451bf68a 91095240f4 e92908b66e
ef09ea519c 611e024550 94b4034fd2 1bfdee0830 9350954aa3 2f05cd3330 27906ca76b 278b0607f5
00d1160b56 4266163c13 ec7f885e2f 4f71c9384e 53e5f51e57 83ff925417 8ff611089b 4def87bc53
3d70bfa1e4 554ec94110 ab4f96105c d8cf32e6c4 ead9524dc3 5d5d403415 d483fd9482 8da9cf3276
3c5c3a0113 7def57a71f e5930aabcb 48bfba435f 9740b76f3c 24cf561135 61324ed9eb 99336ba5fe
9d99ccef2d a146eb3118 c7f7bd6050 54099d77ff ee07d8320a 0729f0c262 57cd48f3cf afd2151672
ea9b272043 60addb332d 1ade62c5bc 7c8cc96d8b ca177a0ceb c0e769e5f5 585dbc3171 b400491ef3
490baed566 811c4a304a c0fde76b79 16c7615b82 474364c862 1da5f7dc54 559c914e36 91cca4ae49
82a9db9738 3fd84d71b8 a17124f3c4 efc2cbf840 aa136aca4c a1ca89b058 dd70419d18 ef5ac023cf
8bc5494ad5 d95a821d5b b7697fe3de 16e05ab5f3 5cf7ce705a c8b8668212 2d450a5a36 e08412c345
c355f93d62 80206b5a53 e978fd4d61 6fc8492ecf 95beb452a8 c10e9e2650 ac35bf4acb 50b9855ace
2ab26db197 5fcf5d0c8b 0f0ad6b6d1 95f3109ddc 6037c5d1e6 a70d9773dd bc50b48205 02e6a8608f
82f4b51873 589e8fd5e1 58f74b96ef 1489c080a7 6f845f61f0 c17f5ff3e8 ff21afb227 c1d6e5c3c2
377b5d4ccd f3f7b2776f df8bfad9b9 8a231e4b36 671f850069 2fa36592e4 51d704bfe3 2b0e335752
cc28cde3a2 2d616bf4d1 25d9ab8dcd 9abda18071 406fa12142 caaebb38e7 2bc74f4f04 e1e89f7735
efedd0d6e2 8079aea1ee ee7fdf5f8c ced1baad63 a0d4ee4fc1 d930c8d877 352e91a389 4b7554445b
3a456a645d 6f4580ebd9 8d83557e52 5ebd980e26 17447d2a84 ffee01cd9c 38b4e316ae b52a6f3611
2435fb3f30 d6d9f84b0a 4b04e3cc32 c681249364 57a4195b0d 41a23f093d 0bd085714f a4be229f67
9c4487ebc5 09ea441316 fef6abc8c8 618e567377 246abffce5 076ebb4b2d 4948b521a3 e9ec26ff1b
72d4476813 e96bfd07b4 c6d0419460 081b4068a0 8fba9ca751 fad4c2d956 6065dd0f18 a411ff5ea8
42b245eabf 9a676bb88f cd26aec2f3 e9327a0464
986 changed files with 15070 additions and 3505 deletions
@@ -29,6 +29,7 @@ schedules:
always: true
branches:
include:
- stable-6
- stable-5
- cron: 0 11 * * 0
displayName: Weekly (old stable branches)

@@ -111,19 +112,6 @@ stages:
- test: 2
- test: 3
- test: 4
- stage: Sanity_2_11
displayName: Sanity 2.11
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Test {0}
testFormat: 2.11/sanity/{0}
targets:
- test: 1
- test: 2
- test: 3
- test: 4
### Units
- stage: Units_devel
displayName: Units devel

@@ -175,19 +163,26 @@ stages:
targets:
- test: 2.6
- test: 3.8
- stage: Units_2_11
displayName: Units 2.11

## Remote
- stage: Remote_devel_extra_vms
displayName: Remote devel extra VMs
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Python {0}
testFormat: 2.11/units/{0}/1
testFormat: devel/{0}
targets:
- test: 2.7
- test: 3.5

## Remote
- name: Alpine 3.17
test: alpine/3.17
# - name: Fedora 37
# test: fedora/37
# - name: Ubuntu 20.04
# test: ubuntu/20.04
- name: Ubuntu 22.04
test: ubuntu/22.04
groups:
- vm
- stage: Remote_devel
displayName: Remote devel
dependsOn: []

@@ -200,12 +195,12 @@ stages:
test: macos/12.0
- name: RHEL 7.9
test: rhel/7.9
- name: RHEL 9.0
test: rhel/9.0
- name: FreeBSD 12.3
test: freebsd/12.3
- name: RHEL 9.1
test: rhel/9.1
- name: FreeBSD 13.1
test: freebsd/13.1
- name: FreeBSD 12.4
test: freebsd/12.4
groups:
- 1
- 2

@@ -220,8 +215,8 @@ stages:
targets:
- name: RHEL 9.0
test: rhel/9.0
- name: FreeBSD 13.1
test: freebsd/13.1
- name: FreeBSD 12.3
test: freebsd/12.3
groups:
- 1
- 2

@@ -260,22 +255,6 @@ stages:
- 1
- 2
- 3
- stage: Remote_2_11
displayName: Remote 2.11
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
testFormat: 2.11/{0}
targets:
- name: RHEL 7.9
test: rhel/7.9
- name: RHEL 8.3
test: rhel/8.3
groups:
- 1
- 2
- 3

### Docker
- stage: Docker_devel

@@ -288,8 +267,8 @@ stages:
targets:
- name: CentOS 7
test: centos7
- name: Fedora 36
test: fedora36
- name: Fedora 37
test: fedora37
- name: openSUSE 15
test: opensuse15
- name: Ubuntu 20.04

@@ -310,8 +289,8 @@ stages:
parameters:
testFormat: 2.14/linux/{0}
targets:
- name: Ubuntu 20.04
test: ubuntu2004
- name: Fedora 36
test: fedora36
groups:
- 1
- 2

@@ -352,24 +331,6 @@ stages:
- 1
- 2
- 3
- stage: Docker_2_11
displayName: Docker 2.11
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
testFormat: 2.11/linux/{0}
targets:
- name: Fedora 32
test: fedora32
- name: Fedora 33
test: fedora33
- name: Alpine 3
test: alpine3
groups:
- 1
- 2
- 3

### Community Docker
- stage: Docker_community_devel

@@ -385,7 +346,7 @@ stages:
- name: ArchLinux
test: archlinux/3.10
- name: CentOS Stream 8
test: centos-stream8/3.8
test: centos-stream8/3.9
groups:
- 1
- 2

@@ -433,46 +394,32 @@ stages:
testFormat: 2.12/generic/{0}/1
targets:
- test: 3.8
- stage: Generic_2_11
displayName: Generic 2.11
dependsOn: []
jobs:
- template: templates/matrix.yml
parameters:
nameFormat: Python {0}
testFormat: 2.11/generic/{0}/1
targets:
- test: 2.7
- test: 3.5

- stage: Summary
condition: succeededOrFailed()
dependsOn:
- Sanity_devel
- Sanity_2_11
- Sanity_2_12
- Sanity_2_13
- Sanity_2_14
- Units_devel
- Units_2_11
- Units_2_12
- Units_2_13
- Units_2_14
- Remote_devel_extra_vms
- Remote_devel
- Remote_2_11
- Remote_2_12
- Remote_2_13
- Remote_2_14
- Docker_devel
- Docker_2_11
- Docker_2_12
- Docker_2_13
- Docker_2_14
- Docker_community_devel
- Generic_devel
- Generic_2_11
- Generic_2_12
- Generic_2_13
- Generic_2_14
# Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
# - Generic_devel
# - Generic_2_12
# - Generic_2_13
# - Generic_2_14
jobs:
- template: templates/coverage.yml
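The hunks above drop the ansible-core 2.11 stages and reshuffle the target platforms; every stage in this pipeline follows the same matrix-template pattern that is visible in the removed lines. A minimal sketch of one such stage, using only keys that appear in the hunks above (the stage name and the testFormat value are illustrative, not copied from the file):

```yaml
stages:
  - stage: Sanity_devel
    displayName: Sanity devel
    dependsOn: []
    jobs:
      - template: templates/matrix.yml
        parameters:
          nameFormat: Test {0}
          testFormat: devel/sanity/{0}   # assumed value; the file uses e.g. 2.11/sanity/{0}
          targets:
            - test: 1
            - test: 2
            - test: 3
            - test: 4
```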
.github/BOTMETA.yml (vendored): 32 changes
@@ -265,6 +265,8 @@ files:
maintainers: delineaKrehl tylerezimmerman
$module_utils/:
labels: module_utils
$module_utils/deps.py:
maintainers: russoz
$module_utils/gconftool2.py:
labels: gconftool2
maintainers: russoz

@@ -279,9 +281,14 @@ files:
maintainers: $team_huawei
$module_utils/identity/keycloak/keycloak.py:
maintainers: $team_keycloak
$module_utils/identity/keycloak/keycloak_clientsecret.py:
maintainers: $team_keycloak fynncfchen johncant
$module_utils/ipa.py:
labels: ipa
maintainers: $team_ipa
$module_utils/jenkins.py:
labels: jenkins
maintainers: russoz
$module_utils/manageiq.py:
labels: manageiq
maintainers: $team_manageiq

@@ -302,6 +309,9 @@ files:
$module_utils/pipx.py:
labels: pipx
maintainers: russoz
$module_utils/puppet.py:
labels: puppet
maintainers: russoz
$module_utils/pure.py:
labels: pure pure_storage
maintainers: $team_purestorage

@@ -313,6 +323,8 @@ files:
$module_utils/scaleway.py:
labels: cloud scaleway
maintainers: $team_scaleway
$module_utils/ssh.py:
maintainers: russoz
$module_utils/storage/hpe3par/hpe3par.py:
maintainers: farhan7500 gautamphegde
$module_utils/utm_utils.py:

@@ -428,7 +440,7 @@ files:
labels: datadog_event
maintainers: n0ts
$modules/datadog_monitor.py:
maintainers: skornehl
ignore: skornehl
$modules/dconf.py:
maintainers: azaghal
$modules/deploy_helper.py:

@@ -665,6 +677,10 @@ files:
maintainers: Gaetan2907
$modules/keycloak_clientscope.py:
maintainers: Gaetan2907
$modules/keycloak_clientsecret_info.py:
maintainers: fynncfchen johncant
$modules/keycloak_clientsecret_regenerate.py:
maintainers: fynncfchen johncant
$modules/keycloak_group.py:
maintainers: adamgoossens
$modules/keycloak_identity_provider.py:

@@ -814,6 +830,10 @@ files:
maintainers: shane-walker xcambar
$modules/nsupdate.py:
maintainers: nerzhul
$modules/ocapi_command.py:
maintainers: $team_wdc
$modules/ocapi_info.py:
maintainers: $team_wdc
$modules/oci_vcn.py:
maintainers: $team_oracle rohitChaware
$modules/odbc.py:

@@ -822,7 +842,8 @@ files:
maintainers: marc-sensenich
$modules/ohai.py:
labels: ohai
maintainers: $team_ansible_core mpdehaan
maintainers: $team_ansible_core
ignore: mpdehaan
$modules/omapi_host.py:
maintainers: amasolov nerzhul
$modules/one_:

@@ -1018,7 +1039,7 @@ files:
maintainers: dagwieers
$modules/redfish_:
ignore: jose-delarosa
maintainers: $team_redfish
maintainers: $team_redfish TSKushal
$modules/redhat_subscription.py:
labels: redhat_subscription
maintainers: barnabycourt alikins kahowell

@@ -1072,7 +1093,8 @@ files:
$modules/sapcar_extract.py:
maintainers: RainerLeber
$modules/say.py:
maintainers: $team_ansible_core mpdehaan
maintainers: $team_ansible_core
ignore: mpdehaan
$modules/scaleway_:
maintainers: $team_scaleway
$modules/scaleway_compute_private_network.py:

@@ -1372,7 +1394,7 @@ macros:
team_opennebula: ilicmilan meerkampdvv rsmontero xorel nilsding
team_oracle: manojmeda mross22 nalsaber
team_purestorage: bannaych dnix101 genegr lionmax opslounge raekins sdodsley sile16
team_redfish: mraineri tomasg2012 xmadsen renxulei rajeevkallur bhavya06
team_redfish: mraineri tomasg2012 xmadsen renxulei rajeevkallur bhavya06 jyundt
team_rhn: FlossWare alikins barnabycourt vritant
team_scaleway: remyleone abarbare
team_solaris: bcoca fishman jasperla jpdasma mator scathatheworm troy2914 xen0l
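All of the BOTMETA entries above share one shape: a path key under ``files:`` with optional ``labels``, ``maintainers``, and ``ignore`` fields, and ``$team_*`` macros expanded from the ``macros:`` section. A minimal sketch of that structure with hypothetical names (the path and the user names below are placeholders, not real entries):

```yaml
files:
  $modules/example_module.py:
    labels: example_module
    maintainers: $team_example some_github_user
    ignore: former_maintainer
macros:
  team_example: user_one user_two
```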
.github/workflows/ansible-test.yml (vendored, new file): 193 lines
@@ -0,0 +1,193 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

# For the comprehensive list of the inputs supported by the ansible-community/ansible-test-gh-action GitHub Action, see
# https://github.com/marketplace/actions/ansible-test

name: EOL CI
on:
# Run EOL CI against all pushes (direct commits, also merged PRs), Pull Requests
push:
branches:
- main
- stable-*
pull_request:
# Run EOL CI once per day (at 10:00 UTC)
schedule:
- cron: '0 10 * * *'

concurrency:
# Make sure there is at most one active run per PR, but do not cancel any non-PR runs
group: ${{ github.workflow }}-${{ (github.head_ref && github.event.number) || github.run_id }}
cancel-in-progress: true

jobs:
sanity:
name: EOL Sanity (Ⓐ${{ matrix.ansible }})
strategy:
matrix:
ansible:
- '2.11'
# Ansible-test on various stable branches does not yet work well with cgroups v2.
# Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
# image for these stable branches. The list of branches where this is necessary will
# shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
# for the latest list.
runs-on: >-
${{ contains(fromJson(
'["2.9", "2.10", "2.11"]'
), matrix.ansible) && 'ubuntu-20.04' || 'ubuntu-latest' }}
steps:
- name: Perform sanity testing
uses: felixfontein/ansible-test-gh-action@main
with:
ansible-core-github-repository-slug: felixfontein/ansible
ansible-core-version: stable-${{ matrix.ansible }}
coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
pull-request-change-detection: 'true'
testing-type: sanity

units:
# Ansible-test on various stable branches does not yet work well with cgroups v2.
# Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
# image for these stable branches. The list of branches where this is necessary will
# shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
# for the latest list.
runs-on: >-
${{ contains(fromJson(
'["2.9", "2.10", "2.11"]'
), matrix.ansible) && 'ubuntu-20.04' || 'ubuntu-latest' }}
name: EOL Units (Ⓐ${{ matrix.ansible }}+py${{ matrix.python }})
strategy:
# As soon as the first unit test fails, cancel the others to free up the CI queue
fail-fast: true
matrix:
ansible:
- ''
python:
- ''
exclude:
- ansible: ''
include:
- ansible: '2.11'
python: '2.7'
- ansible: '2.11'
python: '3.5'

steps:
- name: >-
Perform unit testing against
Ansible version ${{ matrix.ansible }}
uses: felixfontein/ansible-test-gh-action@main
with:
ansible-core-github-repository-slug: felixfontein/ansible
ansible-core-version: stable-${{ matrix.ansible }}
coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
pre-test-cmd: >-
mkdir -p ../../ansible
;
git clone --depth=1 --single-branch https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
pull-request-change-detection: 'true'
target-python-version: ${{ matrix.python }}
testing-type: units

integration:
# Ansible-test on various stable branches does not yet work well with cgroups v2.
# Since ubuntu-latest now uses Ubuntu 22.04, we need to fall back to the ubuntu-20.04
# image for these stable branches. The list of branches where this is necessary will
# shrink over time, check out https://github.com/ansible-collections/news-for-maintainers/issues/28
# for the latest list.
runs-on: >-
${{ contains(fromJson(
'["2.9", "2.10", "2.11"]'
), matrix.ansible) && 'ubuntu-20.04' || 'ubuntu-latest' }}
name: EOL I (Ⓐ${{ matrix.ansible }}+${{ matrix.docker }}+py${{ matrix.python }}:${{ matrix.target }})
strategy:
fail-fast: false
matrix:
ansible:
- ''
docker:
- ''
python:
- ''
target:
- ''
exclude:
- ansible: ''
include:
# 2.11
- ansible: '2.11'
docker: fedora32
python: ''
target: azp/posix/1/
- ansible: '2.11'
docker: fedora32
python: ''
target: azp/posix/2/
- ansible: '2.11'
docker: fedora32
python: ''
target: azp/posix/3/
- ansible: '2.11'
docker: fedora33
python: ''
target: azp/posix/1/
- ansible: '2.11'
docker: fedora33
python: ''
target: azp/posix/2/
- ansible: '2.11'
docker: fedora33
python: ''
target: azp/posix/3/
- ansible: '2.11'
docker: alpine3
python: ''
target: azp/posix/1/
- ansible: '2.11'
docker: alpine3
python: ''
target: azp/posix/2/
- ansible: '2.11'
docker: alpine3
python: ''
target: azp/posix/3/
# Right now all generic tests are disabled. Uncomment when at least one of them is re-enabled.
# - ansible: '2.11'
# docker: default
# python: '2.7'
# target: azp/generic/1/
# - ansible: '2.11'
# docker: default
# python: '3.5'
# target: azp/generic/2/

steps:
- name: >-
Perform integration testing against
Ansible version ${{ matrix.ansible }}
under Python ${{ matrix.python }}
uses: felixfontein/ansible-test-gh-action@main
with:
ansible-core-github-repository-slug: felixfontein/ansible
ansible-core-version: stable-${{ matrix.ansible }}
coverage: ${{ github.event_name == 'schedule' && 'always' || 'never' }}
docker-image: ${{ matrix.docker }}
integration-continue-on-error: 'false'
integration-diff: 'false'
integration-retry-on-error: 'true'
pre-test-cmd: >-
mkdir -p ../../ansible
;
git clone --depth=1 --single-branch https://github.com/ansible-collections/ansible.posix.git ../../ansible/posix
;
git clone --depth=1 --single-branch https://github.com/ansible-collections/community.crypto.git ../../community/crypto
;
git clone --depth=1 --single-branch https://github.com/ansible-collections/community.internal_test_tools.git ../../community/internal_test_tools
pull-request-change-detection: 'true'
target: ${{ matrix.target }}
target-python-version: ${{ matrix.python }}
testing-type: integration
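A note on the matrix layout used by all three jobs in the new workflow: each axis (``ansible``, ``python``, ``docker``, ``target``) starts as a single empty placeholder, ``exclude`` drops the placeholder combination, and ``include`` adds back only the explicitly listed combinations, so exactly those combinations are generated. A stripped-down sketch of the same pattern, with illustrative values:

```yaml
strategy:
  fail-fast: false
  matrix:
    ansible:
      - ''              # placeholder axis value
    python:
      - ''
    exclude:
      - ansible: ''     # remove the placeholder combination
    include:
      - ansible: '2.11' # only these combinations actually run
        python: '2.7'
      - ansible: '2.11'
        python: '3.5'
```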
.github/workflows/docs-pr.yml (vendored): 93 lines (file removed)
@@ -1,93 +0,0 @@
---
# Copyright (c) Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

name: Collection Docs
concurrency:
group: docs-${{ github.head_ref }}
cancel-in-progress: true
on:
pull_request_target:
types: [opened, synchronize, reopened, closed]
paths-ignore:
- '.azure-pipelines/**'
- 'changelogs/**'
- 'meta/**'
- 'tests/**'

jobs:
build-docs:
permissions:
contents: read
name: Build Ansible Docs
uses: ansible-community/github-docs-build/.github/workflows/_shared-docs-build-pr.yml@main
with:
init-fail-on-error: true
provide-link-targets: |
ansible_collections.ansible.builtin.dict2items_filter
ansible_collections.ansible.builtin.items_lookup
ansible_collections.ansible.builtin.path_join_filter
ansible_collections.community.kubevirt.kubevirt_cdi_upload_module
ansible_collections.community.kubevirt.kubevirt_inventory
ansible_collections.community.kubevirt.kubevirt_preset_module
ansible_collections.community.kubevirt.kubevirt_pvc_module
ansible_collections.community.kubevirt.kubevirt_rs_module
ansible_collections.community.kubevirt.kubevirt_template_module
ansible_collections.community.kubevirt.kubevirt_vm_module
ansible_collections.infoblox.nios_modules.nios_a_record_module
ansible_collections.infoblox.nios_modules.nios_aaaa_record_module
ansible_collections.infoblox.nios_modules.nios_cname_record_module
ansible_collections.infoblox.nios_modules.nios_dns_view_module
ansible_collections.infoblox.nios_modules.nios_fixed_address_module
ansible_collections.infoblox.nios_modules.nios_host_record_module
ansible_collections.infoblox.nios_modules.nios_lookup_lookup
ansible_collections.infoblox.nios_modules.nios_member_module
ansible_collections.infoblox.nios_modules.nios_mx_record_module
ansible_collections.infoblox.nios_modules.nios_naptr_record_module
ansible_collections.infoblox.nios_modules.nios_network_module
ansible_collections.infoblox.nios_modules.nios_network_view_module
ansible_collections.infoblox.nios_modules.nios_next_ip_lookup
ansible_collections.infoblox.nios_modules.nios_next_network_lookup
ansible_collections.infoblox.nios_modules.nios_nsgroup_module
ansible_collections.infoblox.nios_modules.nios_ptr_record_module
ansible_collections.infoblox.nios_modules.nios_srv_record_module
ansible_collections.infoblox.nios_modules.nios_txt_record_module
ansible_collections.infoblox.nios_modules.nios_zone_module

comment:
permissions:
pull-requests: write
runs-on: ubuntu-latest
needs: build-docs
name: PR comments
steps:
- name: PR comment
uses: ansible-community/github-docs-build/actions/ansible-docs-build-comment@main
with:
body-includes: '## Docs Build'
reactions: heart
action: ${{ needs.build-docs.outputs.changed != 'true' && 'remove' || '' }}
on-closed-body: |
## Docs Build 📝

This PR is closed and any previously published docsite has been unpublished.
on-merged-body: |
## Docs Build 📝

Thank you for contribution!✨

This PR has been merged and your docs changes will be incorporated when they are next published.
body: |
## Docs Build 📝

Thank you for contribution!✨

The docsite for **this PR** is available for download as an artifact from this run:
${{ needs.build-docs.outputs.artifact-url }}

File changes:

${{ needs.build-docs.outputs.diff-files-rendered }}

${{ needs.build-docs.outputs.diff-rendered }}
CHANGELOG.rst: 310 changes
@@ -6,13 +6,274 @@ Community General Release Notes

This changelog describes changes after version 5.0.0.

v6.0.0-a1
=========
v6.4.0
======

Release Summary
---------------

This is a pre-release for the upcoming 6.0.0 major release. The main objective of this pre-release is to make it possible to test the large structural changes by flattening the directory structure. See the corresponding entry in the changelog for details.
Regular feature and bugfix release.

Minor Changes
-------------

- dnsimple - set custom User-Agent for API requests to DNSimple (https://github.com/ansible-collections/community.general/pull/5927).
- flatpak_remote - add new boolean option ``enabled``. It controls whether the remote is enabled or not (https://github.com/ansible-collections/community.general/pull/5926).
- gitlab_project - add ``releases_access_level``, ``environments_access_level``, ``feature_flags_access_level``, ``infrastructure_access_level``, ``monitor_access_level``, and ``security_and_compliance_access_level`` options (https://github.com/ansible-collections/community.general/pull/5986).
- jc filter plugin - added the ability to use parser plugins (https://github.com/ansible-collections/community.general/pull/6043).
- keycloak_group - add new optional module parameter ``parents`` to properly handle keycloak subgroups (https://github.com/ansible-collections/community.general/pull/5814).
- keycloak_user_federation - make ``org.keycloak.storage.ldap.mappers.LDAPStorageMapper`` the default value for mappers ``providerType`` (https://github.com/ansible-collections/community.general/pull/5863).
- ldap modules - add ``xorder_discovery`` option (https://github.com/ansible-collections/community.general/issues/6045, https://github.com/ansible-collections/community.general/pull/6109).
- lxd_container - add diff and check mode (https://github.com/ansible-collections/community.general/pull/5866).
- mattermost, rocketchat, slack - replace missing default favicon with docs.ansible.com favicon (https://github.com/ansible-collections/community.general/pull/5928).
- modprobe - add ``persistent`` option (https://github.com/ansible-collections/community.general/issues/4028, https://github.com/ansible-collections/community.general/pull/542).
- osx_defaults - include stderr in error messages (https://github.com/ansible-collections/community.general/pull/6011).
- proxmox - suppress urllib3 ``InsecureRequestWarnings`` when ``validate_certs`` option is ``false`` (https://github.com/ansible-collections/community.general/pull/5931).
- redfish_command - adding ``EnableSecureBoot`` functionality (https://github.com/ansible-collections/community.general/pull/5899).
- redfish_command - adding ``VerifyBiosAttributes`` functionality (https://github.com/ansible-collections/community.general/pull/5900).
- sefcontext - add support for path substitutions (https://github.com/ansible-collections/community.general/issues/1193).

Deprecated Features
-------------------

- gitlab_runner - the option ``access_level`` will lose its default value in community.general 8.0.0. From that version on, you have to set this option to ``ref_protected`` explicitly if you want to have a protected runner (https://github.com/ansible-collections/community.general/issues/5925).

Bugfixes
--------

- cartesian and flattened lookup plugins - adjust to parameter deprecation in ansible-core 2.14's ``listify_lookup_plugin_terms`` helper function (https://github.com/ansible-collections/community.general/pull/6074).
- cloudflare_dns - fixed the idempotency for SRV DNS records (https://github.com/ansible-collections/community.general/pull/5972).
- cloudflare_dns - fixed the possibility of setting a root-level SRV DNS record (https://github.com/ansible-collections/community.general/pull/5972).
- github_webhook - fix always changed state when no secret is provided (https://github.com/ansible-collections/community.general/pull/5994).
- jenkins_plugin - fix error due to undefined variable when updates file is not downloaded (https://github.com/ansible-collections/community.general/pull/6100).
- keycloak_client - fix accidental replacement of value for attribute ``saml.signing.private.key`` with ``no_log`` in wrong contexts (https://github.com/ansible-collections/community.general/pull/5934).
- lxd_* modules, lxd inventory plugin - fix TLS/SSL certificate validation problems by using the correct purpose when creating the TLS context (https://github.com/ansible-collections/community.general/issues/5616, https://github.com/ansible-collections/community.general/pull/6034).
- nmcli - fix change handling of values specified as an integer 0 (https://github.com/ansible-collections/community.general/pull/5431).
- nmcli - fix failure to handle WIFI settings when connection type not specified (https://github.com/ansible-collections/community.general/pull/5431).
- nmcli - fix improper detection of changes to ``wifi.wake-on-wlan`` (https://github.com/ansible-collections/community.general/pull/5431).
- nmcli - order is significant for lists of addresses (https://github.com/ansible-collections/community.general/pull/6048).
- onepassword lookup plugin - Changed to ignore errors from "op account get" calls. Previously, errors would prevent auto-signin code from executing (https://github.com/ansible-collections/community.general/pull/5942).
- terraform and timezone - slight refactoring to avoid linter reporting potentially undefined variables (https://github.com/ansible-collections/community.general/pull/5933).
- various plugins and modules - remove unnecessary imports (https://github.com/ansible-collections/community.general/pull/5940).
- yarn - fix ``global=true`` to check for the configured global folder instead of assuming the default (https://github.com/ansible-collections/community.general/pull/5829)
- yarn - fix ``state=absent`` not working with ``global=true`` when the package does not include a binary (https://github.com/ansible-collections/community.general/pull/5829)
- yarn - fix ``state=latest`` not working with ``global=true`` (https://github.com/ansible-collections/community.general/issues/5712).
- zfs_delegate_admin - zfs allow output can now be parsed when uids/gids are not known to the host system (https://github.com/ansible-collections/community.general/pull/5943).
- zypper - make package managing work on readonly filesystem of openSUSE MicroOS (https://github.com/ansible-collections/community.general/pull/5615).
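Several of the 6.4.0 entries above add new module options. As one hedged illustration of the new ``persistent`` option of ``modprobe`` (the option name comes from the changelog entry; the module name, value, and the rest of the task are an assumed usage, not taken from the release notes):

```yaml
- name: Load the dummy kernel module now and keep it loaded across reboots
  community.general.modprobe:
    name: dummy
    state: present
    persistent: present
```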
v6.3.0
======

Release Summary
---------------

Regular bugfix and feature release.

Minor Changes
-------------

- apache2_module - add module argument ``warn_mpm_absent`` to control whether warnings are raised in some edge cases (https://github.com/ansible-collections/community.general/pull/5793).
- bitwarden lookup plugin - can now retrieve secrets from custom fields (https://github.com/ansible-collections/community.general/pull/5694).
- bitwarden lookup plugin - implement filtering results by ``collection_id`` parameter (https://github.com/ansible-collections/community.general/issues/5849).
- dig lookup plugin - support CAA record type (https://github.com/ansible-collections/community.general/pull/5913).
- gitlab_project - add ``builds_access_level``, ``container_registry_access_level`` and ``forking_access_level`` options (https://github.com/ansible-collections/community.general/pull/5706).
- gitlab_runner - add new boolean option ``access_level_on_creation``. It controls whether the value of ``access_level`` is used for runner registration or not. The option ``access_level`` has been ignored on registration so far and was only used on updates (https://github.com/ansible-collections/community.general/issues/5907, https://github.com/ansible-collections/community.general/pull/5908).
- ilo_redfish_utils module utils - change implementation of DNS Server IP and NTP Server IP update (https://github.com/ansible-collections/community.general/pull/5804).
- ipa_group - allow to add and remove external users with the ``external_user`` option (https://github.com/ansible-collections/community.general/pull/5897).
- iptables_state - minor refactoring within the module (https://github.com/ansible-collections/community.general/pull/5844).
- one_vm - add a new ``updateconf`` option which implements the ``one.vm.updateconf`` API call (https://github.com/ansible-collections/community.general/pull/5812).
- opkg - refactored module to use ``CmdRunner`` for executing ``opkg`` (https://github.com/ansible-collections/community.general/pull/5718).
- redhat_subscription - adds ``token`` parameter for subscription-manager authentication using Red Hat API token (https://github.com/ansible-collections/community.general/pull/5725).
- snap - minor refactor when executing module (https://github.com/ansible-collections/community.general/pull/5773).
- snap_alias - refactored module to use ``CmdRunner`` to execute ``snap`` (https://github.com/ansible-collections/community.general/pull/5486).
- sudoers - add ``setenv`` parameters to support passing environment variables via sudo. (https://github.com/ansible-collections/community.general/pull/5883)

Breaking Changes / Porting Guide
--------------------------------

- ModuleHelper module utils - when the module sets output variables named ``msg``, ``exception``, ``output``, ``vars``, or ``changed``, the actual output will prefix those names with ``_`` (underscore symbol) only when they clash with output variables generated by ModuleHelper itself, which only occurs when handling exceptions. Please note that this breaking change does not require a new major release since before this release, it was not possible to add such variables to the output `due to a bug <https://github.com/ansible-collections/community.general/pull/5755>`__ (https://github.com/ansible-collections/community.general/pull/5765).

Deprecated Features
-------------------

- consul - deprecate using parameters unused for ``state=absent`` (https://github.com/ansible-collections/community.general/pull/5772).
- gitlab_runner - the default of the new option ``access_level_on_creation`` will change from ``false`` to ``true`` in community.general 7.0.0. This will cause ``access_level`` to be used during runner registration as well, and not only during updates (https://github.com/ansible-collections/community.general/pull/5908).

Bugfixes
--------

- ModuleHelper - fix bug when adjusting the name of reserved output variables (https://github.com/ansible-collections/community.general/pull/5755).
- alternatives - support subcommands on Fedora 37, which uses ``follower`` instead of ``slave`` (https://github.com/ansible-collections/community.general/pull/5794).
- bitwarden lookup plugin - clarify what to do if the bitwarden vault is not unlocked (https://github.com/ansible-collections/community.general/pull/5811).
- dig lookup plugin - correctly handle DNSKEY record type's ``algorithm`` field (https://github.com/ansible-collections/community.general/pull/5914).
- gem - fix force parameter not being passed to gem command when uninstalling (https://github.com/ansible-collections/community.general/pull/5822).
- gem - fix hang due to interactive prompt for confirmation on specific version uninstall (https://github.com/ansible-collections/community.general/pull/5751).
- gitlab_deploy_key - also update ``title`` and not just ``can_push`` (https://github.com/ansible-collections/community.general/pull/5888).
- keycloak_user_federation - fixes federation creation issue. When a new federation was created and at the same time a default / standard mapper was also changed / updated the creation process failed as a bad None set variable led to a bad malformed url request (https://github.com/ansible-collections/community.general/pull/5750).
- keycloak_user_federation - fixes idempotency detection issues. In some cases the module could fail to properly detect already existing user federations because of a buggy, seemingly superfluous extra query parameter (https://github.com/ansible-collections/community.general/pull/5732).
- loganalytics callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
- logdna callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
- logstash callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
- nsupdate - fix zone lookup. The SOA record for an existing zone is returned as an answer RR and not as an authority RR (https://github.com/ansible-collections/community.general/issues/5817, https://github.com/ansible-collections/community.general/pull/5818).
- proxmox_disk - fixed issue with read timeout on import action (https://github.com/ansible-collections/community.general/pull/5803).
- redfish_utils - removed basic auth HTTP header when performing a GET on the service root resource and when performing a POST to the session collection (https://github.com/ansible-collections/community.general/issues/5886).
- splunk callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
- sumologic callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
- syslog_json callback plugin - adjust type of callback to ``notification``, it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
- terraform - fix ``current`` workspace never getting appended to the ``all`` key in the ``workspace_ctf`` object (https://github.com/ansible-collections/community.general/pull/5735).
- terraform - fix ``terraform init`` failure when there are multiple workspaces on the remote backend and when ``default`` workspace is missing by setting ``TF_WORKSPACE`` environmental variable to the value of ``workspace`` when used (https://github.com/ansible-collections/community.general/pull/5735).
- terraform module - disable ANSI escape sequences during validation phase (https://github.com/ansible-collections/community.general/pull/5843).
- xml - fixed a bug where empty ``children`` list would not be set (https://github.com/ansible-collections/community.general/pull/5808).

New Modules
-----------

- ocapi_command - Manages Out-Of-Band controllers using Open Composable API (OCAPI)
- ocapi_info - Manages Out-Of-Band controllers using Open Composable API (OCAPI)
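For the ``sudoers`` change above, a minimal sketch of a rule that lets sudo keep the caller's environment (only ``setenv`` is taken from the changelog; the rule name, user, and command are illustrative):

```yaml
- name: Allow the deploy user to restart the app service with its environment preserved
  community.general.sudoers:
    name: deploy-restart
    user: deploy
    commands: /usr/bin/systemctl restart app.service
    setenv: true
```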
v6.2.0
======

Release Summary
---------------

Regular bugfix and feature release.

Minor Changes
-------------

- opkg - allow installing a package in a certain version (https://github.com/ansible-collections/community.general/pull/5688).
- proxmox - added new module parameter ``tags`` for use with PVE 7+ (https://github.com/ansible-collections/community.general/pull/5714).
- puppet - refactored module to use ``CmdRunner`` for executing ``puppet`` (https://github.com/ansible-collections/community.general/pull/5612).
- redhat_subscription - add a ``server_proxy_scheme`` parameter to configure the scheme for the proxy server (https://github.com/ansible-collections/community.general/pull/5662).
- ssh_config - refactor code to module util to fix sanity check (https://github.com/ansible-collections/community.general/pull/5720).
- sudoers - adds ``host`` parameter for setting hostname restrictions in sudoers rules (https://github.com/ansible-collections/community.general/issues/5702).

Deprecated Features
-------------------

- manageiq_policies - deprecate ``state=list`` in favour of using ``community.general.manageiq_policies_info`` (https://github.com/ansible-collections/community.general/pull/5721).
- rax - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_cbs - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_cbs_attachments - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_cdb - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_cdb_database - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_cdb_user - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_clb - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_clb_nodes - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_clb_ssl - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_dns - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_dns_record - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_facts - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_files - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_files_objects - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_identity - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_keypair - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_meta - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_mon_alarm - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_mon_check - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_mon_entity - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_mon_notification - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_mon_notification_plan - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_network - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_queue - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_scaling_group - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
- rax_scaling_policy - module relies on the deprecated library ``pyrax``. Unless maintainers step up to work on the module, it will be marked as deprecated in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).

Bugfixes
--------

- ansible_galaxy_install - set default to raise exception if command's return code is different from zero (https://github.com/ansible-collections/community.general/pull/5680).
- ansible_galaxy_install - try ``C.UTF-8`` and then fall back to ``en_US.UTF-8`` before failing (https://github.com/ansible-collections/community.general/pull/5680).
- gitlab_group_variables - fix dropping variables accidentally when GitLab introduced new properties (https://github.com/ansible-collections/community.general/pull/5667).
- gitlab_project_variables - fix dropping variables accidentally when GitLab introduced new properties (https://github.com/ansible-collections/community.general/pull/5667).
- lxc_container - fix the arguments of the lxc command which broke the creation and cloning of containers (https://github.com/ansible-collections/community.general/issues/5578).
- opkg - fix issue that ``force=reinstall`` would not reinstall an existing package (https://github.com/ansible-collections/community.general/pull/5705).
- proxmox_disk - fixed possible issues with redundant ``vmid`` parameter (https://github.com/ansible-collections/community.general/issues/5492, https://github.com/ansible-collections/community.general/pull/5672).
- proxmox_nic - fixed possible issues with redundant ``vmid`` parameter (https://github.com/ansible-collections/community.general/issues/5492, https://github.com/ansible-collections/community.general/pull/5672).
- unixy callback plugin - fix typo introduced when updating to use Ansible's configuration manager for handling options (https://github.com/ansible-collections/community.general/issues/5600).
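As an illustration of the new ``tags`` parameter of the ``proxmox`` module mentioned above (only the parameter name comes from the changelog; the host, credentials, template, and tag values are placeholders):

```yaml
- name: Create an LXC container carrying PVE tags
  community.general.proxmox:
    api_host: pve.example.com
    api_user: root@pam
    api_password: "{{ vault_pve_password }}"
    node: pve01
    hostname: web01
    ostemplate: local:vztmpl/debian-11-standard_11.3-1_amd64.tar.zst
    tags:
      - web
      - production
```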
v6.1.0
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Regular bugfix and feature release.
|
||||
|
||||
Minor Changes
|
||||
-------------
|
||||
|
||||
- cmd_runner module utils - ``cmd_runner_fmt.as_bool()`` can now take an extra parameter to format when value is false (https://github.com/ansible-collections/community.general/pull/5647).
|
||||
- gconftool2 - refactor using ``ModuleHelper`` and ``CmdRunner`` (https://github.com/ansible-collections/community.general/pull/5545).
|
||||
- java_certs - add more detailed error output when extracting certificate from PKCS12 fails (https://github.com/ansible-collections/community.general/pull/5550).
|
||||
- jenkins_plugin - refactor code to module util to fix sanity check (https://github.com/ansible-collections/community.general/pull/5565).
|
||||
- lxd_project - refactored code out to module utils to clear sanity check (https://github.com/ansible-collections/community.general/pull/5549).
|
||||
- nmap inventory plugin - add new options ``udp_scan``, ``icmp_timestamp``, and ``dns_resolve`` for different types of scans (https://github.com/ansible-collections/community.general/pull/5566).
|
||||
- rax_scaling_group - refactored out code to the ``rax`` module utils to clear the sanity check (https://github.com/ansible-collections/community.general/pull/5563).
|
||||
- redfish_command - add ``PerformRequestedOperations`` command to perform any operations necessary to continue the update flow (https://github.com/ansible-collections/community.general/issues/4276).
|
||||
- redfish_command - add ``update_apply_time`` to ``SimpleUpdate`` command (https://github.com/ansible-collections/community.general/issues/3910).
|
||||
- redfish_command - add ``update_status`` to output of ``SimpleUpdate`` command to allow a user monitor the update in progress (https://github.com/ansible-collections/community.general/issues/4276).
|
||||
- redfish_info - add ``GetUpdateStatus`` command to check the progress of a previous update request (https://github.com/ansible-collections/community.general/issues/4276).
|
||||
- redfish_utils module utils - added PUT (``put_request()``) functionality (https://github.com/ansible-collections/community.general/pull/5490).
|
||||
- slack - add option ``prepend_hash`` which allows to control whether a ``#`` is prepended to ``channel_id``. The current behavior (value ``auto``) is to prepend ``#`` unless some specific prefixes are found. That list of prefixes is incomplete, and there does not seem to exist a documented condition on when exactly ``#`` must not be prepended. We recommend to explicitly set ``prepend_hash=always`` or ``prepend_hash=never`` to avoid any ambiguity (https://github.com/ansible-collections/community.general/pull/5629).
|
||||
- spotinst_aws_elastigroup - add ``elements`` attribute when missing in ``list`` parameters (https://github.com/ansible-collections/community.general/pull/5553).
|
||||
- ssh_config - add ``host_key_algorithms`` option (https://github.com/ansible-collections/community.general/pull/5605).
|
||||
- udm_share - added ``elements`` attribute to ``list`` type parameters (https://github.com/ansible-collections/community.general/pull/5557).
|
||||
- udm_user - add ``elements`` attribute when missing in ``list`` parameters (https://github.com/ansible-collections/community.general/pull/5559).
|
||||
|
||||
Deprecated Features
|
||||
-------------------
|
||||
|
||||
- The ``sap`` modules ``sapcar_extract``, ``sap_task_list_execute``, and ``hana_query``, will be removed from this collection in community.general 7.0.0 and replaced with redirects to ``community.sap_libs``. If you want to continue using these modules, make sure to also install ``community.sap_libs`` (it is part of the Ansible package) (https://github.com/ansible-collections/community.general/pull/5614).
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- chroot connection plugin - add ``inventory_hostname`` to vars under ``remote_addr``. This is needed for compatibility with ansible-core 2.13 (https://github.com/ansible-collections/community.general/pull/5570).
|
||||
- cmd_runner module utils - fixed bug when handling default cases in ``cmd_runner_fmt.as_map()`` (https://github.com/ansible-collections/community.general/pull/5538).
|
||||
- cmd_runner module utils - formatting arguments ``cmd_runner_fmt.as_fixed()`` was expecting an non-existing argument (https://github.com/ansible-collections/community.general/pull/5538).
|
||||
- keycloak_client_rolemapping - calculate ``proposed`` and ``after`` return values properly (https://github.com/ansible-collections/community.general/pull/5619).
|
||||
- keycloak_client_rolemapping - remove only listed mappings with ``state=absent`` (https://github.com/ansible-collections/community.general/pull/5619).
|
||||
- proxmox inventory plugin - fix bug while templating when using templates for the ``url``, ``user``, ``password``, ``token_id``, or ``token_secret`` options (https://github.com/ansible-collections/community.general/pull/5640).
|
||||
- proxmox inventory plugin - handle tags delimited by semicolon instead of comma, which happens from Proxmox 7.3 on (https://github.com/ansible-collections/community.general/pull/5602).
|
||||
- redhat_subscription - do not ignore ``consumer_name`` and other variables if ``activationkey`` is specified (https://github.com/ansible-collections/community.general/issues/3486, https://github.com/ansible-collections/community.general/pull/5627).
|
||||
- redhat_subscription - do not pass arguments to ``subscription-manager register`` for things already configured; now a specified ``rhsm_baseurl`` is properly set for subscription-manager (https://github.com/ansible-collections/community.general/pull/5583).
|
||||
- unixy callback plugin - fix plugin to work with ansible-core 2.14 by using Ansible's configuration manager for handling options (https://github.com/ansible-collections/community.general/issues/5600).
|
||||
- vdo - now uses ``yaml.safe_load()`` to parse command output instead of the deprecated ``yaml.load()`` which is potentially unsafe. Using ``yaml.load()`` without explicitly setting a ``Loader=`` is also an error in PyYAML 6.0 (https://github.com/ansible-collections/community.general/pull/5632).
|
||||
- vmadm - fix for index out of range error in ``get_vm_uuid`` (https://github.com/ansible-collections/community.general/pull/5628).
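
For the proxmox inventory plugin fix above, a sketch of an inventory source that templates its connection options; the file name and the environment variable names are illustrative only::

    # my.proxmox.yml
    plugin: community.general.proxmox
    url: "{{ lookup('ansible.builtin.env', 'PROXMOX_URL') }}"
    user: "{{ lookup('ansible.builtin.env', 'PROXMOX_USER') }}"
    password: "{{ lookup('ansible.builtin.env', 'PROXMOX_PASSWORD') }}"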
|
||||
|
||||
New Modules
|
||||
-----------
|
||||
|
||||
- gitlab_project_badge - Manage project badges on GitLab Server
|
||||
- keycloak_clientsecret_info - Retrieve client secret via Keycloak API
|
||||
- keycloak_clientsecret_regenerate - Regenerate Keycloak client secret via Keycloak API
|
||||
|
||||
v6.0.1
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
Bugfix release for Ansible 7.0.0.
|
||||
|
||||
Bugfixes
|
||||
--------
|
||||
|
||||
- dependent lookup plugin - avoid warning on deprecated parameter for ``Templar.template()`` (https://github.com/ansible-collections/community.general/pull/5543).
|
||||
- jenkins_build - fix the logical flaw when deleting a Jenkins build (https://github.com/ansible-collections/community.general/pull/5514).
|
||||
- one_vm - avoid splitting labels that are ``None`` (https://github.com/ansible-collections/community.general/pull/5489).
|
||||
- onepassword_raw - add missing parameter to plugin documentation (https://github.com/ansible-collections/community.general/issues/5506).
|
||||
- proxmox_disk - avoid duplicate ``vmid`` reference (https://github.com/ansible-collections/community.general/issues/5492, https://github.com/ansible-collections/community.general/pull/5493).
|
||||
|
||||
v6.0.0
|
||||
======
|
||||
|
||||
Release Summary
|
||||
---------------
|
||||
|
||||
New major release of community.general with lots of bugfixes, new features, some removed deprecated features, and some other breaking changes. Please check the corresponding sections of the changelog for more details.
|
||||
|
||||
Major Changes
|
||||
-------------
|
||||
|
|
@ -33,6 +294,7 @@ Minor Changes
|
|||
- alternatives - add ``state=absent`` to be able to remove an alternative (https://github.com/ansible-collections/community.general/pull/4654).
|
||||
- alternatives - add ``subcommands`` parameter (https://github.com/ansible-collections/community.general/pull/4654).
|
||||
- ansible_galaxy_install - minor refactoring using latest ``ModuleHelper`` updates (https://github.com/ansible-collections/community.general/pull/4752).
|
||||
- ansible_galaxy_install - refactored module to use ``CmdRunner`` to execute ``ansible-galaxy`` (https://github.com/ansible-collections/community.general/pull/5477).
|
||||
- apk - add ``world`` parameter for supporting a custom world file (https://github.com/ansible-collections/community.general/pull/4976).
|
||||
- bitwarden lookup plugin - add option ``search`` to search for other attributes than name (https://github.com/ansible-collections/community.general/pull/5297).
|
||||
- cartesian lookup plugin - start using Ansible's configuration manager to parse options (https://github.com/ansible-collections/community.general/pull/5440).
|
||||
|
|
@ -41,6 +303,7 @@ Minor Changes
|
|||
- consul - adds ``ttl`` parameter for session (https://github.com/ansible-collections/community.general/pull/4996).
|
||||
- consul - minor refactoring (https://github.com/ansible-collections/community.general/pull/5367).
|
||||
- consul_session - adds ``token`` parameter for session (https://github.com/ansible-collections/community.general/pull/5193).
|
||||
- cpanm - refactored module to use ``CmdRunner`` to execute ``cpanm`` (https://github.com/ansible-collections/community.general/pull/5485).
|
||||
- cpanm - using ``do_raise()`` to raise exceptions in ``ModuleHelper`` derived modules (https://github.com/ansible-collections/community.general/pull/4674).
|
||||
- credstash lookup plugin - start using Ansible's configuration manager to parse options (https://github.com/ansible-collections/community.general/pull/5440).
|
||||
- dependent lookup plugin - start using Ansible's configuration manager to parse options (https://github.com/ansible-collections/community.general/pull/5440).
|
||||
|
|
@ -65,9 +328,11 @@ Minor Changes
|
|||
- gitlab_user - minor refactor when checking for installed dependency (https://github.com/ansible-collections/community.general/pull/5259).
|
||||
- hiera lookup plugin - start using Ansible's configuration manager to parse options. The Hiera executable and config file can now also be passed as lookup parameters (https://github.com/ansible-collections/community.general/pull/5440).
|
||||
- homebrew, homebrew_tap - added Homebrew on Linux path to defaults (https://github.com/ansible-collections/community.general/pull/5241).
|
||||
- hponcfg - refactored module to use ``CmdRunner`` to execute ``hponcfg`` (https://github.com/ansible-collections/community.general/pull/5483).
|
||||
- keycloak_* modules - add ``http_agent`` parameter with default value ``Ansible`` (https://github.com/ansible-collections/community.general/issues/5023).
|
||||
- keyring lookup plugin - start using Ansible's configuration manager to parse options (https://github.com/ansible-collections/community.general/pull/5440).
|
||||
- lastpass - use config manager for handling plugin options (https://github.com/ansible-collections/community.general/pull/5022).
|
||||
- ldap_attrs - allow for DNs to have ``{x}`` prefix on first RDN (https://github.com/ansible-collections/community.general/issues/977, https://github.com/ansible-collections/community.general/pull/5450).
|
||||
- linode inventory plugin - simplify option handling (https://github.com/ansible-collections/community.general/pull/5438).
|
||||
- listen_ports_facts - add new ``include_non_listening`` option which adds ``-a`` option to ``netstat`` and ``ss``. This shows both listening and non-listening (for TCP this means established connections) sockets, and returns ``state`` and ``foreign_address`` (https://github.com/ansible-collections/community.general/issues/4762, https://github.com/ansible-collections/community.general/pull/4953).
|
||||
- lmdb_kv lookup plugin - start using Ansible's configuration manager to parse options (https://github.com/ansible-collections/community.general/pull/5440).
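
For the ``listen_ports_facts`` change above, a minimal sketch of using the new option; the ``tcp_listen`` fact name comes from the module's existing return values::

    - name: Gather listening and non-listening sockets
      community.general.listen_ports_facts:
        include_non_listening: true

    - name: Show gathered TCP sockets, including established connections
      ansible.builtin.debug:
        var: ansible_facts.tcp_listen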
|
||||
|
|
@ -76,6 +341,7 @@ Minor Changes
|
|||
- machinectl become plugin - combine the success command when building the become command to be consistent with other become plugins (https://github.com/ansible-collections/community.general/pull/5287).
|
||||
- manifold lookup plugin - start using Ansible's configuration manager to parse options (https://github.com/ansible-collections/community.general/pull/5440).
|
||||
- maven_artifact - add a new ``unredirected_headers`` option that can be used with ansible-core 2.12 and above. The default value is to not use ``Authorization`` and ``Cookie`` headers on redirects for security reasons. With ansible-core 2.11, all headers are still passed on for redirects (https://github.com/ansible-collections/community.general/pull/4812).
|
||||
- mksysb - refactored module to use ``CmdRunner`` to execute ``mksysb`` (https://github.com/ansible-collections/community.general/pull/5484).
|
||||
- mksysb - using ``do_raise()`` to raise exceptions in ``ModuleHelper`` derived modules (https://github.com/ansible-collections/community.general/pull/4674).
|
||||
- nagios - minor refactoring on parameter validation for different actions (https://github.com/ansible-collections/community.general/pull/5239).
|
||||
- netcup_dnsapi - add ``timeout`` parameter (https://github.com/ansible-collections/community.general/pull/5301).
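
For the ``maven_artifact`` change above, a minimal sketch; the coordinates and destination path are placeholders::

    - name: Download an artifact without forwarding credentials on redirects
      community.general.maven_artifact:
        group_id: org.example
        artifact_id: demo-app
        version: 1.0.0
        dest: /tmp/demo-app.jar
        unredirected_headers:
          - Authorization
          - Cookie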
|
||||
|
|
@ -83,6 +349,7 @@ Minor Changes
|
|||
- nmcli - add bond option ``xmit_hash_policy`` to bond options (https://github.com/ansible-collections/community.general/issues/5148).
|
||||
- nmcli - adds ``vpn`` type and parameter for supporting VPN with service type L2TP and PPTP (https://github.com/ansible-collections/community.general/pull/4746).
|
||||
- nmcli - honor IP options for VPNs (https://github.com/ansible-collections/community.general/pull/5228).
|
||||
- onepassword - support version 2 of the OnePassword CLI (https://github.com/ansible-collections/community.general/pull/4728)
|
||||
- opentelemetry callback plugin - allow configuring opentelementry callback via config file (https://github.com/ansible-collections/community.general/pull/4916).
|
||||
- opentelemetry callback plugin - send logs. This can be disabled by setting ``disable_logs=false`` (https://github.com/ansible-collections/community.general/pull/4175).
|
||||
- pacman - added parameters ``reason`` and ``reason_for`` to set/change the install reason of packages (https://github.com/ansible-collections/community.general/pull/4956).
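
For the ``pacman`` change above, a minimal sketch; the package name is a placeholder and the shown values for ``reason`` and ``reason_for`` are assumptions based on the parameter names::

    - name: Install a package and record an explicit install reason
      community.general.pacman:
        name: vim
        state: present
        reason: explicit
        reason_for: all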
|
||||
|
|
@ -125,6 +392,7 @@ Breaking Changes / Porting Guide
|
|||
--------------------------------
|
||||
|
||||
- newrelic_deployment - ``revision`` is required for v2 API (https://github.com/ansible-collections/community.general/pull/5341).
|
||||
- scaleway_container_registry_info - no longer replace ``secret_environment_variables`` in the output by ``SENSITIVE_VALUE`` (https://github.com/ansible-collections/community.general/pull/5497).
|
||||
|
||||
Deprecated Features
|
||||
-------------------
|
||||
|
|
@ -179,8 +447,10 @@ Bugfixes
|
|||
- filesystem - improve error messages when output cannot be parsed by including newlines in escaped form (https://github.com/ansible-collections/community.general/pull/4700).
|
||||
- funcd connection plugin - fix signature of ``exec_command`` (https://github.com/ansible-collections/community.general/pull/5111).
|
||||
- ini_file - minor refactor fixing a python lint error (https://github.com/ansible-collections/community.general/pull/5307).
|
||||
- iso_create - the module sometimes failed to add folders for Joliet and UDF formats (https://github.com/ansible-collections/community.general/issues/5275).
|
||||
- keycloak_realm - fix default groups and roles (https://github.com/ansible-collections/community.general/issues/4241).
|
||||
- keyring_info - fix the result from the keyring library never getting returned (https://github.com/ansible-collections/community.general/pull/4964).
|
||||
- ldap_attrs - fix bug which caused a ``Bad search filter`` error. The error was occurring when the LDAP attribute value contained special characters such as ``(`` or ``*`` (https://github.com/ansible-collections/community.general/issues/5434, https://github.com/ansible-collections/community.general/pull/5435).
|
||||
- ldap_attrs - fix ordering issue by ignoring the ``{x}`` prefix on attribute values (https://github.com/ansible-collections/community.general/issues/977, https://github.com/ansible-collections/community.general/pull/5385).
|
||||
- listen_ports_facts - removed leftover ``EnvironmentError``. The ``else`` clause was wrongly indented. The check is now handled in the ``split_pid_name`` function (https://github.com/ansible-collections/community.general/pull/5202).
|
||||
- locale_gen - fix support for Ubuntu (https://github.com/ansible-collections/community.general/issues/5281).
|
||||
|
|
@ -220,6 +490,7 @@ Bugfixes
|
|||
- redis* modules - fix call to ``module.fail_json`` when failing because of missing Python libraries (https://github.com/ansible-collections/community.general/pull/4733).
|
||||
- slack - fix incorrect channel prefix ``#`` caused by incomplete pattern detection by adding ``G0`` and ``GF`` as channel ID patterns (https://github.com/ansible-collections/community.general/pull/5019).
|
||||
- slack - fix message update for channels which start with ``CP``. When ``message-id`` was passed it failed for channels which started with ``CP`` because the ``#`` symbol was added before the ``channel_id`` (https://github.com/ansible-collections/community.general/pull/5249).
|
||||
- snap - allow values in the ``options`` parameter to contain whitespaces (https://github.com/ansible-collections/community.general/pull/5475).
|
||||
- sudoers - ensure sudoers config files are created with the permissions requested by sudoers (0440) (https://github.com/ansible-collections/community.general/pull/4814).
|
||||
- sudoers - fix incorrect handling of ``state: absent`` (https://github.com/ansible-collections/community.general/issues/4852).
|
||||
- tss lookup plugin - adding support for updated Delinea library (https://github.com/DelineaXPM/python-tss-sdk/issues/9, https://github.com/ansible-collections/community.general/pull/5151).
|
||||
|
|
@ -229,8 +500,41 @@ Bugfixes
|
|||
- xfconf - fix setting of boolean values (https://github.com/ansible-collections/community.general/issues/4999, https://github.com/ansible-collections/community.general/pull/5007).
|
||||
- zfs - fix wrong quoting of properties (https://github.com/ansible-collections/community.general/issues/4707, https://github.com/ansible-collections/community.general/pull/4726).
|
||||
|
||||
New Plugins
|
||||
-----------
|
||||
|
||||
Filter
|
||||
~~~~~~
|
||||
|
||||
- counter - Counts hashable elements in a sequence
|
||||
|
||||
Lookup
|
||||
~~~~~~
|
||||
|
||||
- bitwarden - Retrieve secrets from Bitwarden
|
||||
|
||||
New Modules
|
||||
-----------
|
||||
|
||||
- gconftool2_info - Retrieve GConf configurations
|
||||
- iso_customize - Add/remove/change files in ISO file
|
||||
- keycloak_user_rolemapping - Allows administration of Keycloak user_rolemapping with the Keycloak API
|
||||
- keyring - Set or delete a passphrase using the Operating System's native keyring
|
||||
- keyring_info - Get a passphrase using the Operating System's native keyring
|
||||
- manageiq_policies_info - Listing of resource policy_profiles in ManageIQ
|
||||
- manageiq_tags_info - Retrieve resource tags in ManageIQ
|
||||
- pipx_info - Retrieves information about applications installed with pipx
|
||||
- proxmox_disk - Management of a disk of a Qemu(KVM) VM in a Proxmox VE cluster.
|
||||
- scaleway_compute_private_network - Scaleway compute - private network management
|
||||
- scaleway_container - Scaleway Container management
|
||||
- scaleway_container_info - Retrieve information on Scaleway Container
|
||||
- scaleway_container_namespace - Scaleway Container namespace management
|
||||
- scaleway_container_namespace_info - Retrieve information on Scaleway Container namespace
|
||||
- scaleway_container_registry - Scaleway Container registry management module
|
||||
- scaleway_container_registry_info - Scaleway Container registry info module
|
||||
- scaleway_function - Scaleway Function management
|
||||
- scaleway_function_info - Retrieve information on Scaleway Function
|
||||
- scaleway_function_namespace - Scaleway Function namespace management
|
||||
- scaleway_function_namespace_info - Retrieve information on Scaleway Function namespace
|
||||
- wdc_redfish_command - Manages WDC UltraStar Data102 Out-Of-Band controllers using Redfish APIs
|
||||
- wdc_redfish_info - Manages WDC UltraStar Data102 Out-Of-Band controllers using Redfish APIs
|
||||
|
|
|
|||
|
|
@ -31,7 +31,7 @@ Also, consider taking up a valuable, reviewed, but abandoned pull request which
|
|||
* Try committing your changes with an informative but short commit message.
|
||||
* Do not squash your commits and force-push to your branch if not needed. Individual commits make it much easier for reviewers to follow the history of your pull request. All commits of your pull request branch will be squashed into one commit by GitHub upon merge.
|
||||
* Do not add merge commits to your PR. The bot will complain and you will have to rebase ([instructions for rebasing](https://docs.ansible.com/ansible/latest/dev_guide/developing_rebasing.html)) to remove them before your PR can be merged. To avoid that git automatically does merges during pulls, you can configure it to do rebases instead by running `git config pull.rebase true` inside the repository checkout.
|
||||
* Make sure your PR includes a [changelog fragment](https://docs.ansible.com/ansible/devel/community/development_process.html#changelogs-how-to). (You must not include a fragment for new modules or new plugins, except for test and filter plugins. Also you shouldn't include one for docs-only changes. If you're not sure, simply don't include one, we'll tell you whether one is needed or not :) )
|
||||
* Make sure your PR includes a [changelog fragment](https://docs.ansible.com/ansible/devel/community/development_process.html#creating-changelog-fragments). (You must not include a fragment for new modules or new plugins, except for test and filter plugins. Also you shouldn't include one for docs-only changes. If you're not sure, simply don't include one, we'll tell you whether one is needed or not :) )
|
||||
* Avoid reformatting unrelated parts of the codebase in your PR. These types of changes will likely be requested for reversion, create additional work for reviewers, and may cause approval to be delayed.
|
||||
|
||||
You can also read [our Quick-start development guide](https://github.com/ansible/community-docs/blob/main/create_pr_quick_start_guide.rst).
|
||||
|
|
|
|||
17
README.md
|
|
@ -6,7 +6,8 @@ SPDX-License-Identifier: GPL-3.0-or-later
|
|||
|
||||
# Community General Collection
|
||||
|
||||
[](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
|
||||
[](https://dev.azure.com/ansible/community.general/_build?definitionId=31)
|
||||
[](https://github.com/ansible-collections/community.general/actions)
|
||||
[](https://codecov.io/gh/ansible-collections/community.general)
|
||||
|
||||
This repository contains the `community.general` Ansible Collection. The collection is a part of the Ansible package and includes many modules and plugins supported by Ansible community which are not part of more specialized community collections.
|
||||
|
|
@ -72,13 +73,13 @@ We are actively accepting new contributors.
|
|||
|
||||
All types of contributions are very welcome.
|
||||
|
||||
You don't know how to start? Refer to our [contribution guide](https://github.com/ansible-collections/community.general/blob/main/CONTRIBUTING.md)!
|
||||
You don't know how to start? Refer to our [contribution guide](https://github.com/ansible-collections/community.general/blob/stable-6/CONTRIBUTING.md)!
|
||||
|
||||
The current maintainers are listed in the [commit-rights.md](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md#people) file. If you have questions or need help, feel free to mention them in the proposals.
|
||||
The current maintainers are listed in the [commit-rights.md](https://github.com/ansible-collections/community.general/blob/stable-6/commit-rights.md#people) file. If you have questions or need help, feel free to mention them in the proposals.
|
||||
|
||||
You can find more information in the [developer guide for collections](https://docs.ansible.com/ansible/devel/dev_guide/developing_collections.html#contributing-to-collections), and in the [Ansible Community Guide](https://docs.ansible.com/ansible/latest/community/index.html).
|
||||
|
||||
Also for some notes specific to this collection see [our CONTRIBUTING documentation](https://github.com/ansible-collections/community.general/blob/main/CONTRIBUTING.md).
|
||||
Also for some notes specific to this collection see [our CONTRIBUTING documentation](https://github.com/ansible-collections/community.general/blob/stable-6/CONTRIBUTING.md).
|
||||
|
||||
### Running tests
|
||||
|
||||
|
|
@ -88,7 +89,7 @@ See [here](https://docs.ansible.com/ansible/devel/dev_guide/developing_collectio
|
|||
|
||||
To learn how to maintain / become a maintainer of this collection, refer to:
|
||||
|
||||
* [Committer guidelines](https://github.com/ansible-collections/community.general/blob/main/commit-rights.md).
|
||||
* [Committer guidelines](https://github.com/ansible-collections/community.general/blob/stable-6/commit-rights.md).
|
||||
* [Maintainer guidelines](https://github.com/ansible/community-docs/blob/main/maintaining.rst).
|
||||
|
||||
It is necessary for maintainers of this collection to be subscribed to:
|
||||
|
|
@ -116,7 +117,7 @@ See the [Releasing guidelines](https://github.com/ansible/community-docs/blob/ma
|
|||
|
||||
## Release notes
|
||||
|
||||
See the [changelog](https://github.com/ansible-collections/community.general/blob/main/CHANGELOG.rst).
|
||||
See the [changelog](https://github.com/ansible-collections/community.general/blob/stable-6/CHANGELOG.rst).
|
||||
|
||||
## Roadmap
|
||||
|
||||
|
|
@ -135,8 +136,8 @@ See [this issue](https://github.com/ansible-collections/community.general/issues
|
|||
|
||||
This collection is primarily licensed and distributed as a whole under the GNU General Public License v3.0 or later.
|
||||
|
||||
See [LICENSES/GPL-3.0-or-later.txt](https://github.com/ansible-collections/community.general/blob/main/COPYING) for the full text.
|
||||
See [LICENSES/GPL-3.0-or-later.txt](https://github.com/ansible-collections/community.general/blob/stable-6/COPYING) for the full text.
|
||||
|
||||
Parts of the collection are licensed under the [BSD 2-Clause license](https://github.com/ansible-collections/community.general/blob/main/LICENSES/BSD-2-Clause.txt), the [MIT license](https://github.com/ansible-collections/community.general/blob/main/LICENSES/MIT.txt), and the [PSF 2.0 license](https://github.com/ansible-collections/community.general/blob/main/LICENSES/PSF-2.0.txt).
|
||||
Parts of the collection are licensed under the [BSD 2-Clause license](https://github.com/ansible-collections/community.general/blob/stable-6/LICENSES/BSD-2-Clause.txt), the [MIT license](https://github.com/ansible-collections/community.general/blob/stable-6/LICENSES/MIT.txt), and the [PSF 2.0 license](https://github.com/ansible-collections/community.general/blob/stable-6/LICENSES/PSF-2.0.txt).
|
||||
|
||||
All files have a machine readable `SPDX-License-Identifier:` comment denoting their respective license(s) or an equivalent entry in an accompanying `.license` file. Only changelog fragments (which will not be part of a release) are covered by a blanket statement in `.reuse/dep5`. This conforms to the [REUSE specification](https://reuse.software/spec/).
|
||||
|
|
|
|||
|
|
@ -1,5 +1,62 @@
|
|||
ancestor: 5.0.0
|
||||
releases:
|
||||
6.0.0:
|
||||
changes:
|
||||
breaking_changes:
|
||||
- scaleway_container_registry_info - no longer replace ``secret_environment_variables``
|
||||
in the output by ``SENSITIVE_VALUE`` (https://github.com/ansible-collections/community.general/pull/5497).
|
||||
bugfixes:
|
||||
- iso_create - the module sometimes failed to add folders for Joliet and UDF
|
||||
formats (https://github.com/ansible-collections/community.general/issues/5275).
|
||||
- ldap_attrs - fix bug which caused a ``Bad search filter`` error. The error
|
||||
was occurring when the LDAP attribute value contained special characters such
|
||||
as ``(`` or ``*`` (https://github.com/ansible-collections/community.general/issues/5434,
|
||||
https://github.com/ansible-collections/community.general/pull/5435).
|
||||
- snap - allow values in the ``options`` parameter to contain whitespaces (https://github.com/ansible-collections/community.general/pull/5475).
|
||||
minor_changes:
|
||||
- ansible_galaxy_install - refactored module to use ``CmdRunner`` to execute
|
||||
``ansible-galaxy`` (https://github.com/ansible-collections/community.general/pull/5477).
|
||||
- cpanm - refactored module to use ``CmdRunner`` to execute ``cpanm`` (https://github.com/ansible-collections/community.general/pull/5485).
|
||||
- hponcfg - refactored module to use ``CmdRunner`` to execute ``hponcfg`` (https://github.com/ansible-collections/community.general/pull/5483).
|
||||
- ldap_attrs - allow for DNs to have ``{x}`` prefix on first RDN (https://github.com/ansible-collections/community.general/issues/977,
|
||||
https://github.com/ansible-collections/community.general/pull/5450).
|
||||
- mksysb - refactored module to use ``CmdRunner`` to execute ``mksysb`` (https://github.com/ansible-collections/community.general/pull/5484).
|
||||
- onepassword - support version 2 of the OnePassword CLI (https://github.com/ansible-collections/community.general/pull/4728)
|
||||
release_summary: New major release of community.general with lots of bugfixes,
|
||||
new features, some removed deprecated features, and some other breaking changes.
|
||||
Please check the corresponding sections of the changelog for more details.
|
||||
fragments:
|
||||
- 4728-onepassword-v2.yml
|
||||
- 5435-escape-ldap-param.yml
|
||||
- 5450-allow-for-xordered-dns.yaml
|
||||
- 5468-iso-create-not-add-folders.yml
|
||||
- 5475-snap-option-value-whitespace.yml
|
||||
- 5477-ansible-galaxy-install-cmd-runner.yml
|
||||
- 5483-hponcfg-cmd-runner.yml
|
||||
- 5484-mksysb-cmd-runner.yml
|
||||
- 5485-cpanm-cmd-runner.yml
|
||||
- 5497-scaleway-filtering.yml
|
||||
- 6.0.0.yml
|
||||
modules:
|
||||
- description: Scaleway Container management
|
||||
name: scaleway_container
|
||||
namespace: ''
|
||||
- description: Retrieve information on Scaleway Container
|
||||
name: scaleway_container_info
|
||||
namespace: ''
|
||||
- description: Scaleway Container namespace management
|
||||
name: scaleway_container_namespace
|
||||
namespace: ''
|
||||
- description: Retrieve information on Scaleway Container namespace
|
||||
name: scaleway_container_namespace_info
|
||||
namespace: ''
|
||||
- description: Scaleway Function management
|
||||
name: scaleway_function
|
||||
namespace: ''
|
||||
- description: Retrieve information on Scaleway Function
|
||||
name: scaleway_function_info
|
||||
namespace: ''
|
||||
release_date: '2022-11-07'
|
||||
6.0.0-a1:
|
||||
changes:
|
||||
breaking_changes:
|
||||
|
|
@ -508,10 +565,560 @@ releases:
|
|||
- simplified-bsd-license.yml
|
||||
- unflatmap.yml
|
||||
modules:
|
||||
- description: Retrieve GConf configurations
|
||||
name: gconftool2_info
|
||||
namespace: ''
|
||||
- description: Add/remove/change files in ISO file
|
||||
name: iso_customize
|
||||
namespace: ''
|
||||
- description: Allows administration of Keycloak user_rolemapping with the Keycloak
|
||||
API
|
||||
name: keycloak_user_rolemapping
|
||||
namespace: ''
|
||||
- description: Set or delete a passphrase using the Operating System's native
|
||||
keyring
|
||||
name: keyring
|
||||
namespace: ''
|
||||
- description: Get a passphrase using the Operating System's native keyring
|
||||
name: keyring_info
|
||||
namespace: ''
|
||||
- description: Listing of resource policy_profiles in ManageIQ
|
||||
name: manageiq_policies_info
|
||||
namespace: ''
|
||||
- description: Retrieve resource tags in ManageIQ
|
||||
name: manageiq_tags_info
|
||||
namespace: ''
|
||||
- description: Retrieves information about applications installed with pipx
|
||||
name: pipx_info
|
||||
namespace: ''
|
||||
- description: Management of a disk of a Qemu(KVM) VM in a Proxmox VE cluster.
|
||||
name: proxmox_disk
|
||||
namespace: ''
|
||||
- description: Scaleway compute - private network management
|
||||
name: scaleway_compute_private_network
|
||||
namespace: ''
|
||||
- description: Scaleway Container registry management module
|
||||
name: scaleway_container_registry
|
||||
namespace: ''
|
||||
- description: Scaleway Container registry info module
|
||||
name: scaleway_container_registry_info
|
||||
namespace: ''
|
||||
- description: Scaleway Function namespace management
|
||||
name: scaleway_function_namespace
|
||||
namespace: ''
|
||||
- description: Retrieve information on Scaleway Function namespace
|
||||
name: scaleway_function_namespace_info
|
||||
namespace: ''
|
||||
- description: Manages WDC UltraStar Data102 Out-Of-Band controllers using Redfish
|
||||
APIs
|
||||
name: wdc_redfish_command
|
||||
namespace: ''
|
||||
- description: Manages WDC UltraStar Data102 Out-Of-Band controllers using Redfish
|
||||
APIs
|
||||
name: wdc_redfish_info
|
||||
namespace: ''
|
||||
plugins:
|
||||
filter:
|
||||
- description: Counts hashable elements in a sequence
|
||||
name: counter
|
||||
namespace: null
|
||||
lookup:
|
||||
- description: Retrieve secrets from Bitwarden
|
||||
name: bitwarden
|
||||
namespace: null
|
||||
release_date: '2022-11-02'
|
||||
6.0.1:
|
||||
changes:
|
||||
bugfixes:
|
||||
- dependent lookup plugin - avoid warning on deprecated parameter for ``Templar.template()``
|
||||
(https://github.com/ansible-collections/community.general/pull/5543).
|
||||
- jenkins_build - fix the logical flaw when deleting a Jenkins build (https://github.com/ansible-collections/community.general/pull/5514).
|
||||
- one_vm - avoid splitting labels that are ``None`` (https://github.com/ansible-collections/community.general/pull/5489).
|
||||
- onepassword_raw - add missing parameter to plugin documentation (https://github.com/ansible-collections/community.general/issues/5506).
|
||||
- proxmox_disk - avoid duplicate ``vmid`` reference (https://github.com/ansible-collections/community.general/issues/5492,
|
||||
https://github.com/ansible-collections/community.general/pull/5493).
|
||||
release_summary: Bugfix release for Ansible 7.0.0.
|
||||
fragments:
|
||||
- 5489-nonetype-in-get-vm-by-label.yml
|
||||
- 5493-proxmox.yml
|
||||
- 5506-onepassword_raw-missing-param.yml
|
||||
- 5514-fix-logical-flaw-when-deleting-jenkins-build.yml
|
||||
- 5543-dependent-template.yml
|
||||
- 6.0.1.yml
|
||||
release_date: '2022-11-15'
|
||||
6.1.0:
|
||||
changes:
|
||||
bugfixes:
|
||||
- chroot connection plugin - add ``inventory_hostname`` to vars under ``remote_addr``.
|
||||
This is needed for compatibility with ansible-core 2.13 (https://github.com/ansible-collections/community.general/pull/5570).
|
||||
- cmd_runner module utils - fixed bug when handling default cases in ``cmd_runner_fmt.as_map()``
|
||||
(https://github.com/ansible-collections/community.general/pull/5538).
|
||||
- cmd_runner module utils - formatting arguments ``cmd_runner_fmt.as_fixed()``
|
||||
was expecting a non-existing argument (https://github.com/ansible-collections/community.general/pull/5538).
|
||||
- keycloak_client_rolemapping - calculate ``proposed`` and ``after`` return
|
||||
values properly (https://github.com/ansible-collections/community.general/pull/5619).
|
||||
- keycloak_client_rolemapping - remove only listed mappings with ``state=absent``
|
||||
(https://github.com/ansible-collections/community.general/pull/5619).
|
||||
- proxmox inventory plugin - fix bug while templating when using templates for
|
||||
the ``url``, ``user``, ``password``, ``token_id``, or ``token_secret`` options
|
||||
(https://github.com/ansible-collections/community.general/pull/5640).
|
||||
- proxmox inventory plugin - handle tags delimited by semicolon instead of comma,
|
||||
which happens from Proxmox 7.3 on (https://github.com/ansible-collections/community.general/pull/5602).
|
||||
- redhat_subscription - do not ignore ``consumer_name`` and other variables
|
||||
if ``activationkey`` is specified (https://github.com/ansible-collections/community.general/issues/3486,
|
||||
https://github.com/ansible-collections/community.general/pull/5627).
|
||||
- redhat_subscription - do not pass arguments to ``subscription-manager register``
|
||||
for things already configured; now a specified ``rhsm_baseurl`` is properly
|
||||
set for subscription-manager (https://github.com/ansible-collections/community.general/pull/5583).
|
||||
- unixy callback plugin - fix plugin to work with ansible-core 2.14 by using
|
||||
Ansible's configuration manager for handling options (https://github.com/ansible-collections/community.general/issues/5600).
|
||||
- vdo - now uses ``yaml.safe_load()`` to parse command output instead of the
|
||||
deprecated ``yaml.load()`` which is potentially unsafe. Using ``yaml.load()``
|
||||
without explicitly setting a ``Loader=`` is also an error in PyYAML 6.0 (https://github.com/ansible-collections/community.general/pull/5632).
|
||||
- vmadm - fix for index out of range error in ``get_vm_uuid`` (https://github.com/ansible-collections/community.general/pull/5628).
|
||||
deprecated_features:
|
||||
- The ``sap`` modules ``sapcar_extract``, ``sap_task_list_execute``, and ``hana_query``
|
||||
will be removed from this collection in community.general 7.0.0 and replaced
|
||||
with redirects to ``community.sap_libs``. If you want to continue using these
|
||||
modules, make sure to also install ``community.sap_libs`` (it is part of the
|
||||
Ansible package) (https://github.com/ansible-collections/community.general/pull/5614).
|
||||
minor_changes:
|
||||
- cmd_runner module utils - ``cmd_runner_fmt.as_bool()`` can now take an extra
|
||||
parameter to format when value is false (https://github.com/ansible-collections/community.general/pull/5647).
|
||||
- gconftool2 - refactor using ``ModuleHelper`` and ``CmdRunner`` (https://github.com/ansible-collections/community.general/pull/5545).
|
||||
- java_certs - add more detailed error output when extracting certificate from
|
||||
PKCS12 fails (https://github.com/ansible-collections/community.general/pull/5550).
|
||||
- jenkins_plugin - refactor code to module util to fix sanity check (https://github.com/ansible-collections/community.general/pull/5565).
|
||||
- lxd_project - refactored code out to module utils to clear sanity check (https://github.com/ansible-collections/community.general/pull/5549).
|
||||
- nmap inventory plugin - add new options ``udp_scan``, ``icmp_timestamp``,
|
||||
and ``dns_resolve`` for different types of scans (https://github.com/ansible-collections/community.general/pull/5566).
|
||||
- rax_scaling_group - refactored out code to the ``rax`` module utils to clear
|
||||
the sanity check (https://github.com/ansible-collections/community.general/pull/5563).
|
||||
- redfish_command - add ``PerformRequestedOperations`` command to perform any
|
||||
operations necessary to continue the update flow (https://github.com/ansible-collections/community.general/issues/4276).
|
||||
- redfish_command - add ``update_apply_time`` to ``SimpleUpdate`` command (https://github.com/ansible-collections/community.general/issues/3910).
|
||||
- redfish_command - add ``update_status`` to output of ``SimpleUpdate`` command
|
||||
to allow a user to monitor the update in progress (https://github.com/ansible-collections/community.general/issues/4276).
|
||||
- redfish_info - add ``GetUpdateStatus`` command to check the progress of a
|
||||
previous update request (https://github.com/ansible-collections/community.general/issues/4276).
|
||||
- redfish_utils module utils - added PUT (``put_request()``) functionality (https://github.com/ansible-collections/community.general/pull/5490).
|
||||
- slack - add option ``prepend_hash`` which allows controlling whether a ``#``
|
||||
is prepended to ``channel_id``. The current behavior (value ``auto``) is to
|
||||
prepend ``#`` unless some specific prefixes are found. That list of prefixes
|
||||
is incomplete, and there does not seem to exist a documented condition on
|
||||
when exactly ``#`` must not be prepended. We recommend explicitly setting ``prepend_hash=always``
|
||||
or ``prepend_hash=never`` to avoid any ambiguity (https://github.com/ansible-collections/community.general/pull/5629).
|
||||
- spotinst_aws_elastigroup - add ``elements`` attribute when missing in ``list``
|
||||
parameters (https://github.com/ansible-collections/community.general/pull/5553).
|
||||
- ssh_config - add ``host_key_algorithms`` option (https://github.com/ansible-collections/community.general/pull/5605).
|
||||
- udm_share - added ``elements`` attribute to ``list`` type parameters (https://github.com/ansible-collections/community.general/pull/5557).
|
||||
- udm_user - add ``elements`` attribute when missing in ``list`` parameters
|
||||
(https://github.com/ansible-collections/community.general/pull/5559).
|
||||
release_summary: Regular bugfix and feature release.
|
||||
fragments:
|
||||
- 3910-redfish-add-operation-apply-time-to-simple-update.yml
|
||||
- 4276-redfish-command-updates-for-full-simple-update-workflow.yml
|
||||
- 5490-adding-put-functionality.yml
|
||||
- 5538-cmd-runner-as-fixed.yml
|
||||
- 5545-gconftool-cmd-runner.yml
|
||||
- 5549-lxd-project-sanity.yml
|
||||
- 5550-java_certs-not-enough-info-on-error.yml
|
||||
- 5553-spotinst-aws-elasticgroup-sanity.yml
|
||||
- 5557-udm-share-sanity.yml
|
||||
- 5559-udm-user-sanity.yml
|
||||
- 5563-rax-scaling-group-sanity.yml
|
||||
- 5565-jenkins-plugin-sanity.yml
|
||||
- 5566-additional-flags-nmap.yml
|
||||
- 5570-chroot-plugin-fix-default-inventory_hostname.yml
|
||||
- 5583-redhat_subscription-subscribe-parameters.yaml
|
||||
- 5601-unixy-callback-use-config-manager.yml
|
||||
- 5602-proxmox-tags.yml
|
||||
- 5605-ssh-config-add-host-key-algorithms.yaml
|
||||
- 5619-keycloak-improvements.yml
|
||||
- 5627-redhat_subscription-subscribe-parameters-2.yaml
|
||||
- 5628-fix-vmadm-off-by-one.yml
|
||||
- 5629-add-prepend-hash-option-for-channel-id.yml
|
||||
- 5632-vdo-Use-yaml-safe-load-instead-of-yaml-load.yml
|
||||
- 5640-fix-typo-proxmox-inventory.yml
|
||||
- 5647-cmd-runner-as-bool-false.yml
|
||||
- 6.1.0.yml
|
||||
- sap-removal.yml
|
||||
modules:
|
||||
- description: Manage project badges on GitLab Server
|
||||
name: gitlab_project_badge
|
||||
namespace: ''
|
||||
- description: Retrieve client secret via Keycloak API
|
||||
name: keycloak_clientsecret_info
|
||||
namespace: ''
|
||||
- description: Regenerate Keycloak client secret via Keycloak API
|
||||
name: keycloak_clientsecret_regenerate
|
||||
namespace: ''
|
||||
release_date: '2022-12-06'
|
||||
6.2.0:
|
||||
changes:
|
||||
bugfixes:
|
||||
- ansible_galaxy_install - set default to raise exception if command's return
|
||||
code is different from zero (https://github.com/ansible-collections/community.general/pull/5680).
|
||||
- ansible_galaxy_install - try ``C.UTF-8`` and then fall back to ``en_US.UTF-8``
|
||||
before failing (https://github.com/ansible-collections/community.general/pull/5680).
|
||||
- gitlab_group_variables - fix dropping variables accidentally when GitLab introduced
|
||||
new properties (https://github.com/ansible-collections/community.general/pull/5667).
|
||||
- gitlab_project_variables - fix dropping variables accidentally when GitLab
|
||||
introduced new properties (https://github.com/ansible-collections/community.general/pull/5667).
|
||||
- lxc_container - fix the arguments of the lxc command which broke the creation
|
||||
and cloning of containers (https://github.com/ansible-collections/community.general/issues/5578).
|
||||
- opkg - fix issue that ``force=reinstall`` would not reinstall an existing
|
||||
package (https://github.com/ansible-collections/community.general/pull/5705).
|
||||
- proxmox_disk - fixed possible issues with redundant ``vmid`` parameter (https://github.com/ansible-collections/community.general/issues/5492,
|
||||
https://github.com/ansible-collections/community.general/pull/5672).
|
||||
- proxmox_nic - fixed possible issues with redundant ``vmid`` parameter (https://github.com/ansible-collections/community.general/issues/5492,
|
||||
https://github.com/ansible-collections/community.general/pull/5672).
|
||||
- unixy callback plugin - fix typo introduced when updating to use Ansible's
|
||||
configuration manager for handling options (https://github.com/ansible-collections/community.general/issues/5600).
|
||||
deprecated_features:
|
||||
- manageiq_policies - deprecate ``state=list`` in favour of using ``community.general.manageiq_policies_info``
|
||||
(https://github.com/ansible-collections/community.general/pull/5721).
|
||||
- rax - module relies on the deprecated library ``pyrax``. Unless maintainers step
|
||||
up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_cbs - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_cbs_attachments - module relies on the deprecated library ``pyrax``. Unless
|
||||
maintainers step up to work on the module, it will be marked as deprecated
|
||||
in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_cdb - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_cdb_database - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_cdb_user - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_clb - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_clb_nodes - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_clb_ssl - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_dns - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_dns_record - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_facts - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_files - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_files_objects - module relies on the deprecated library ``pyrax``. Unless
|
||||
maintainers step up to work on the module, it will be marked as deprecated
|
||||
in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_identity - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_keypair - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_meta - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_mon_alarm - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_mon_check - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_mon_entity - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_mon_notification - module relies on the deprecated library ``pyrax``. Unless
|
||||
maintainers step up to work on the module, it will be marked as deprecated
|
||||
in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_mon_notification_plan - module relies on the deprecated library ``pyrax``.
|
||||
Unless maintainers step up to work on the module, it will be marked as deprecated
|
||||
in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_network - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_queue - module relies on the deprecated library ``pyrax``. Unless maintainers
|
||||
step up to work on the module, it will be marked as deprecated in community.general
|
||||
7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_scaling_group - module relies on the deprecated library ``pyrax``. Unless
|
||||
maintainers step up to work on the module, it will be marked as deprecated
|
||||
in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
- rax_scaling_policy - module relies on the deprecated library ``pyrax``. Unless
|
||||
maintainers step up to work on the module, it will be marked as deprecated
|
||||
in community.general 7.0.0 and removed in version 9.0.0 (https://github.com/ansible-collections/community.general/pull/5733).
|
||||
minor_changes:
|
||||
- opkg - allow installing a package in a certain version (https://github.com/ansible-collections/community.general/pull/5688).
|
||||
- proxmox - added new module parameter ``tags`` for use with PVE 7+ (https://github.com/ansible-collections/community.general/pull/5714).
|
||||
- puppet - refactored module to use ``CmdRunner`` for executing ``puppet`` (https://github.com/ansible-collections/community.general/pull/5612).
|
||||
- redhat_subscription - add a ``server_proxy_scheme`` parameter to configure
|
||||
the scheme for the proxy server (https://github.com/ansible-collections/community.general/pull/5662).
|
||||
- ssh_config - refactor code to module util to fix sanity check (https://github.com/ansible-collections/community.general/pull/5720).
|
||||
- sudoers - adds ``host`` parameter for setting hostname restrictions in sudoers
|
||||
rules (https://github.com/ansible-collections/community.general/issues/5702).
|
||||
release_summary: Regular bugfix and feature release.
|
||||
fragments:
|
||||
- 5612-puppet-cmd-runner.yml
|
||||
- 5659-fix-lxc_container-command.yml
|
||||
- 5662-redhat_subscription-server_proxy_scheme.yaml
|
||||
- 5666-gitlab-variables.yml
|
||||
- 5672-proxmox.yml
|
||||
- 5680-ansible_galaxy_install-fx-locale.yaml
|
||||
- 5688-opkg-module-install-certain-version.yml
|
||||
- 5703-sudoers-host-support.yml
|
||||
- 5705-opkg-fix-force-reinstall.yml
|
||||
- 5714-proxmox-lxc-tag-support.yml
|
||||
- 5720-ssh_config-plugin-sanity.yml
|
||||
- 5721-manageiq-policies-deprecate-list-state.yaml
|
||||
- 5733-rax-deprecation-notice.yml
|
||||
- 5744-unixy-callback-fix-config-manager-typo.yml
|
||||
- 6.2.0.yml
|
||||
release_date: '2023-01-04'
|
||||
6.3.0:
|
||||
changes:
|
||||
breaking_changes:
|
||||
- 'ModuleHelper module utils - when the module sets output variables named ``msg``,
|
||||
``exception``, ``output``, ``vars``, or ``changed``, the actual output will
|
||||
prefix those names with ``_`` (underscore symbol) only when they clash with
|
||||
output variables generated by ModuleHelper itself, which only occurs when
|
||||
handling exceptions. Please note that this breaking change does not require
|
||||
a new major release since before this release, it was not possible to add
|
||||
such variables to the output `due to a bug <https://github.com/ansible-collections/community.general/pull/5755>`__
|
||||
(https://github.com/ansible-collections/community.general/pull/5765).
|
||||
|
||||
'
|
||||
bugfixes:
|
||||
- ModuleHelper - fix bug when adjusting the name of reserved output variables
|
||||
(https://github.com/ansible-collections/community.general/pull/5755).
|
||||
- alternatives - support subcommands on Fedora 37, which uses ``follower`` instead
|
||||
of ``slave`` (https://github.com/ansible-collections/community.general/pull/5794).
|
||||
- bitwarden lookup plugin - clarify what to do if the bitwarden vault is not
|
||||
unlocked (https://github.com/ansible-collections/community.general/pull/5811).
|
||||
- dig lookup plugin - correctly handle DNSKEY record type's ``algorithm`` field
|
||||
(https://github.com/ansible-collections/community.general/pull/5914).
|
||||
- gem - fix force parameter not being passed to gem command when uninstalling
|
||||
(https://github.com/ansible-collections/community.general/pull/5822).
|
||||
- gem - fix hang due to interactive prompt for confirmation on specific version
|
||||
uninstall (https://github.com/ansible-collections/community.general/pull/5751).
|
||||
- gitlab_deploy_key - also update ``title`` and not just ``can_push`` (https://github.com/ansible-collections/community.general/pull/5888).
|
||||
- keycloak_user_federation - fixes federation creation issue. When a new federation
|
||||
was created and at the same time a default / standard mapper was also changed
|
||||
/ updated, the creation process failed because a variable wrongly set to ``None`` led to a
|
||||
malformed URL request (https://github.com/ansible-collections/community.general/pull/5750).
|
||||
- 'keycloak_user_federation - fixes idempotency detection issues. In some cases
|
||||
the module could fail to properly detect already existing user federations
|
||||
because of a buggy, seemingly superfluous extra query parameter (https://github.com/ansible-collections/community.general/pull/5732).
|
||||
|
||||
'
|
||||
- loganalytics callback plugin - adjust type of callback to ``notification``,
|
||||
it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- logdna callback plugin - adjust type of callback to ``notification``, it was
|
||||
incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- logstash callback plugin - adjust type of callback to ``notification``, it
|
||||
was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- nsupdate - fix zone lookup. The SOA record for an existing zone is returned
|
||||
as an answer RR and not as an authority RR (https://github.com/ansible-collections/community.general/issues/5817,
|
||||
https://github.com/ansible-collections/community.general/pull/5818).
|
||||
- proxmox_disk - fixed issue with read timeout on import action (https://github.com/ansible-collections/community.general/pull/5803).
|
||||
- redfish_utils - removed basic auth HTTP header when performing a GET on the
|
||||
service root resource and when performing a POST to the session collection
|
||||
(https://github.com/ansible-collections/community.general/issues/5886).
|
||||
- splunk callback plugin - adjust type of callback to ``notification``, it was
|
||||
incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- sumologic callback plugin - adjust type of callback to ``notification``, it
|
||||
was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- syslog_json callback plugin - adjust type of callback to ``notification``,
|
||||
it was incorrectly classified as ``aggregate`` before (https://github.com/ansible-collections/community.general/pull/5761).
|
||||
- terraform - fix ``current`` workspace never getting appended to the ``all``
|
||||
key in the ``workspace_ctf`` object (https://github.com/ansible-collections/community.general/pull/5735).
|
||||
- terraform - fix ``terraform init`` failure when there are multiple workspaces
|
||||
on the remote backend and when ``default`` workspace is missing by setting
|
||||
``TF_WORKSPACE`` environmental variable to the value of ``workspace`` when
|
||||
used (https://github.com/ansible-collections/community.general/pull/5735).
|
||||
- terraform module - disable ANSI escape sequences during validation phase (https://github.com/ansible-collections/community.general/pull/5843).
|
||||
- xml - fixed a bug where empty ``children`` list would not be set (https://github.com/ansible-collections/community.general/pull/5808).
|
||||
deprecated_features:
|
||||
- consul - deprecate using parameters unused for ``state=absent`` (https://github.com/ansible-collections/community.general/pull/5772).
|
||||
- gitlab_runner - the default of the new option ``access_level_on_creation``
|
||||
will change from ``false`` to ``true`` in community.general 7.0.0. This will
|
||||
cause ``access_level`` to be used during runner registration as well, and
|
||||
not only during updates (https://github.com/ansible-collections/community.general/pull/5908).
|
||||
minor_changes:
|
||||
- apache2_module - add module argument ``warn_mpm_absent`` to control whether
|
||||
warnings are raised in some edge cases (https://github.com/ansible-collections/community.general/pull/5793).
|
||||
- bitwarden lookup plugin - can now retrieve secrets from custom fields (https://github.com/ansible-collections/community.general/pull/5694).
|
||||
- bitwarden lookup plugin - implement filtering results by ``collection_id``
|
||||
parameter (https://github.com/ansible-collections/community.general/issues/5849).
|
||||
- dig lookup plugin - support CAA record type (https://github.com/ansible-collections/community.general/pull/5913).
|
||||
- gitlab_project - add ``builds_access_level``, ``container_registry_access_level``
|
||||
and ``forking_access_level`` options (https://github.com/ansible-collections/community.general/pull/5706).
|
||||
- gitlab_runner - add new boolean option ``access_level_on_creation``. It controls
|
||||
whether the value of ``access_level`` is used for runner registration or not.
|
||||
The option ``access_level`` has been ignored on registration so far and was
|
||||
only used on updates (https://github.com/ansible-collections/community.general/issues/5907,
|
||||
https://github.com/ansible-collections/community.general/pull/5908).
|
||||
- ilo_redfish_utils module utils - change implementation of DNS Server IP and
|
||||
NTP Server IP update (https://github.com/ansible-collections/community.general/pull/5804).
|
||||
- ipa_group - allow to add and remove external users with the ``external_user``
|
||||
option (https://github.com/ansible-collections/community.general/pull/5897).
|
||||
- iptables_state - minor refactoring within the module (https://github.com/ansible-collections/community.general/pull/5844).
|
||||
- one_vm - add a new ``updateconf`` option which implements the ``one.vm.updateconf``
|
||||
API call (https://github.com/ansible-collections/community.general/pull/5812).
|
||||
- opkg - refactored module to use ``CmdRunner`` for executing ``opkg`` (https://github.com/ansible-collections/community.general/pull/5718).
|
||||
- redhat_subscription - adds ``token`` parameter for subscription-manager authentication
|
||||
using Red Hat API token (https://github.com/ansible-collections/community.general/pull/5725).
|
||||
- snap - minor refactor when executing module (https://github.com/ansible-collections/community.general/pull/5773).
|
||||
- snap_alias - refactored module to use ``CmdRunner`` to execute ``snap`` (https://github.com/ansible-collections/community.general/pull/5486).
|
||||
- sudoers - add ``setenv`` parameter to support passing environment variables
|
||||
via sudo (https://github.com/ansible-collections/community.general/pull/5883); a usage sketch follows this version's changelog.
|
||||
release_summary: Regular bugfix and feature release.
|
||||
fragments:
|
||||
- 5486-snap-alias-cmd-runner.yml
|
||||
- 5694-add-custom-fields-to-bitwarden.yml
|
||||
- 5706-add-builds-forks-container-registry.yml
|
||||
- 5718-opkg-refactor.yaml
|
||||
- 5725-redhat_subscription-add-red-hat-api-token.yml
|
||||
- 5732-bugfix-keycloak-userfed-idempotency.yml
|
||||
- 5735-terraform-init-fix-when-default-workspace-doesnt-exists.yaml
|
||||
- 5750-bugfixing-keycloak-usrfed-fail-when-update-default-mapper-simultaneously.yml
|
||||
- 5751-gem-fix-uninstall-hang.yml
|
||||
- 5755-mh-fix-output-conflict.yml
|
||||
- 5761-callback-types.yml
|
||||
- 5765-mh-lax-output-conflict.yml
|
||||
- 5772-consul-deprecate-params-when-absent.yml
|
||||
- 5773-snap-mh-execute.yml
|
||||
- 5793-apache2-module-npm-warnings.yml
|
||||
- 5794-alternatives-fedora37.yml
|
||||
- 5803-proxmox-read-timeout.yml
|
||||
- 5804-minor-changes-to-hpe-ilo-collection.yml
|
||||
- 5808-xml-children-parameter-does-not-exist.yml
|
||||
- 5811-clarify-bitwarden-error.yml
|
||||
- 5812-implement-updateconf-api-call.yml
|
||||
- 5818-nsupdate-fix-zone-lookup.yml
|
||||
- 5822-gem-uninstall-force.yml
|
||||
- 5843-terraform-validate-no-color.yml
|
||||
- 5844-iptables-state-refactor.yml
|
||||
- 5851-lookup-bitwarden-add-filter-by-collection-id-parameter.yml
|
||||
- 5883-sudoers-add-support-for-setenv-parameter.yml
|
||||
- 5886-redfish-correct-basic-auth-usage-on-session-creation.yml
|
||||
- 5888-update-key-title.yml
|
||||
- 5897-ipa_group-add-external-users.yml
|
||||
- 5907-fix-gitlab_runner-not-idempotent.yml
|
||||
- 5913-dig-caa.yml
|
||||
- 5914-dig-dnskey.yml
|
||||
- 6.3.0.yml
|
||||
modules:
|
||||
- description: Manages Out-Of-Band controllers using Open Composable API (OCAPI)
|
||||
name: ocapi_command
|
||||
namespace: ''
|
||||
- description: Manages Out-Of-Band controllers using Open Composable API (OCAPI)
|
||||
name: ocapi_info
|
||||
namespace: ''
|
||||
release_date: '2023-01-31'
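As a quick illustration of the ``sudoers`` ``setenv`` addition noted in the minor changes above, here is a minimal, hedged task sketch. The rule name, user, and command are placeholders; treating ``setenv`` as a boolean is an assumption based on the changelog wording about passing environment variables via sudo.

```yaml
- name: Let the deploy user run the backup script while keeping selected environment variables
  community.general.sudoers:
    name: deploy-backup                  # placeholder name for the sudoers rule file
    user: deploy                         # placeholder user
    commands: /usr/local/bin/backup.sh   # placeholder command
    setenv: true                         # assumed boolean, per the 6.3.0 changelog entry
```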
|
||||
6.4.0:
|
||||
changes:
|
||||
bugfixes:
|
||||
- cartesian and flattened lookup plugins - adjust to parameter deprecation in
|
||||
ansible-core 2.14's ``listify_lookup_plugin_terms`` helper function (https://github.com/ansible-collections/community.general/pull/6074).
|
||||
- cloudflare_dns - fixed the idempotency for SRV DNS records (https://github.com/ansible-collections/community.general/pull/5972).
|
||||
- cloudflare_dns - fixed the possibility of setting a root-level SRV DNS record
|
||||
(https://github.com/ansible-collections/community.general/pull/5972).
|
||||
- github_webhook - fix always changed state when no secret is provided (https://github.com/ansible-collections/community.general/pull/5994).
|
||||
- jenkins_plugin - fix error due to undefined variable when updates file is
|
||||
not downloaded (https://github.com/ansible-collections/community.general/pull/6100).
|
||||
- keycloak_client - fix accidental replacement of value for attribute ``saml.signing.private.key``
|
||||
with ``no_log`` in wrong contexts (https://github.com/ansible-collections/community.general/pull/5934).
|
||||
- lxd_* modules, lxd inventory plugin - fix TLS/SSL certificate validation problems
|
||||
by using the correct purpose when creating the TLS context (https://github.com/ansible-collections/community.general/issues/5616,
|
||||
https://github.com/ansible-collections/community.general/pull/6034).
|
||||
- nmcli - fix change handling of values specified as an integer 0 (https://github.com/ansible-collections/community.general/pull/5431).
|
||||
- nmcli - fix failure to handle WIFI settings when connection type not specified
|
||||
(https://github.com/ansible-collections/community.general/pull/5431).
|
||||
- nmcli - fix improper detection of changes to ``wifi.wake-on-wlan`` (https://github.com/ansible-collections/community.general/pull/5431).
|
||||
- nmcli - order is significant for lists of addresses (https://github.com/ansible-collections/community.general/pull/6048).
|
||||
- onepassword lookup plugin - Changed to ignore errors from "op account get"
|
||||
calls. Previously, errors would prevent auto-signin code from executing (https://github.com/ansible-collections/community.general/pull/5942).
|
||||
- terraform and timezone - slight refactoring to avoid linter reporting potentially
|
||||
undefined variables (https://github.com/ansible-collections/community.general/pull/5933).
|
||||
- various plugins and modules - remove unnecessary imports (https://github.com/ansible-collections/community.general/pull/5940).
|
||||
- yarn - fix ``global=true`` to check for the configured global folder instead
|
||||
of assuming the default (https://github.com/ansible-collections/community.general/pull/5829)
|
||||
- yarn - fix ``state=absent`` not working with ``global=true`` when the package
|
||||
does not include a binary (https://github.com/ansible-collections/community.general/pull/5829)
|
||||
- yarn - fix ``state=latest`` not working with ``global=true`` (https://github.com/ansible-collections/community.general/issues/5712).
|
||||
- zfs_delegate_admin - zfs allow output can now be parsed when uids/gids are
|
||||
not known to the host system (https://github.com/ansible-collections/community.general/pull/5943).
|
||||
- zypper - make package managing work on readonly filesystem of openSUSE MicroOS
|
||||
(https://github.com/ansible-collections/community.general/pull/5615).
|
||||
deprecated_features:
|
||||
- gitlab_runner - the option ``access_level`` will lose its default value in
|
||||
community.general 8.0.0. From that version on, you have to set this option to
|
||||
``ref_protected`` explicitly, if you want to have a protected runner (https://github.com/ansible-collections/community.general/issues/5925).
|
||||
minor_changes:
|
||||
- dnsimple - set custom User-Agent for API requests to DNSimple (https://github.com/ansible-collections/community.general/pull/5927).
|
||||
- flatpak_remote - add new boolean option ``enabled``. It controls whether
|
||||
the remote is enabled or not (https://github.com/ansible-collections/community.general/pull/5926); see the sketch after this version's changelog.
|
||||
- gitlab_project - add ``releases_access_level``, ``environments_access_level``,
|
||||
``feature_flags_access_level``, ``infrastructure_access_level``, ``monitor_access_level``,
|
||||
and ``security_and_compliance_access_level`` options (https://github.com/ansible-collections/community.general/pull/5986).
|
||||
- jc filter plugin - added the ability to use parser plugins (https://github.com/ansible-collections/community.general/pull/6043).
|
||||
- keycloak_group - add new optional module parameter ``parents`` to properly
|
||||
handle keycloak subgroups (https://github.com/ansible-collections/community.general/pull/5814).
|
||||
- keycloak_user_federation - make ``org.keycloak.storage.ldap.mappers.LDAPStorageMapper``
|
||||
the default value for mappers ``providerType`` (https://github.com/ansible-collections/community.general/pull/5863).
|
||||
- ldap modules - add ``xorder_discovery`` option (https://github.com/ansible-collections/community.general/issues/6045,
|
||||
https://github.com/ansible-collections/community.general/pull/6109).
|
||||
- lxd_container - add diff and check mode (https://github.com/ansible-collections/community.general/pull/5866).
|
||||
- mattermost, rocketchat, slack - replace missing default favicon with docs.ansible.com
|
||||
favicon (https://github.com/ansible-collections/community.general/pull/5928).
|
||||
- modprobe - add ``persistent`` option (https://github.com/ansible-collections/community.general/issues/4028,
|
||||
https://github.com/ansible-collections/community.general/pull/542).
|
||||
- osx_defaults - include stderr in error messages (https://github.com/ansible-collections/community.general/pull/6011).
|
||||
- proxmox - suppress urllib3 ``InsecureRequestWarnings`` when ``validate_certs``
|
||||
option is ``false`` (https://github.com/ansible-collections/community.general/pull/5931).
|
||||
- redfish_command - adding ``EnableSecureBoot`` functionality (https://github.com/ansible-collections/community.general/pull/5899).
|
||||
- redfish_command - adding ``VerifyBiosAttributes`` functionality (https://github.com/ansible-collections/community.general/pull/5900).
|
||||
- sefcontext - add support for path substitutions (https://github.com/ansible-collections/community.general/issues/1193).
|
||||
release_summary: Regular feature and bugfix release.
|
||||
fragments:
|
||||
- 4028-modprobe-persistent-option.yml
|
||||
- 5431-nmcli-wifi.yml
|
||||
- 5615-zypper-transactional-update.yml
|
||||
- 5814-support-keycloak-subgroups.yml
|
||||
- 5829-fix-yarn-global.yml
|
||||
- 5830-sefcontext-path-subs.yml
|
||||
- 5863-providerType-defaulted-keycloak_userfed-mappers.yml
|
||||
- 5866-lxd_container-diff-and-check-mode.yml
|
||||
- 5899-adding-enablesecureboot-functionality-to-redfish-config.yml
|
||||
- 5900-adding-verifybiosattribute-fucntionality-to-redfish-command.yml
|
||||
- 5915-suppress-urllib3-insecure-request-warnings.yml
|
||||
- 5925-align_gitlab_runner_access_level_default_with_gitlab.yml
|
||||
- 5926-flatpak-remote-enabled.yml
|
||||
- 5927-set-user-agent-dnsimple.yml
|
||||
- 5928-fix-favicon-url.yml
|
||||
- 5933-linting.yml
|
||||
- 5934-fix-keycloak-sanitize_cr.yml
|
||||
- 5942-onepassword-ignore-errors-from-op-account-get.yml
|
||||
- 5943-zfs_delegate_admin-fix-zfs-allow-cannot-parse-unknown-uid-gid.yml
|
||||
- 5972-cloudflare-dns-srv-record.yml
|
||||
- 5985-add-new-gitlab-api-features.yml
|
||||
- 5994-github-webhook-secret.yml
|
||||
- 6.4.0.yml
|
||||
- 6011-osx-defaults-errors.yml
|
||||
- 6034-lxd-tls.yml
|
||||
- 6043-jc_plugin_parser_support.yml
|
||||
- 6045-xorder-discovery.yml
|
||||
- 6048-nmcli-addres-order.yml
|
||||
- 6074-loader_in_listify.yml.yml
|
||||
- 6100-jenkins_plugin.yml
|
||||
- remove-unneeded-imports.yml
|
||||
release_date: '2023-02-27'
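To make the new ``enabled`` flag on ``flatpak_remote`` concrete, a minimal sketch follows; the remote name and URL are placeholders, and only the ``enabled`` option itself is taken from the changelog entry above.

```yaml
- name: Keep the flathub remote configured but switched off
  community.general.flatpak_remote:
    name: flathub                                                      # placeholder remote name
    flatpakrepo_url: https://dl.flathub.org/repo/flathub.flatpakrepo   # placeholder repo URL
    state: present
    enabled: false                                                     # new boolean option in community.general 6.4.0
```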
|
||||
|
|
|
|||
|
|
@ -1,2 +0,0 @@
|
|||
minor_changes:
|
||||
- onepassword - support version 2 of the OnePassword CLI (https://github.com/ansible-collections/community.general/pull/4728)
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
bugfixes:
|
||||
- ldap_attrs - fix a bug which caused a ``Bad search filter`` error. The error was occurring when the ldap attribute value contained special characters such as ``(`` or ``*`` (https://github.com/ansible-collections/community.general/issues/5434, https://github.com/ansible-collections/community.general/pull/5435).
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
minor_changes:
|
||||
- ldap_attrs - allow for DNs to have ``{x}`` prefix on first RDN (https://github.com/ansible-collections/community.general/issues/977, https://github.com/ansible-collections/community.general/pull/5450).
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
bugfixes:
|
||||
- iso_create - the module sometimes failed to add folders for Joliet and UDF formats (https://github.com/ansible-collections/community.general/issues/5275).
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
bugfixes:
|
||||
- snap - allow values in the ``options`` parameter to contain whitespaces (https://github.com/ansible-collections/community.general/pull/5475).
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
minor_changes:
|
||||
- ansible_galaxy_install - refactored module to use ``CmdRunner`` to execute ``ansible-galaxy`` (https://github.com/ansible-collections/community.general/pull/5477).
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
minor_changes:
|
||||
- hponcfg - refactored module to use ``CmdRunner`` to execute ``hponcfg`` (https://github.com/ansible-collections/community.general/pull/5483).
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
minor_changes:
|
||||
- mksysb - refactored module to use ``CmdRunner`` to execute ``mksysb`` (https://github.com/ansible-collections/community.general/pull/5484).
|
||||
|
|
@ -1,2 +0,0 @@
|
|||
minor_changes:
|
||||
- cpanm - refactored module to use ``CmdRunner`` to execute ``cpanm`` (https://github.com/ansible-collections/community.general/pull/5485).
|
||||
|
|
@ -1,3 +0,0 @@
|
|||
release_summary: >-
|
||||
New major release of community.general with lots of bugfixes, new features, some removed deprecated features, and some other breaking changes.
|
||||
Please check the corresponding sections of the changelog for more details.
|
||||
|
|
@ -5,7 +5,7 @@
|
|||
|
||||
namespace: community
|
||||
name: general
|
||||
version: 6.0.0
|
||||
version: 6.4.0
|
||||
readme: README.md
|
||||
authors:
|
||||
- Ansible (https://github.com/ansible)
|
||||
|
|
|
|||
2 plugins/cache/memcached.py (vendored)
|
|
@ -52,11 +52,9 @@ import time
|
|||
from multiprocessing import Lock
|
||||
from itertools import chain
|
||||
|
||||
from ansible import constants as C
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils.common._collections_compat import MutableSet
|
||||
from ansible.plugins.cache import BaseCacheModule
|
||||
from ansible.release import __version__ as ansible_base_version
|
||||
from ansible.utils.display import Display
|
||||
|
||||
try:
|
||||
|
|
|
|||
2 plugins/cache/redis.py (vendored)
|
|
@ -67,12 +67,10 @@ import re
|
|||
import time
|
||||
import json
|
||||
|
||||
from ansible import constants as C
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible.parsing.ajson import AnsibleJSONEncoder, AnsibleJSONDecoder
|
||||
from ansible.plugins.cache import BaseCacheModule
|
||||
from ansible.release import __version__ as ansible_base_version
|
||||
from ansible.utils.display import Display
|
||||
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -16,15 +16,15 @@ DOCUMENTATION = '''
|
|||
- cgroups
|
||||
short_description: Profiles maximum memory usage of tasks and full execution using cgroups
|
||||
description:
|
||||
- This is an ansible callback plugin that profiles maximum memory usage of ansible and individual tasks, and displays a recap at the end using cgroups
|
||||
- This is an ansible callback plugin that profiles maximum memory usage of ansible and individual tasks, and displays a recap at the end using cgroups.
|
||||
notes:
|
||||
- Requires ansible to be run from within a cgroup, such as with C(cgexec -g memory:ansible_profile ansible-playbook ...)
|
||||
- This cgroup should only be used by ansible to get accurate results
|
||||
- To create the cgroup, first use a command such as C(sudo cgcreate -a ec2-user:ec2-user -t ec2-user:ec2-user -g memory:ansible_profile)
|
||||
- Requires ansible to be run from within a cgroup, such as with C(cgexec -g memory:ansible_profile ansible-playbook ...).
|
||||
- This cgroup should only be used by ansible to get accurate results.
|
||||
- To create the cgroup, first use a command such as C(sudo cgcreate -a ec2-user:ec2-user -t ec2-user:ec2-user -g memory:ansible_profile).
|
||||
options:
|
||||
max_mem_file:
|
||||
required: true
|
||||
description: Path to cgroups C(memory.max_usage_in_bytes) file. Example C(/sys/fs/cgroup/memory/ansible_profile/memory.max_usage_in_bytes)
|
||||
description: Path to cgroups C(memory.max_usage_in_bytes) file. Example C(/sys/fs/cgroup/memory/ansible_profile/memory.max_usage_in_bytes).
|
||||
env:
|
||||
- name: CGROUP_MAX_MEM_FILE
|
||||
ini:
|
||||
|
|
@ -32,7 +32,7 @@ DOCUMENTATION = '''
|
|||
key: max_mem_file
|
||||
cur_mem_file:
|
||||
required: true
|
||||
description: Path to C(memory.usage_in_bytes) file. Example C(/sys/fs/cgroup/memory/ansible_profile/memory.usage_in_bytes)
|
||||
description: Path to C(memory.usage_in_bytes) file. Example C(/sys/fs/cgroup/memory/ansible_profile/memory.usage_in_bytes).
|
||||
env:
|
||||
- name: CGROUP_CUR_MEM_FILE
|
||||
ini:
|
||||
|
|
|
|||
|
|
@ -13,8 +13,8 @@ DOCUMENTATION = '''
|
|||
type: aggregate
|
||||
short_description: demo callback that adds play/task context
|
||||
description:
|
||||
- Displays some play and task context along with normal output
|
||||
- This is mostly for demo purposes
|
||||
- Displays some play and task context along with normal output.
|
||||
- This is mostly for demo purposes.
|
||||
requirements:
|
||||
- whitelist in configuration
|
||||
'''
|
||||
|
|
|
|||
|
|
@ -21,13 +21,12 @@ DOCUMENTATION = '''
|
|||
extends_documentation_fragment:
|
||||
- default_callback
|
||||
requirements:
|
||||
- set as stdout callback in ansible.cfg (stdout_callback = counter_enabled)
|
||||
- set as stdout callback in C(ansible.cfg) (C(stdout_callback = counter_enabled))
|
||||
'''
|
||||
|
||||
from ansible import constants as C
|
||||
from ansible.plugins.callback import CallbackBase
|
||||
from ansible.utils.color import colorize, hostcolor
|
||||
from ansible.template import Templar
|
||||
from ansible.playbook.task_include import TaskInclude
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ short_description: minimal stdout output
|
|||
extends_documentation_fragment:
|
||||
- default_callback
|
||||
description:
|
||||
- When in verbose mode it will act the same as the default callback
|
||||
- When in verbose mode it will act the same as the default callback.
|
||||
author:
|
||||
- Dag Wieers (@dagwieers)
|
||||
requirements:
|
||||
|
|
|
|||
|
|
@ -786,10 +786,6 @@ playbook.yml: >
|
|||
|
||||
import sys
|
||||
from contextlib import contextmanager
|
||||
from ansible import constants as C
|
||||
from ansible.playbook.task_include import TaskInclude
|
||||
from ansible.plugins.callback import CallbackBase
|
||||
from ansible.utils.color import colorize, hostcolor
|
||||
from ansible.template import Templar
|
||||
from ansible.vars.manager import VariableManager
|
||||
from ansible.plugins.callback.default import CallbackModule as Default
|
||||
|
|
|
|||
|
|
@ -13,10 +13,10 @@ DOCUMENTATION = '''
|
|||
type: notification
|
||||
short_description: post task events to a jabber server
|
||||
description:
|
||||
- The chatty part of ChatOps with a Hipchat server as a target
|
||||
- The chatty part of ChatOps with a Hipchat server as a target.
|
||||
- This callback plugin sends status updates to a HipChat channel during playbook execution.
|
||||
requirements:
|
||||
- xmpp (python lib https://github.com/ArchipelProject/xmpppy)
|
||||
- xmpp (Python library U(https://github.com/ArchipelProject/xmpppy))
|
||||
options:
|
||||
server:
|
||||
description: connection info to jabber server
|
||||
|
|
|
|||
|
|
@ -13,10 +13,10 @@ DOCUMENTATION = '''
|
|||
type: notification
|
||||
short_description: write playbook output to log file
|
||||
description:
|
||||
- This callback writes playbook output to a file per host in the C(/var/log/ansible/hosts) directory
|
||||
- This callback writes playbook output to a file per host in the C(/var/log/ansible/hosts) directory.
|
||||
requirements:
|
||||
- Whitelist in configuration
|
||||
- A writeable /var/log/ansible/hosts directory by the user executing Ansible on the controller
|
||||
- A writeable C(/var/log/ansible/hosts) directory by the user executing Ansible on the controller
|
||||
options:
|
||||
log_folder:
|
||||
default: /var/log/ansible/hosts
|
||||
|
|
|
|||
|
|
@ -8,7 +8,7 @@ __metaclass__ = type
|
|||
|
||||
DOCUMENTATION = '''
|
||||
name: loganalytics
|
||||
type: aggregate
|
||||
type: notification
|
||||
short_description: Posts task results to Azure Log Analytics
|
||||
author: "Cyrus Li (@zhcli) <cyrus1006@gmail.com>"
|
||||
description:
|
||||
|
|
@ -54,7 +54,6 @@ examples: |
|
|||
import hashlib
|
||||
import hmac
|
||||
import base64
|
||||
import logging
|
||||
import json
|
||||
import uuid
|
||||
import socket
|
||||
|
|
@ -155,7 +154,7 @@ class AzureLogAnalyticsSource(object):
|
|||
|
||||
class CallbackModule(CallbackBase):
|
||||
CALLBACK_VERSION = 2.0
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'loganalytics'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
|
|
|||
|
|
@ -9,17 +9,17 @@ __metaclass__ = type
|
|||
DOCUMENTATION = '''
|
||||
author: Unknown (!UNKNOWN)
|
||||
name: logdna
|
||||
type: aggregate
|
||||
type: notification
|
||||
short_description: Sends playbook logs to LogDNA
|
||||
description:
|
||||
- This callback will report logs from playbook actions, tasks, and events to LogDNA (https://app.logdna.com)
|
||||
- This callback will report logs from playbook actions, tasks, and events to LogDNA (U(https://app.logdna.com)).
|
||||
requirements:
|
||||
- LogDNA Python Library (https://github.com/logdna/python)
|
||||
- LogDNA Python Library (U(https://github.com/logdna/python))
|
||||
- whitelisting in configuration
|
||||
options:
|
||||
conf_key:
|
||||
required: true
|
||||
description: LogDNA Ingestion Key
|
||||
description: LogDNA Ingestion Key.
|
||||
type: string
|
||||
env:
|
||||
- name: LOGDNA_INGESTION_KEY
|
||||
|
|
@ -28,7 +28,7 @@ DOCUMENTATION = '''
|
|||
key: conf_key
|
||||
plugin_ignore_errors:
|
||||
required: false
|
||||
description: Whether to ignore errors on failing or not
|
||||
description: Whether to ignore errors on failing or not.
|
||||
type: boolean
|
||||
env:
|
||||
- name: ANSIBLE_IGNORE_ERRORS
|
||||
|
|
@ -38,7 +38,7 @@ DOCUMENTATION = '''
|
|||
default: false
|
||||
conf_hostname:
|
||||
required: false
|
||||
description: Alternative Host Name; the current host name by default
|
||||
description: Alternative Host Name; the current host name by default.
|
||||
type: string
|
||||
env:
|
||||
- name: LOGDNA_HOSTNAME
|
||||
|
|
@ -47,7 +47,7 @@ DOCUMENTATION = '''
|
|||
key: conf_hostname
|
||||
conf_tags:
|
||||
required: false
|
||||
description: Tags
|
||||
description: Tags.
|
||||
type: string
|
||||
env:
|
||||
- name: LOGDNA_TAGS
|
||||
|
|
@ -111,7 +111,7 @@ def isJSONable(obj):
|
|||
class CallbackModule(CallbackBase):
|
||||
|
||||
CALLBACK_VERSION = 0.1
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'community.general.logdna'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
|
|
|||
|
|
@ -13,15 +13,15 @@ DOCUMENTATION = '''
|
|||
short_description: Sends events to Logentries
|
||||
description:
|
||||
- This callback plugin will generate JSON objects and send them to Logentries via TCP for auditing/debugging purposes.
|
||||
- Before 2.4, if you wanted to use an ini configuration, the file must be placed in the same directory as this plugin and named logentries.ini
|
||||
- Before 2.4, if you wanted to use an ini configuration, the file must be placed in the same directory as this plugin and named C(logentries.ini).
|
||||
- In 2.4 and above you can just put it in the main Ansible configuration file.
|
||||
requirements:
|
||||
- whitelisting in configuration
|
||||
- certifi (python library)
|
||||
- flatdict (python library), if you want to use the 'flatten' option
|
||||
- certifi (Python library)
|
||||
- flatdict (Python library), if you want to use the 'flatten' option
|
||||
options:
|
||||
api:
|
||||
description: URI to the Logentries API
|
||||
description: URI to the Logentries API.
|
||||
env:
|
||||
- name: LOGENTRIES_API
|
||||
default: data.logentries.com
|
||||
|
|
@ -29,7 +29,7 @@ DOCUMENTATION = '''
|
|||
- section: callback_logentries
|
||||
key: api
|
||||
port:
|
||||
description: HTTP port to use when connecting to the API
|
||||
description: HTTP port to use when connecting to the API.
|
||||
env:
|
||||
- name: LOGENTRIES_PORT
|
||||
default: 80
|
||||
|
|
@ -37,7 +37,7 @@ DOCUMENTATION = '''
|
|||
- section: callback_logentries
|
||||
key: port
|
||||
tls_port:
|
||||
description: Port to use when connecting to the API when TLS is enabled
|
||||
description: Port to use when connecting to the API when TLS is enabled.
|
||||
env:
|
||||
- name: LOGENTRIES_TLS_PORT
|
||||
default: 443
|
||||
|
|
@ -45,7 +45,7 @@ DOCUMENTATION = '''
|
|||
- section: callback_logentries
|
||||
key: tls_port
|
||||
token:
|
||||
description: The logentries "TCP token"
|
||||
description: The logentries C(TCP token).
|
||||
env:
|
||||
- name: LOGENTRIES_ANSIBLE_TOKEN
|
||||
required: true
|
||||
|
|
@ -54,7 +54,7 @@ DOCUMENTATION = '''
|
|||
key: token
|
||||
use_tls:
|
||||
description:
|
||||
- Toggle to decide whether to use TLS to encrypt the communications with the API server
|
||||
- Toggle to decide whether to use TLS to encrypt the communications with the API server.
|
||||
env:
|
||||
- name: LOGENTRIES_USE_TLS
|
||||
default: false
|
||||
|
|
@ -63,7 +63,7 @@ DOCUMENTATION = '''
|
|||
- section: callback_logentries
|
||||
key: use_tls
|
||||
flatten:
|
||||
description: flatten complex data structures into a single dictionary with complex keys
|
||||
description: Flatten complex data structures into a single dictionary with complex keys.
|
||||
type: boolean
|
||||
default: false
|
||||
env:
|
||||
|
|
|
|||
|
|
@ -13,13 +13,13 @@ DOCUMENTATION = r'''
|
|||
type: notification
|
||||
short_description: Sends events to Logstash
|
||||
description:
|
||||
- This callback will report facts and task events to Logstash https://www.elastic.co/products/logstash
|
||||
- This callback will report facts and task events to Logstash U(https://www.elastic.co/products/logstash).
|
||||
requirements:
|
||||
- whitelisting in configuration
|
||||
- logstash (python library)
|
||||
- logstash (Python library)
|
||||
options:
|
||||
server:
|
||||
description: Address of the Logstash server
|
||||
description: Address of the Logstash server.
|
||||
env:
|
||||
- name: LOGSTASH_SERVER
|
||||
ini:
|
||||
|
|
@ -28,7 +28,7 @@ DOCUMENTATION = r'''
|
|||
version_added: 1.0.0
|
||||
default: localhost
|
||||
port:
|
||||
description: Port on which logstash is listening
|
||||
description: Port on which logstash is listening.
|
||||
env:
|
||||
- name: LOGSTASH_PORT
|
||||
ini:
|
||||
|
|
@ -37,7 +37,7 @@ DOCUMENTATION = r'''
|
|||
version_added: 1.0.0
|
||||
default: 5000
|
||||
type:
|
||||
description: Message type
|
||||
description: Message type.
|
||||
env:
|
||||
- name: LOGSTASH_TYPE
|
||||
ini:
|
||||
|
|
@ -54,7 +54,7 @@ DOCUMENTATION = r'''
|
|||
env:
|
||||
- name: LOGSTASH_PRE_COMMAND
|
||||
format_version:
|
||||
description: Logging format
|
||||
description: Logging format.
|
||||
type: str
|
||||
version_added: 2.0.0
|
||||
ini:
|
||||
|
|
@ -113,7 +113,7 @@ from ansible.plugins.callback import CallbackBase
|
|||
class CallbackModule(CallbackBase):
|
||||
|
||||
CALLBACK_VERSION = 2.0
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'community.general.logstash'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
|
|
|||
|
|
@ -79,7 +79,6 @@ import re
|
|||
import email.utils
|
||||
import smtplib
|
||||
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils.common.text.converters import to_bytes
|
||||
from ansible.parsing.ajson import AnsibleJSONEncoder
|
||||
from ansible.plugins.callback import CallbackBase
|
||||
|
|
|
|||
|
|
@ -67,9 +67,6 @@ DOCUMENTATION = '''
|
|||
type: string
|
||||
'''
|
||||
|
||||
import os
|
||||
import json
|
||||
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||
from ansible.module_utils.common.text.converters import to_bytes
|
||||
from ansible.module_utils.urls import open_url
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ DOCUMENTATION = '''
|
|||
- set as main display callback
|
||||
short_description: Don't display stuff to screen
|
||||
description:
|
||||
- This callback prevents outputting events to screen
|
||||
- This callback prevents outputting events to screen.
|
||||
'''
|
||||
|
||||
from ansible.plugins.callback import CallbackBase
|
||||
|
|
|
|||
|
|
@ -14,12 +14,12 @@ DOCUMENTATION = '''
|
|||
type: notification
|
||||
requirements:
|
||||
- whitelisting in configuration
|
||||
- the '/usr/bin/say' command line program (standard on macOS) or 'espeak' command line program
|
||||
- the C(/usr/bin/say) command line program (standard on macOS) or C(espeak) command line program
|
||||
short_description: notify using software speech synthesizer
|
||||
description:
|
||||
- This plugin will use the 'say' or 'espeak' program to "speak" about play events.
|
||||
- This plugin will use the C(say) or C(espeak) program to "speak" about play events.
|
||||
notes:
|
||||
- In 2.8, this callback has been renamed from C(osx_say) into M(community.general.say).
|
||||
- In Ansible 2.8, this callback has been renamed from C(osx_say) into M(community.general.say).
|
||||
'''
|
||||
|
||||
import platform
|
||||
|
|
|
|||
|
|
@ -22,7 +22,7 @@ DOCUMENTATION = '''
|
|||
options:
|
||||
nocolor:
|
||||
default: false
|
||||
description: This setting allows suppressing colorizing output
|
||||
description: This setting allows suppressing colorizing output.
|
||||
env:
|
||||
- name: ANSIBLE_NOCOLOR
|
||||
- name: ANSIBLE_SELECTIVE_DONT_COLORIZE
|
||||
|
|
|
|||
|
|
@ -18,11 +18,11 @@ DOCUMENTATION = '''
|
|||
short_description: Sends play events to a Slack channel
|
||||
description:
|
||||
- This is an ansible callback plugin that sends status updates to a Slack channel during playbook execution.
|
||||
- Before 2.4 only environment variables were available for configuring this plugin
|
||||
- Before Ansible 2.4 only environment variables were available for configuring this plugin.
|
||||
options:
|
||||
webhook_url:
|
||||
required: true
|
||||
description: Slack Webhook URL
|
||||
description: Slack Webhook URL.
|
||||
env:
|
||||
- name: SLACK_WEBHOOK_URL
|
||||
ini:
|
||||
|
|
@ -45,7 +45,7 @@ DOCUMENTATION = '''
|
|||
- section: callback_slack
|
||||
key: username
|
||||
validate_certs:
|
||||
description: validate the SSL certificate of the Slack server. (For HTTPS URLs)
|
||||
description: Validate the SSL certificate of the Slack server for HTTPS URLs.
|
||||
env:
|
||||
- name: SLACK_VALIDATE_CERTS
|
||||
ini:
|
||||
|
|
|
|||
|
|
@ -8,27 +8,27 @@ __metaclass__ = type
|
|||
|
||||
DOCUMENTATION = '''
|
||||
name: splunk
|
||||
type: aggregate
|
||||
type: notification
|
||||
short_description: Sends task result events to Splunk HTTP Event Collector
|
||||
author: "Stuart Hirst (!UNKNOWN) <support@convergingdata.com>"
|
||||
description:
|
||||
- This callback plugin will send task results as JSON formatted events to a Splunk HTTP collector.
|
||||
- The companion Splunk Monitoring & Diagnostics App is available here "https://splunkbase.splunk.com/app/4023/"
|
||||
- The companion Splunk Monitoring & Diagnostics App is available here U(https://splunkbase.splunk.com/app/4023/).
|
||||
- Credit to "Ryan Currah (@ryancurrah)" for original source upon which this is based.
|
||||
requirements:
|
||||
- Whitelisting this callback plugin
|
||||
- 'Create a HTTP Event Collector in Splunk'
|
||||
- 'Define the url and token in ansible.cfg'
|
||||
- 'Define the URL and token in C(ansible.cfg)'
|
||||
options:
|
||||
url:
|
||||
description: URL to the Splunk HTTP collector source
|
||||
description: URL to the Splunk HTTP collector source.
|
||||
env:
|
||||
- name: SPLUNK_URL
|
||||
ini:
|
||||
- section: callback_splunk
|
||||
key: url
|
||||
authtoken:
|
||||
description: Token to authenticate the connection to the Splunk HTTP collector
|
||||
description: Token to authenticate the connection to the Splunk HTTP collector.
|
||||
env:
|
||||
- name: SPLUNK_AUTHTOKEN
|
||||
ini:
|
||||
|
|
@ -48,7 +48,7 @@ DOCUMENTATION = '''
|
|||
version_added: '1.0.0'
|
||||
include_milliseconds:
|
||||
description: Whether to include milliseconds as part of the generated timestamp field in the event
|
||||
sent to the Splunk HTTP collector
|
||||
sent to the Splunk HTTP collector.
|
||||
env:
|
||||
- name: SPLUNK_INCLUDE_MILLISECONDS
|
||||
ini:
|
||||
|
|
@ -165,7 +165,7 @@ class SplunkHTTPCollectorSource(object):
|
|||
|
||||
class CallbackModule(CallbackBase):
|
||||
CALLBACK_VERSION = 2.0
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'community.general.splunk'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
|
|
|||
|
|
@ -8,18 +8,18 @@ __metaclass__ = type
|
|||
|
||||
DOCUMENTATION = '''
|
||||
name: sumologic
|
||||
type: aggregate
|
||||
type: notification
|
||||
short_description: Sends task result events to Sumologic
|
||||
author: "Ryan Currah (@ryancurrah)"
|
||||
description:
|
||||
- This callback plugin will send task results as JSON formatted events to a Sumologic HTTP collector source
|
||||
- This callback plugin will send task results as JSON formatted events to a Sumologic HTTP collector source.
|
||||
requirements:
|
||||
- Whitelisting this callback plugin
|
||||
- 'Create a HTTP collector source in Sumologic and specify a custom timestamp format of C(yyyy-MM-dd HH:mm:ss ZZZZ) and a custom timestamp locator
|
||||
of C("timestamp": "(.*)")'
|
||||
options:
|
||||
url:
|
||||
description: URL to the Sumologic HTTP collector source
|
||||
description: URL to the Sumologic HTTP collector source.
|
||||
env:
|
||||
- name: SUMOLOGIC_URL
|
||||
ini:
|
||||
|
|
@ -28,7 +28,7 @@ options:
|
|||
'''
|
||||
|
||||
EXAMPLES = '''
|
||||
examples: >
|
||||
examples: |
|
||||
To enable, add this to your ansible.cfg file in the defaults block
|
||||
[defaults]
|
||||
callback_whitelist = community.general.sumologic
|
||||
|
|
@ -111,7 +111,7 @@ class SumologicHTTPCollectorSource(object):
|
|||
|
||||
class CallbackModule(CallbackBase):
|
||||
CALLBACK_VERSION = 2.0
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'community.general.sumologic'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
|
|
|||
|
|
@ -15,11 +15,11 @@ DOCUMENTATION = '''
|
|||
- whitelist in configuration
|
||||
short_description: sends JSON events to syslog
|
||||
description:
|
||||
- This plugin logs ansible-playbook and ansible runs to a syslog server in JSON format
|
||||
- Before Ansible 2.9 only environment variables were available for configuration
|
||||
- This plugin logs ansible-playbook and ansible runs to a syslog server in JSON format.
|
||||
- Before Ansible 2.9 only environment variables were available for configuration.
|
||||
options:
|
||||
server:
|
||||
description: syslog server that will receive the event
|
||||
description: Syslog server that will receive the event.
|
||||
env:
|
||||
- name: SYSLOG_SERVER
|
||||
default: localhost
|
||||
|
|
@ -27,7 +27,7 @@ DOCUMENTATION = '''
|
|||
- section: callback_syslog_json
|
||||
key: syslog_server
|
||||
port:
|
||||
description: port on which the syslog server is listening
|
||||
description: Port on which the syslog server is listening.
|
||||
env:
|
||||
- name: SYSLOG_PORT
|
||||
default: 514
|
||||
|
|
@ -35,7 +35,7 @@ DOCUMENTATION = '''
|
|||
- section: callback_syslog_json
|
||||
key: syslog_port
|
||||
facility:
|
||||
description: syslog facility to log as
|
||||
description: Syslog facility to log as.
|
||||
env:
|
||||
- name: SYSLOG_FACILITY
|
||||
default: user
|
||||
|
|
@ -54,9 +54,6 @@ DOCUMENTATION = '''
|
|||
version_added: 4.5.0
|
||||
'''
|
||||
|
||||
import os
|
||||
import json
|
||||
|
||||
import logging
|
||||
import logging.handlers
|
||||
|
||||
|
|
@ -71,7 +68,7 @@ class CallbackModule(CallbackBase):
|
|||
"""
|
||||
|
||||
CALLBACK_VERSION = 2.0
|
||||
CALLBACK_TYPE = 'aggregate'
|
||||
CALLBACK_TYPE = 'notification'
|
||||
CALLBACK_NAME = 'community.general.syslog_json'
|
||||
CALLBACK_NEEDS_WHITELIST = True
|
||||
|
||||
|
|
|
|||
|
|
@ -63,7 +63,7 @@ class CallbackModule(CallbackModule_default):
|
|||
|
||||
def _preprocess_result(self, result):
|
||||
self.delegated_vars = result._result.get('_ansible_delegated_vars', None)
|
||||
self._handle_exception(result._result, use_stderr=self.display_failed_stderr)
|
||||
self._handle_exception(result._result, use_stderr=self.get_option('display_failed_stderr'))
|
||||
self._handle_warnings(result._result)
|
||||
|
||||
def _process_result_output(self, result, msg):
|
||||
|
|
@ -109,7 +109,7 @@ class CallbackModule(CallbackModule_default):
|
|||
self._display.display(msg)
|
||||
|
||||
def v2_runner_on_skipped(self, result, ignore_errors=False):
|
||||
if self.display_skipped_hosts:
|
||||
if self.get_option('display_skipped_hosts'):
|
||||
self._preprocess_result(result)
|
||||
display_color = C.COLOR_SKIP
|
||||
msg = "skipped"
|
||||
|
|
@ -128,7 +128,7 @@ class CallbackModule(CallbackModule_default):
|
|||
msg += " | item: %s" % (item_value,)
|
||||
|
||||
task_result = self._process_result_output(result, msg)
|
||||
self._display.display(" " + task_result, display_color, stderr=self.display_failed_stderr)
|
||||
self._display.display(" " + task_result, display_color, stderr=self.get_option('display_failed_stderr'))
|
||||
|
||||
def v2_runner_on_ok(self, result, msg="ok", display_color=C.COLOR_OK):
|
||||
self._preprocess_result(result)
|
||||
|
|
@ -142,7 +142,7 @@ class CallbackModule(CallbackModule_default):
|
|||
display_color = C.COLOR_CHANGED
|
||||
task_result = self._process_result_output(result, msg)
|
||||
self._display.display(" " + task_result, display_color)
|
||||
elif self.display_ok_hosts:
|
||||
elif self.get_option('display_ok_hosts'):
|
||||
task_result = self._process_result_output(result, msg)
|
||||
self._display.display(" " + task_result, display_color)
|
||||
|
||||
|
|
@ -162,7 +162,7 @@ class CallbackModule(CallbackModule_default):
|
|||
display_color = C.COLOR_UNREACHABLE
|
||||
task_result = self._process_result_output(result, msg)
|
||||
|
||||
self._display.display(" " + task_result, display_color, stderr=self.display_failed_stderr)
|
||||
self._display.display(" " + task_result, display_color, stderr=self.get_option('display_failed_stderr'))
|
||||
|
||||
def v2_on_file_diff(self, result):
|
||||
if result._task.loop and 'results' in result._result:
|
||||
|
|
@ -205,7 +205,7 @@ class CallbackModule(CallbackModule_default):
|
|||
colorize(u'ignored', t['ignored'], None)),
|
||||
log_only=True
|
||||
)
|
||||
if stats.custom and self.show_custom_stats:
|
||||
if stats.custom and self.get_option('show_custom_stats'):
|
||||
self._display.banner("CUSTOM STATS: ")
|
||||
# per host
|
||||
# TODO: come up with 'pretty format'
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ DOCUMENTATION = '''
|
|||
author: Unknown (!UNKNOWN)
|
||||
name: yaml
|
||||
type: stdout
|
||||
short_description: yaml-ized Ansible screen output
|
||||
short_description: YAML-ized Ansible screen output
|
||||
description:
|
||||
- Ansible output that can be quite a bit easier to read than the
|
||||
default JSON formatting.
|
||||
|
|
@ -25,12 +25,10 @@ import yaml
|
|||
import json
|
||||
import re
|
||||
import string
|
||||
import sys
|
||||
|
||||
from ansible.module_utils.common.text.converters import to_bytes, to_text
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
from ansible.parsing.yaml.dumper import AnsibleDumper
|
||||
from ansible.plugins.callback import CallbackBase, strip_internal_keys, module_response_deepcopy
|
||||
from ansible.plugins.callback import strip_internal_keys, module_response_deepcopy
|
||||
from ansible.plugins.callback.default import CallbackModule as Default
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -22,6 +22,7 @@ DOCUMENTATION = '''
|
|||
- The path of the chroot you want to access.
|
||||
default: inventory_hostname
|
||||
vars:
|
||||
- name: inventory_hostname
|
||||
- name: ansible_host
|
||||
executable:
|
||||
description:
|
||||
|
|
|
|||
|
|
@ -20,9 +20,13 @@ attributes:
|
|||
description: Will return details on what has changed (or possibly needs changing in C(check_mode)), when in diff mode.
|
||||
'''
|
||||
|
||||
# platform:
|
||||
# description: Target OS/families that can be operated against.
|
||||
# support: N/A
|
||||
PLATFORM = r'''
|
||||
options: {}
|
||||
attributes:
|
||||
platform:
|
||||
description: Target OS/families that can be operated against.
|
||||
support: N/A
|
||||
'''
|
||||
|
||||
# Should be used together with the standard fragment
|
||||
INFO_MODULE = r'''
|
||||
|
|
|
|||
|
|
@ -29,8 +29,7 @@ options:
|
|||
required: true
|
||||
|
||||
requirements:
|
||||
- hpe3par_sdk >= 1.0.2. Install using 'pip install hpe3par_sdk'
|
||||
- hpe3par_sdk >= 1.0.2. Install using C(pip install hpe3par_sdk).
|
||||
- WSAPI service should be enabled on the 3PAR storage array.
|
||||
notes:
|
||||
- check_mode not supported
|
||||
'''
|
||||
|
|
|
|||
|
|
@ -60,9 +60,20 @@ options:
|
|||
sasl_class:
|
||||
description:
|
||||
- The class to use for SASL authentication.
|
||||
- possible choices are C(external), C(gssapi).
|
||||
- Possible choices are C(external), C(gssapi).
|
||||
type: str
|
||||
choices: ['external', 'gssapi']
|
||||
default: external
|
||||
version_added: "2.0.0"
|
||||
xorder_discovery:
|
||||
description:
|
||||
- Set the behavior on how to process Xordered DNs.
|
||||
- C(enable) will perform a C(ONELEVEL) search below the superior RDN to find the matching DN.
|
||||
- C(disable) will always use the DN unmodified (as passed by the I(dn) parameter).
|
||||
- C(auto) will only perform a search if the first RDN does not contain an index number (C({x})).
|
||||
- Possible choices are C(enable), C(auto), C(disable).
|
||||
type: str
|
||||
choices: ['enable', 'auto', 'disable']
|
||||
default: auto
|
||||
version_added: "6.4.0"
|
||||
'''
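A short, hedged sketch of how the new ``xorder_discovery`` option from this documentation fragment might be used with one of the LDAP modules; the DN and attribute values are purely illustrative.

```yaml
- name: Update an X-ordered configuration entry without rewriting its index
  community.general.ldap_attrs:
    dn: "{0}cn=config"        # first RDN already carries a {x} index
    attributes:
      olcLogLevel: stats      # illustrative attribute and value
    xorder_discovery: auto    # default; a discovery search only runs when the first RDN lacks a {x} prefix
```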
|
||||
|
|
|
|||
|
|
@ -26,6 +26,7 @@ DOCUMENTATION = '''
|
|||
description:
|
||||
- The correct parser for the input data.
|
||||
- For example C(ifconfig).
|
||||
- "Note: use underscores instead of dashes (if any) in the parser module name."
|
||||
- See U(https://github.com/kellyjonbrazil/jc#parsers) for the latest list of parsers.
|
||||
type: string
|
||||
required: true
|
||||
|
|
@ -79,13 +80,13 @@ from ansible.errors import AnsibleError, AnsibleFilterError
|
|||
import importlib
|
||||
|
||||
try:
|
||||
import jc
|
||||
import jc # noqa: F401, pylint: disable=unused-import
|
||||
HAS_LIB = True
|
||||
except ImportError:
|
||||
HAS_LIB = False
|
||||
|
||||
|
||||
def jc(data, parser, quiet=True, raw=False):
|
||||
def jc_filter(data, parser, quiet=True, raw=False):
|
||||
"""Convert returned command output to JSON using the JC library
|
||||
|
||||
Arguments:
|
||||
|
|
@ -137,8 +138,14 @@ def jc(data, parser, quiet=True, raw=False):
|
|||
raise AnsibleError('You need to install "jc" as a Python library on the Ansible controller prior to running jc filter')
|
||||
|
||||
try:
|
||||
jc_parser = importlib.import_module('jc.parsers.' + parser)
|
||||
return jc_parser.parse(data, quiet=quiet, raw=raw)
|
||||
# new API (jc v1.18.0 and higher) allows use of plugin parsers
|
||||
if hasattr(jc, 'parse'):
|
||||
return jc.parse(parser, data, quiet=quiet, raw=raw)
|
||||
|
||||
# old API (jc v1.17.7 and lower)
|
||||
else:
|
||||
jc_parser = importlib.import_module('jc.parsers.' + parser)
|
||||
return jc_parser.parse(data, quiet=quiet, raw=raw)
|
||||
|
||||
except Exception as e:
|
||||
raise AnsibleFilterError('Error in jc filter plugin: %s' % e)
|
||||
|
|
@ -149,5 +156,5 @@ class FilterModule(object):
|
|||
|
||||
def filters(self):
|
||||
return {
|
||||
'jc': jc
|
||||
'jc': jc_filter,
|
||||
}
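The branch above switches between the old ``importlib`` lookup and the ``jc.parse()`` entry point available from jc 1.18.0 on, which is what enables plugin parsers. A minimal usage sketch of the filter itself; the command and parser name are just examples:

```yaml
- name: Turn raw command output into structured data with the jc filter
  hosts: localhost
  tasks:
    - name: Capture raw uptime output
      ansible.builtin.command: uptime
      register: uptime_raw

    - name: Parse it; any parser name accepted by jc, including plugin parsers on jc >= 1.18.0, works here
      ansible.builtin.debug:
        msg: "{{ uptime_raw.stdout | community.general.jc('uptime') }}"
```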
|
||||
|
|
|
|||
|
|
@ -102,8 +102,6 @@ from ansible.errors import AnsibleFilterError
|
|||
from ansible.module_utils.six import string_types
|
||||
from ansible.module_utils.common._collections_compat import Mapping, Sequence
|
||||
from ansible.utils.vars import merge_hash
|
||||
from ansible.release import __version__ as ansible_version
|
||||
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion
|
||||
|
||||
from collections import defaultdict
|
||||
from operator import itemgetter
|
||||
|
|
|
|||
|
|
@ -121,10 +121,7 @@ compose:
|
|||
ansible_host: "ipv4 | community.general.json_query('[?public==`false`].address') | first"
|
||||
'''
|
||||
|
||||
import os
|
||||
|
||||
from ansible.errors import AnsibleError, AnsibleParserError
|
||||
from ansible.module_utils.six import string_types
|
||||
from ansible.errors import AnsibleError
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -55,6 +55,11 @@ DOCUMENTATION = r'''
|
|||
type: str
|
||||
default: none
|
||||
choices: [ 'STOPPED', 'STARTING', 'RUNNING', 'none' ]
|
||||
project:
|
||||
description: Filter the instance according to the given project.
|
||||
type: str
|
||||
default: default
|
||||
version_added: 6.2.0
|
||||
type_filter:
|
||||
description:
|
||||
- Filter the instances by type C(virtual-machine), C(container) or C(both).
|
||||
|
|
@ -140,19 +145,21 @@ groupby:
|
|||
vlan666:
|
||||
type: vlanid
|
||||
attribute: 666
|
||||
projectInternals:
|
||||
type: project
|
||||
attribute: internals
|
||||
'''
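Putting the new ``project`` option and the ``project`` group type together, an inventory source might look like the following sketch; the socket path and project name are assumptions for illustration, while the ``groupby`` block mirrors the example in the documentation above.

```yaml
# lxd.yml - restrict the inventory to one LXD project and group by it
plugin: community.general.lxd
url: unix:/var/snap/lxd/common/lxd/unix.socket   # assumed snap socket path, adjust for your install
project: internals                               # assumed project name
groupby:
  projectInternals:
    type: project
    attribute: internals
```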
|
||||
|
||||
import binascii
|
||||
import json
|
||||
import re
|
||||
import time
|
||||
import os
|
||||
import socket
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin
|
||||
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||
from ansible.module_utils.common.dict_transformations import dict_merge
|
||||
from ansible.module_utils.six import raise_from
|
||||
from ansible.errors import AnsibleError, AnsibleParserError
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlencode
|
||||
from ansible_collections.community.general.plugins.module_utils.lxd import LXDClient, LXDClientException
|
||||
|
||||
try:
|
||||
|
|
@ -330,7 +337,15 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
# "status_code": 200,
|
||||
# "type": "sync"
|
||||
# }
|
||||
instances = self.socket.do('GET', '/1.0/instances')
|
||||
url = '/1.0/instances'
|
||||
if self.project:
|
||||
url = url + '?{0}'.format(urlencode(dict(project=self.project)))
|
||||
|
||||
instances = self.socket.do('GET', url)
|
||||
|
||||
if self.project:
|
||||
return [m.split('/')[3].split('?')[0] for m in instances['metadata']]
|
||||
|
||||
return [m.split('/')[3] for m in instances['metadata']]
|
||||
|
||||
def _get_config(self, branch, name):
|
||||
|
|
@ -351,9 +366,11 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
dict(config): Config of the instance"""
|
||||
config = {}
|
||||
if isinstance(branch, (tuple, list)):
|
||||
config[name] = {branch[1]: self.socket.do('GET', '/1.0/{0}/{1}/{2}'.format(to_native(branch[0]), to_native(name), to_native(branch[1])))}
|
||||
config[name] = {branch[1]: self.socket.do(
|
||||
'GET', '/1.0/{0}/{1}/{2}?{3}'.format(to_native(branch[0]), to_native(name), to_native(branch[1]), urlencode(dict(project=self.project))))}
|
||||
else:
|
||||
config[name] = {branch: self.socket.do('GET', '/1.0/{0}/{1}'.format(to_native(branch), to_native(name)))}
|
||||
config[name] = {branch: self.socket.do(
|
||||
'GET', '/1.0/{0}/{1}?{2}'.format(to_native(branch), to_native(name), urlencode(dict(project=self.project))))}
|
||||
return config
|
||||
|
||||
def get_instance_data(self, names):
|
||||
|
|
@ -583,6 +600,8 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
self._set_data_entry(instance_name, 'network_interfaces', self.extract_network_information_from_instance_config(instance_name))
|
||||
self._set_data_entry(instance_name, 'preferred_interface', self.get_prefered_instance_network_interface(instance_name))
|
||||
self._set_data_entry(instance_name, 'vlan_ids', self.get_instance_vlans(instance_name))
|
||||
self._set_data_entry(instance_name, 'project', self._get_data_entry(
|
||||
'instances/{0}/instances/metadata/project'.format(instance_name)))
|
||||
|
||||
def build_inventory_network(self, instance_name):
|
||||
"""Add the network interfaces of the instance to the inventory
|
||||
|
|
@ -686,6 +705,8 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
# add VLAN_ID information
|
||||
if self._get_data_entry('inventory/{0}/vlan_ids'.format(instance_name)):
|
||||
self.inventory.set_variable(instance_name, 'ansible_lxd_vlan_ids', self._get_data_entry('inventory/{0}/vlan_ids'.format(instance_name)))
|
||||
# add project
|
||||
self.inventory.set_variable(instance_name, 'ansible_lxd_project', self._get_data_entry('inventory/{0}/project'.format(instance_name)))
|
||||
|
||||
def build_inventory_groups_location(self, group_name):
|
||||
"""create group by attribute: location
|
||||
|
|
@ -761,6 +782,28 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
# Ignore invalid IP addresses returned by lxd
|
||||
pass
|
||||
|
||||
def build_inventory_groups_project(self, group_name):
|
||||
"""create group by attribute: project
|
||||
|
||||
Args:
|
||||
str(group_name): Group name
|
||||
Kwargs:
|
||||
None
|
||||
Raises:
|
||||
None
|
||||
Returns:
|
||||
None"""
|
||||
# maybe we just want to expand one group
|
||||
if group_name not in self.inventory.groups:
|
||||
self.inventory.add_group(group_name)
|
||||
|
||||
gen_instances = [
|
||||
instance_name for instance_name in self.inventory.hosts
|
||||
if 'ansible_lxd_project' in self.inventory.get_host(instance_name).get_vars()]
|
||||
for instance_name in gen_instances:
|
||||
if self.groupby[group_name].get('attribute').lower() == self.inventory.get_host(instance_name).get_vars().get('ansible_lxd_project'):
|
||||
self.inventory.add_child(group_name, instance_name)
|
||||
|
||||
def build_inventory_groups_os(self, group_name):
|
||||
"""create group by attribute: os
|
||||
|
||||
|
|
@ -899,6 +942,7 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
* 'profile'
|
||||
* 'vlanid'
|
||||
* 'type'
|
||||
* 'project'
|
||||
|
||||
Args:
|
||||
str(group_name): Group name
|
||||
|
|
@ -926,6 +970,8 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
self.build_inventory_groups_vlanid(group_name)
|
||||
elif self.groupby[group_name].get('type') == 'type':
|
||||
self.build_inventory_groups_type(group_name)
|
||||
elif self.groupby[group_name].get('type') == 'project':
|
||||
self.build_inventory_groups_project(group_name)
|
||||
else:
|
||||
raise AnsibleParserError('Unknown group type: {0}'.format(to_native(group_name)))
|
||||
|
||||
|
|
@ -1032,6 +1078,7 @@ class InventoryModule(BaseInventoryPlugin):
|
|||
try:
|
||||
self.client_key = self.get_option('client_key')
|
||||
self.client_cert = self.get_option('client_cert')
|
||||
self.project = self.get_option('project')
|
||||
self.debug = self.DEBUG
|
||||
self.data = {} # store for inventory-data
|
||||
self.groupby = self.get_option('groupby')
|
||||
|
|
|
|||
|
|
@ -46,6 +46,25 @@ DOCUMENTATION = '''
|
|||
description: use IPv6 type addresses
|
||||
type: boolean
|
||||
default: true
|
||||
udp_scan:
|
||||
description:
|
||||
- Scan via UDP.
|
||||
- Depending on your system you might need I(sudo=true) for this to work.
|
||||
type: boolean
|
||||
default: false
|
||||
version_added: 6.1.0
|
||||
icmp_timestamp:
|
||||
description:
|
||||
- Scan via ICMP Timestamp (C(-PP)).
|
||||
- Depending on your system you might need I(sudo=true) for this to work.
|
||||
type: boolean
|
||||
default: false
|
||||
version_added: 6.1.0
|
||||
dns_resolve:
|
||||
description: Whether to always (C(true)) or never (C(false)) do DNS resolution.
|
||||
type: boolean
|
||||
default: false
|
||||
version_added: 6.1.0
|
||||
notes:
|
||||
- At least one of ipv4 or ipv6 is required to be True, both can be True, but they cannot both be False.
|
||||
- 'TODO: add OS fingerprinting'
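A minimal inventory source sketch exercising the three options documented above; the network range is a placeholder, and ``sudo: true`` simply mirrors the note that UDP and ICMP timestamp scans usually need elevated privileges.

```yaml
# nmap.yml - scan a placeholder range with the options added in 6.1.0
plugin: community.general.nmap
address: 192.168.0.0/24      # placeholder network range
sudo: true                   # UDP and ICMP timestamp scans usually require this
udp_scan: true               # adds -sU to the nmap command line
icmp_timestamp: true         # adds -PP to the nmap command line
dns_resolve: false           # the option's default, per the description above
```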
|
||||
|
|
@ -166,6 +185,15 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
|||
cmd.append('--exclude')
|
||||
cmd.append(','.join(self._options['exclude']))
|
||||
|
||||
if self._options['dns_resolve']:
|
||||
cmd.append('-n')
|
||||
|
||||
if self._options['udp_scan']:
|
||||
cmd.append('-sU')
|
||||
|
||||
if self._options['icmp_timestamp']:
|
||||
cmd.append('-PP')
|
||||
|
||||
cmd.append(self._options['address'])
|
||||
try:
|
||||
# execute
|
||||
|
|
|
|||
|
|
@ -65,7 +65,7 @@ from sys import version as python_version
|
|||
from ansible.errors import AnsibleError
|
||||
from ansible.module_utils.urls import open_url
|
||||
from ansible.plugins.inventory import BaseInventoryPlugin
|
||||
from ansible.module_utils.common.text.converters import to_native, to_text
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
from ansible.module_utils.ansible_release import __version__ as ansible_version
|
||||
from ansible.module_utils.six.moves.urllib.parse import urljoin
|
||||
|
||||
|
|
|
|||
|
|
@ -277,6 +277,11 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
|||
credentials = urlencode({'username': self.proxmox_user, 'password': self.proxmox_password, })
|
||||
|
||||
a = self._get_session()
|
||||
|
||||
if a.verify is False:
|
||||
from requests.packages.urllib3 import disable_warnings
|
||||
disable_warnings()
|
||||
|
||||
ret = a.post('%s/api2/json/access/ticket' % self.proxmox_url, data=credentials)
|
||||
|
||||
json = ret.json()
|
||||
|
|
@ -408,7 +413,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
|||
stripped_value = value.strip()
|
||||
if stripped_value:
|
||||
parsed_key = key + "_parsed"
|
||||
properties[parsed_key] = [tag.strip() for tag in stripped_value.split(",")]
|
||||
properties[parsed_key] = [tag.strip() for tag in stripped_value.replace(',', ';').split(";")]
|
||||
|
||||
# The first field in the agent string tells you whether the agent is enabled
|
||||
# the rest of the comma separated string is extra config for the agent.
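The replacement line above normalises comma-separated tags to the semicolon form before splitting, so a guest tagged either ``web,prod`` or ``web;prod`` ends up with the same parsed list. A hedged sketch of consuming that list in the inventory configuration; the ``proxmox_tags_parsed`` variable name is an assumption derived from the ``_parsed`` suffix in the code:

```yaml
# proxmox.yml (excerpt) - build groups from the parsed tag list
plugin: community.general.proxmox
keyed_groups:
  - key: proxmox_tags_parsed   # assumed fact name: the original key plus the "_parsed" suffix
    prefix: tag
    separator: "_"
```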
|
||||
|
|
@ -615,7 +620,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):
|
|||
for o in ('url', 'user', 'password', 'token_id', 'token_secret'):
|
||||
v = self.get_option(o)
|
||||
if self.templar.is_template(v):
|
||||
v = self.templar.template(v, disable_looups=False)
|
||||
v = self.templar.template(v, disable_lookups=False)
|
||||
setattr(self, 'proxmox_%s' % o, v)
|
||||
|
||||
# some more cleanup and validation
|
||||
|
|
|
|||
|
|
@ -28,8 +28,12 @@ DOCUMENTATION = """
|
|||
default: name
|
||||
version_added: 5.7.0
|
||||
field:
|
||||
description: Field to fetch; leave unset to fetch whole response.
|
||||
description: Field to fetch. Leave unset to fetch whole response.
|
||||
type: str
|
||||
collection_id:
|
||||
description: Collection ID to filter results by collection. Leave unset to skip filtering.
|
||||
type: str
|
||||
version_added: 6.3.0
|
||||
"""
|
||||
|
||||
EXAMPLES = """
|
||||
|
|
@ -43,10 +47,20 @@ EXAMPLES = """
|
|||
msg: >-
|
||||
{{ lookup('community.general.bitwarden', 'bafba515-af11-47e6-abe3-af1200cd18b2', search='id', field='password') }}
|
||||
|
||||
- name: "Get 'password' from Bitwarden record named 'a_test' from collection"
|
||||
ansible.builtin.debug:
|
||||
msg: >-
|
||||
{{ lookup('community.general.bitwarden', 'a_test', field='password', collection_id='bafba515-af11-47e6-abe3-af1200cd18b2') }}
|
||||
|
||||
- name: "Get full Bitwarden record named 'a_test'"
|
||||
ansible.builtin.debug:
|
||||
msg: >-
|
||||
{{ lookup('community.general.bitwarden', 'a_test') }}
|
||||
|
||||
- name: "Get custom field 'api_key' from Bitwarden record named 'a_test'"
|
||||
ansible.builtin.debug:
|
||||
msg: >-
|
||||
{{ lookup('community.general.bitwarden', 'a_test', field='api_key') }}
|
||||
"""
|
||||
|
||||
RETURN = """
|
||||
|
|
@@ -78,7 +92,7 @@ class Bitwarden(object):
        return self._cli_path

    @property
    def logged_in(self):
    def unlocked(self):
        out, err = self._run(['status'], stdin="")
        decoded = AnsibleJSONDecoder().raw_decode(out)[0]
        return decoded['status'] == 'unlocked'

@@ -91,10 +105,17 @@ class Bitwarden(object):
            raise BitwardenException(err)
        return to_text(out, errors='surrogate_or_strict'), to_text(err, errors='surrogate_or_strict')

    def _get_matches(self, search_value, search_field):
    def _get_matches(self, search_value, search_field, collection_id):
        """Return matching records whose search_field is equal to key.
        """
        out, err = self._run(['list', 'items', '--search', search_value])

        # Prepare set of params for Bitwarden CLI
        params = ['list', 'items', '--search', search_value]

        if collection_id:
            params.extend(['--collectionid', collection_id])

        out, err = self._run(params)

        # This includes things that matched in different fields.
        initial_matches = AnsibleJSONDecoder().raw_decode(out)[0]
@@ -102,17 +123,27 @@ class Bitwarden(object):
        # Filter to only include results from the right field.
        return [item for item in initial_matches if item[search_field] == search_value]

    def get_field(self, field, search_value, search_field="name"):
        """Return a list of the specified field for records whose search_field match search_value.
    def get_field(self, field, search_value, search_field="name", collection_id=None):
        """Return a list of the specified field for records whose search_field match search_value
        and filtered by collection if collection has been provided.

        If field is None, return the whole record for each match.
        """
        matches = self._get_matches(search_value, search_field)
        matches = self._get_matches(search_value, search_field, collection_id)

        if field:
            if field in ['autofillOnPageLoad', 'password', 'passwordRevisionDate', 'totp', 'uris', 'username']:
                return [match['login'][field] for match in matches]

        return matches
        elif not field:
            return matches
        else:
            custom_field_matches = []
            for match in matches:
                for custom_field in match['fields']:
                    if custom_field['name'] == field:
                        custom_field_matches.append(custom_field['value'])
            if matches and not custom_field_matches:
                raise AnsibleError("Custom field {field} does not exist in {search_value}".format(field=field, search_value=search_value))
            return custom_field_matches


class LookupModule(LookupBase):
@@ -121,10 +152,11 @@ class LookupModule(LookupBase):
        self.set_options(var_options=variables, direct=kwargs)
        field = self.get_option('field')
        search_field = self.get_option('search')
        if not _bitwarden.logged_in:
            raise AnsibleError("Not logged into Bitwarden. Run 'bw login'.")
        collection_id = self.get_option('collection_id')
        if not _bitwarden.unlocked:
            raise AnsibleError("Bitwarden Vault locked. Run 'bw unlock'.")

        return [_bitwarden.get_field(field, term, search_field) for term in terms]
        return [_bitwarden.get_field(field, term, search_field, collection_id) for term in terms]


_bitwarden = Bitwarden()
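For orientation, a minimal sketch of how the field dispatch above behaves, assuming a Bitwarden() instance with an unlocked vault; the record name 'a_test' and the field names are illustrative only, and the collection ID is the one reused from the EXAMPLES block:

# Hypothetical direct use of the lookup's internal helper (not part of the plugin itself)
bw = Bitwarden()

# 'password' is a login attribute, so it is read from match['login']['password']
passwords = bw.get_field('password', 'a_test', search_field='name')

# 'api_key' is not a login attribute, so each match's custom fields are scanned;
# an AnsibleError is raised if a match exists but has no such custom field
api_keys = bw.get_field('api_key', 'a_test', collection_id='bafba515-af11-47e6-abe3-af1200cd18b2')

# With field=None the whole record (a dict) is returned for each match
records = bw.get_field(None, 'a_test')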
@@ -66,7 +66,12 @@ class LookupModule(LookupBase):
        """
        results = []
        for x in terms:
            intermediate = listify_lookup_plugin_terms(x, templar=self._templar, loader=self._loader)
            try:
                intermediate = listify_lookup_plugin_terms(x, templar=self._templar)
            except TypeError:
                # The loader argument is deprecated in ansible-core 2.14+. Fall back to
                # pre-2.14 behavior for older ansible-core versions.
                intermediate = listify_lookup_plugin_terms(x, templar=self._templar, loader=self._loader)
            results.append(intermediate)
        return results
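The try/except above is a small compatibility shim: call the newer signature first and only fall back when the keyword is rejected. A generic sketch of the same pattern, where render_terms is a purely hypothetical helper name:

def render_terms(terms, templar, loader):
    # Prefer the newer call signature; only retry if this ansible-core rejects it.
    try:
        return listify_lookup_plugin_terms(terms, templar=templar)
    except TypeError:
        # Older ansible-core: the loader argument is still required.
        return listify_lookup_plugin_terms(terms, templar=templar, loader=loader)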
@@ -105,7 +105,6 @@ RETURN = """
      type: dict
"""

import os
from ansible.module_utils.six.moves.urllib.parse import urlparse
from ansible.errors import AnsibleError, AnsibleAssertionError
from ansible.plugins.lookup import LookupBase
@@ -93,8 +93,6 @@ RETURN = """
      type: str
"""

import os

from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
@@ -80,7 +80,6 @@ from subprocess import Popen

from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
from ansible.parsing.splitter import parse_kv
from ansible.module_utils.common.text.converters import to_bytes, to_text, to_native
from ansible.utils.display import Display
@@ -125,8 +125,16 @@ from ansible.errors import AnsibleLookupError
from ansible.module_utils.common._collections_compat import Mapping, Sequence
from ansible.module_utils.six import string_types
from ansible.plugins.lookup import LookupBase
from ansible.release import __version__ as ansible_version
from ansible.template import Templar

from ansible_collections.community.general.plugins.module_utils.version import LooseVersion


# Whether Templar has a cache, which can be controlled by Templar.template()'s cache option.
# The cache was removed for ansible-core 2.14 (https://github.com/ansible/ansible/pull/78419)
_TEMPLAR_HAS_TEMPLATE_CACHE = LooseVersion(ansible_version) < LooseVersion('2.14.0')


class LookupModule(LookupBase):
    def __evaluate(self, expression, templar, variables):
@@ -136,7 +144,10 @@ class LookupModule(LookupBase):
        ``variables`` are the variables to use.
        """
        templar.available_variables = variables or {}
        return templar.template("{0}{1}{2}".format("{{", expression, "}}"), cache=False)
        expression = "{0}{1}{2}".format("{{", expression, "}}")
        if _TEMPLAR_HAS_TEMPLATE_CACHE:
            return templar.template(expression, cache=False)
        return templar.template(expression)

    def __process(self, result, terms, index, current, templar, variables):
        """Fills ``result`` list with evaluated items.
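The cache keyword is only passed when the module-level flag computed above says the running ansible-core still supports it. A compact sketch of the same version-gated keyword pattern, assuming templar and expression as in the hunk above:

# Version-gated keyword argument: only ansible-core < 2.14 accepts cache=...
kwargs = {'cache': False} if _TEMPLAR_HAS_TEMPLATE_CACHE else {}
rendered = templar.template(expression, **kwargs)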
@@ -35,9 +35,10 @@ DOCUMENTATION = '''
    description:
      - Record type to query.
      - C(DLV) has been removed in community.general 6.0.0.
      - C(CAA) has been added in community.general 6.3.0.
    type: str
    default: 'A'
    choices: [A, ALL, AAAA, CNAME, DNAME, DNSKEY, DS, HINFO, LOC, MX, NAPTR, NS, NSEC3PARAM, PTR, RP, RRSIG, SOA, SPF, SRV, SSHFP, TLSA, TXT]
    choices: [A, ALL, AAAA, CAA, CNAME, DNAME, DNSKEY, DS, HINFO, LOC, MX, NAPTR, NS, NSEC3PARAM, PTR, RP, RRSIG, SOA, SPF, SRV, SSHFP, TLSA, TXT]
  flat:
    description: If 0 each record is returned as a dictionary, otherwise a string.
    type: int
@@ -129,6 +130,12 @@ RETURN = """
  AAAA:
    description:
      - address
  CAA:
    description:
      - flags
      - tag
      - value
    version_added: 6.3.0
  CNAME:
    description:
      - target
@@ -198,7 +205,7 @@ try:
    import dns.resolver
    import dns.reversename
    import dns.rdataclass
    from dns.rdatatype import (A, AAAA, CNAME, DNAME, DNSKEY, DS, HINFO, LOC,
    from dns.rdatatype import (A, AAAA, CAA, CNAME, DNAME, DNSKEY, DS, HINFO, LOC,
                               MX, NAPTR, NS, NSEC3PARAM, PTR, RP, SOA, SPF, SRV, SSHFP, TLSA, TXT)
    HAVE_DNS = True
except ImportError:
@@ -218,6 +225,7 @@ def make_rdata_dict(rdata):
    supported_types = {
        A: ['address'],
        AAAA: ['address'],
        CAA: ['flags', 'tag', 'value'],
        CNAME: ['target'],
        DNAME: ['target'],
        DNSKEY: ['flags', 'algorithm', 'protocol', 'key'],
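Outside of the lookup, the same three CAA attributes can be inspected directly with dnspython; a hedged sketch, assuming dnspython 2.x is installed and that example.com publishes CAA records:

import dns.resolver

# Each CAA rdata exposes exactly the fields listed in supported_types above.
for rdata in dns.resolver.resolve('example.com', 'CAA'):
    print(rdata.flags, rdata.tag, rdata.value)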
@@ -230,7 +238,7 @@ def make_rdata_dict(rdata):
        NSEC3PARAM: ['algorithm', 'flags', 'iterations', 'salt'],
        PTR: ['target'],
        RP: ['mbox', 'txt'],
        # RRSIG: ['algorithm', 'labels', 'original_ttl', 'expiration', 'inception', 'signature'],
        # RRSIG: ['type_covered', 'algorithm', 'labels', 'original_ttl', 'expiration', 'inception', 'key_tag', 'signer', 'signature'],
        SOA: ['mname', 'rname', 'serial', 'refresh', 'retry', 'expire', 'minimum'],
        SPF: ['strings'],
        SRV: ['priority', 'weight', 'port', 'target'],
@@ -251,6 +259,8 @@ def make_rdata_dict(rdata):

            if rdata.rdtype == DS and f == 'digest':
                val = dns.rdata._hexify(rdata.digest).replace(' ', '')
            if rdata.rdtype == DNSKEY and f == 'algorithm':
                val = int(val)
            if rdata.rdtype == DNSKEY and f == 'key':
                val = dns.rdata._base64ify(rdata.key).replace(' ', '')
            if rdata.rdtype == NSEC3PARAM and f == 'salt':
@ -136,12 +136,11 @@ RETURN = '''
|
|||
|
||||
import re
|
||||
|
||||
from ansible.plugins.lookup import LookupBase
|
||||
from ansible.utils.display import Display
|
||||
from ansible.errors import AnsibleLookupError
|
||||
from ansible.module_utils.basic import missing_required_lib
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible.plugins.lookup import LookupBase
|
||||
from ansible.errors import AnsibleError, AnsibleLookupError
|
||||
from ansible.utils.display import Display
|
||||
|
||||
try:
|
||||
import etcd3
|
||||
|
|
|
|||
|
|
@ -67,7 +67,12 @@ class LookupModule(LookupBase):
|
|||
|
||||
if isinstance(term, string_types):
|
||||
# convert a variable to a list
|
||||
term2 = listify_lookup_plugin_terms(term, templar=self._templar, loader=self._loader)
|
||||
try:
|
||||
term2 = listify_lookup_plugin_terms(term, templar=self._templar)
|
||||
except TypeError:
|
||||
# The loader argument is deprecated in ansible-core 2.14+. Fall back to
|
||||
# pre-2.14 behavior for older ansible-core versions.
|
||||
term2 = listify_lookup_plugin_terms(term, templar=self._templar, loader=self._loader)
|
||||
# but avoid converting a plain string to a list of one string
|
||||
if term2 != [term]:
|
||||
term = term2
|
||||
|
|
|
|||
|
|
@ -61,8 +61,6 @@ RETURN = """
|
|||
elements: str
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
from ansible.plugins.lookup import LookupBase
|
||||
from ansible.utils.cmd_functions import run_cmd
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
|
|
|
|||
|
|
@ -69,7 +69,6 @@ from ansible.utils.display import Display
|
|||
from traceback import format_exception
|
||||
import json
|
||||
import sys
|
||||
import os
|
||||
|
||||
display = Display()
|
||||
|
||||
|
|
|
|||
|
|
@ -32,7 +32,7 @@ DOCUMENTATION = '''
|
|||
section:
|
||||
description: Item section containing the field to retrieve (case-insensitive). If absent will return first match from any section.
|
||||
domain:
|
||||
description: Domain of 1Password. Default is U(1password.com).
|
||||
description: Domain of 1Password.
|
||||
version_added: 3.2.0
|
||||
default: '1password.com'
|
||||
type: str
|
||||
|
|
@ -488,7 +488,7 @@ class OnePassCLIv2(OnePassCLIBase):
|
|||
account = "{subdomain}.{domain}".format(subdomain=self.subdomain, domain=self.domain)
|
||||
args.extend(["--account", account])
|
||||
|
||||
rc, out, err = self._run(args)
|
||||
rc, out, err = self._run(args, ignore_errors=True)
|
||||
|
||||
return not bool(rc)
|
||||
|
||||
|
|
|
|||
|
|
@ -30,6 +30,11 @@ DOCUMENTATION = '''
|
|||
description: Item section containing the field to retrieve (case-insensitive). If absent will return first match from any section.
|
||||
subdomain:
|
||||
description: The 1Password subdomain to authenticate against.
|
||||
domain:
|
||||
description: Domain of 1Password.
|
||||
version_added: 6.0.0
|
||||
default: '1password.com'
|
||||
type: str
|
||||
username:
|
||||
description: The username used to sign in.
|
||||
secret_key:
|
||||
|
|
|
|||
|
|
@ -73,8 +73,6 @@ _raw:
|
|||
elements: str
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
HAVE_REDIS = False
|
||||
try:
|
||||
import redis
|
||||
|
|
|
|||
|
|
@@ -88,9 +88,10 @@ class FormatError(CmdRunnerException):


class _ArgFormat(object):
    def __init__(self, func, ignore_none=None):
    def __init__(self, func, ignore_none=None, ignore_missing_value=False):
        self.func = func
        self.ignore_none = ignore_none
        self.ignore_missing_value = ignore_missing_value

    def __call__(self, value, ctx_ignore_none):
        ignore_none = self.ignore_none if self.ignore_none is not None else ctx_ignore_none

@@ -102,8 +103,13 @@ class _ArgFormat(object):

class _Format(object):
    @staticmethod
    def as_bool(args):
        return _ArgFormat(lambda value: _ensure_list(args) if value else [])
    def as_bool(args_true, args_false=None, ignore_none=None):
        if args_false is not None:
            if ignore_none is None:
                ignore_none = False
        else:
            args_false = []
        return _ArgFormat(lambda value: _ensure_list(args_true) if value else _ensure_list(args_false), ignore_none=ignore_none)

    @staticmethod
    def as_bool_not(args):

@@ -127,7 +133,7 @@ class _Format(object):

    @staticmethod
    def as_fixed(args):
        return _ArgFormat(lambda value: _ensure_list(args), ignore_none=False)
        return _ArgFormat(lambda value: _ensure_list(args), ignore_none=False, ignore_missing_value=True)

    @staticmethod
    def as_func(func, ignore_none=None):
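A small sketch of what the extended as_bool() signature allows, assuming the public cmd_runner_fmt alias exported by this module; the option strings are made up for illustration:

from ansible_collections.community.general.plugins.module_utils.cmd_runner import cmd_runner_fmt

# True -> ['--enable'], False -> ['--disable'] (two-sided flag, new in this change)
toggle = cmd_runner_fmt.as_bool('--enable', '--disable')

# True -> ['--force'], False -> [] (one-sided flag, the previous behaviour)
force = cmd_runner_fmt.as_bool('--force')

# as_fixed() now also tolerates the argument being absent from run() entirely,
# because it sets ignore_missing_value=True.
version_flag = cmd_runner_fmt.as_fixed('--version')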
@@ -135,14 +141,15 @@ class _Format(object):

    @staticmethod
    def as_map(_map, default=None, ignore_none=None):
        if default is None:
            default = []
        return _ArgFormat(lambda value: _ensure_list(_map.get(value, default)), ignore_none=ignore_none)

    @staticmethod
    def as_default_type(_type, arg="", ignore_none=None):
        fmt = _Format
        if _type == "dict":
            return fmt.as_func(lambda d: ["--{0}={1}".format(*a) for a in iteritems(d)],
                               ignore_none=ignore_none)
            return fmt.as_func(lambda d: ["--{0}={1}".format(*a) for a in iteritems(d)], ignore_none=ignore_none)
        if _type == "list":
            return fmt.as_func(lambda value: ["--{0}".format(x) for x in value], ignore_none=ignore_none)
        if _type == "bool":
@@ -261,10 +268,13 @@ class _CmdRunnerContext(object):
        for arg_name in self.args_order:
            value = None
            try:
                value = named_args[arg_name]
                if arg_name in named_args:
                    value = named_args[arg_name]
                elif not runner.arg_formats[arg_name].ignore_missing_value:
                    raise MissingArgumentValue(self.args_order, arg_name)
                self.cmd.extend(runner.arg_formats[arg_name](value, ctx_ignore_none=self.ignore_value_none))
            except KeyError:
                raise MissingArgumentValue(self.args_order, arg_name)
            except MissingArgumentValue:
                raise
            except Exception as e:
                raise FormatError(arg_name, value, runner.arg_formats[arg_name], e)
plugins/module_utils/deps.py (new file, 90 lines)
@@ -0,0 +1,90 @@
# -*- coding: utf-8 -*-
# (c) 2022, Alexei Znamensky <russoz@gmail.com>
# Copyright (c) 2022, Ansible Project
# Simplified BSD License (see LICENSES/BSD-2-Clause.txt or https://opensource.org/licenses/BSD-2-Clause)
# SPDX-License-Identifier: BSD-2-Clause

from __future__ import absolute_import, division, print_function
__metaclass__ = type


import traceback
from contextlib import contextmanager

from ansible.module_utils.common.text.converters import to_native
from ansible.module_utils.basic import missing_required_lib


_deps = dict()


class _Dependency(object):
    _states = ["pending", "failure", "success"]

    def __init__(self, name, reason=None, url=None, msg=None):
        self.name = name
        self.reason = reason
        self.url = url
        self.msg = msg

        self.state = 0
        self.trace = None
        self.exc = None

    def succeed(self):
        self.state = 2

    def fail(self, exc, trace):
        self.state = 1
        self.exc = exc
        self.trace = trace

    @property
    def message(self):
        if self.msg:
            return to_native(self.msg)
        else:
            return missing_required_lib(self.name, reason=self.reason, url=self.url)

    @property
    def failed(self):
        return self.state == 1

    def verify(self, module):
        if self.failed:
            module.fail_json(msg=self.message, exception=self.trace)

    def __str__(self):
        return "<dependency: {0} [{1}]>".format(self.name, self._states[self.state])


@contextmanager
def declare(name, *args, **kwargs):
    dep = _Dependency(name, *args, **kwargs)
    try:
        yield dep
    except Exception as e:
        dep.fail(e, traceback.format_exc())
    else:
        dep.succeed()
    finally:
        _deps[name] = dep


def validate(module, spec=None):
    dep_names = sorted(_deps)

    if spec is not None:
        if spec.startswith("-"):
            spec_split = spec[1:].split(":")
            for d in spec_split:
                dep_names.remove(d)
        else:
            spec_split = spec[1:].split(":")
            dep_names = []
            for d in spec_split:
                _deps[d]  # ensure it exists
                dep_names.append(d)

    for dep in dep_names:
        _deps[dep].verify(module)
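A hedged sketch of how a module might consume this helper, based only on the declare()/validate() functions above; the module body and the requests dependency are illustrative:

from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils import deps

# Import the third-party library inside declare(): a failed import is recorded
# instead of raising at import time.
with deps.declare("requests", reason="needed to talk to the remote API"):
    import requests


def main():
    module = AnsibleModule(argument_spec={})
    # Calls fail_json() with a missing_required_lib()-style message if the import failed.
    deps.validate(module)
    module.exit_json(changed=False)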
@ -19,15 +19,16 @@ import os
|
|||
import re
|
||||
import traceback
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
|
||||
# (TODO: remove AnsibleModule from next line!)
|
||||
from ansible.module_utils.basic import AnsibleModule, missing_required_lib # noqa: F401, pylint: disable=unused-import
|
||||
from ansible.module_utils.six.moves import configparser
|
||||
from os.path import expanduser
|
||||
from uuid import UUID
|
||||
|
||||
LIBCLOUD_IMP_ERR = None
|
||||
try:
|
||||
from libcloud.common.dimensiondata import API_ENDPOINTS, DimensionDataAPIException, DimensionDataStatus
|
||||
from libcloud.compute.base import Node, NodeLocation
|
||||
from libcloud.common.dimensiondata import API_ENDPOINTS, DimensionDataAPIException, DimensionDataStatus # noqa: F401, pylint: disable=unused-import
|
||||
from libcloud.compute.base import Node, NodeLocation # noqa: F401, pylint: disable=unused-import
|
||||
from libcloud.compute.providers import get_driver
|
||||
from libcloud.compute.types import Provider
|
||||
|
||||
|
|
|
|||
|
|
@@ -6,7 +6,14 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type

from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, cmd_runner_fmt as fmt
from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, cmd_runner_fmt


_state_map = {
    "present": "--set",
    "absent": "--unset",
    "get": "--get",
}


def gconftool2_runner(module, **kwargs):

@@ -14,14 +21,12 @@ def gconftool2_runner(module, **kwargs):
        module,
        command='gconftool-2',
        arg_formats=dict(
            key=fmt.as_list(),
            value_type=fmt.as_opt_val("--type"),
            value=fmt.as_list(),
            direct=fmt.as_bool("--direct"),
            config_source=fmt.as_opt_val("--config-source"),
            get=fmt.as_bool("--get"),
            set_arg=fmt.as_bool("--set"),
            unset=fmt.as_bool("--unset"),
            state=cmd_runner_fmt.as_map(_state_map),
            key=cmd_runner_fmt.as_list(),
            value_type=cmd_runner_fmt.as_opt_val("--type"),
            value=cmd_runner_fmt.as_list(),
            direct=cmd_runner_fmt.as_bool("--direct"),
            config_source=cmd_runner_fmt.as_opt_val("--config-source"),
        ),
        **kwargs
    )
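A sketch of how the rewritten runner might be driven, assuming the CmdRunner context API used elsewhere in this collection (with runner(...) as ctx: ctx.run(...)) and its default output processing; the key and value are examples only:

runner = gconftool2_runner(module, check_rc=True)

# 'state' is rendered through _state_map, so state="present" becomes "--set".
with runner("direct config_source state key value_type value") as ctx:
    rc, out, err = ctx.run(state="present", key="/apps/example/key",
                           value_type="string", value="hello")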
@@ -110,3 +110,14 @@ def gitlab_authentication(module):
                             GitLab remove Session API now that private tokens are removed from user API endpoints since version 10.2." % to_native(e))

    return gitlab_instance


def filter_returned_variables(gitlab_variables):
    # pop properties we don't know
    existing_variables = [dict(x.attributes) for x in gitlab_variables]
    KNOWN = ['key', 'value', 'masked', 'protected', 'variable_type', 'environment_scope']
    for item in existing_variables:
        for key in list(item.keys()):
            if key not in KNOWN:
                item.pop(key)
    return existing_variables
@ -42,6 +42,7 @@ URL_CLIENTTEMPLATE = "{url}/admin/realms/{realm}/client-templates/{id}"
|
|||
URL_CLIENTTEMPLATES = "{url}/admin/realms/{realm}/client-templates"
|
||||
URL_GROUPS = "{url}/admin/realms/{realm}/groups"
|
||||
URL_GROUP = "{url}/admin/realms/{realm}/groups/{groupid}"
|
||||
URL_GROUP_CHILDREN = "{url}/admin/realms/{realm}/groups/{groupid}/children"
|
||||
|
||||
URL_CLIENTSCOPES = "{url}/admin/realms/{realm}/client-scopes"
|
||||
URL_CLIENTSCOPE = "{url}/admin/realms/{realm}/client-scopes/{id}"
|
||||
|
|
@ -58,6 +59,8 @@ URL_CLIENT_USER_ROLEMAPPINGS = "{url}/admin/realms/{realm}/users/{id}/role-mappi
|
|||
URL_CLIENT_USER_ROLEMAPPINGS_AVAILABLE = "{url}/admin/realms/{realm}/users/{id}/role-mappings/clients/{client}/available"
|
||||
URL_CLIENT_USER_ROLEMAPPINGS_COMPOSITE = "{url}/admin/realms/{realm}/users/{id}/role-mappings/clients/{client}/composite"
|
||||
|
||||
URL_CLIENTSECRET = "{url}/admin/realms/{realm}/clients/{id}/client-secret"
|
||||
|
||||
URL_AUTHENTICATION_FLOWS = "{url}/admin/realms/{realm}/authentication/flows"
|
||||
URL_AUTHENTICATION_FLOW = "{url}/admin/realms/{realm}/authentication/flows/{id}"
|
||||
URL_AUTHENTICATION_FLOW_COPY = "{url}/admin/realms/{realm}/authentication/flows/{copyfrom}/copy"
|
||||
|
|
@ -606,7 +609,7 @@ class KeycloakAPI(object):
|
|||
"""
|
||||
available_rolemappings_url = URL_CLIENT_GROUP_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=gid, client=cid)
|
||||
try:
|
||||
open_url(available_rolemappings_url, method="DELETE", http_agent=self.http_agent, headers=self.restheaders,
|
||||
open_url(available_rolemappings_url, method="DELETE", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep),
|
||||
validate_certs=self.validate_certs, timeout=self.connection_timeout)
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not delete available rolemappings for client %s in group %s, realm %s: %s"
|
||||
|
|
@ -1160,6 +1163,52 @@ class KeycloakAPI(object):
|
|||
self.module.fail_json(msg='Could not update protocolmappers for clientscope %s in realm %s: %s'
|
||||
% (mapper_rep, realm, str(e)))
|
||||
|
||||
def create_clientsecret(self, id, realm="master"):
|
||||
""" Generate a new client secret by id
|
||||
|
||||
:param id: id (not clientId) of client to be queried
|
||||
:param realm: client from this realm
|
||||
:return: dict of credential representation
|
||||
"""
|
||||
clientsecret_url = URL_CLIENTSECRET.format(url=self.baseurl, realm=realm, id=id)
|
||||
|
||||
try:
|
||||
return json.loads(to_native(open_url(clientsecret_url, method='POST', headers=self.restheaders, timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
|
||||
except HTTPError as e:
|
||||
if e.code == 404:
|
||||
return None
|
||||
else:
|
||||
self.module.fail_json(msg='Could not obtain clientsecret of client %s for realm %s: %s'
|
||||
% (id, realm, str(e)))
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg='Could not obtain clientsecret of client %s for realm %s: %s'
|
||||
% (id, realm, str(e)))
|
||||
|
||||
def get_clientsecret(self, id, realm="master"):
|
||||
""" Obtain client secret by id
|
||||
|
||||
:param id: id (not clientId) of client to be queried
|
||||
:param realm: client from this realm
|
||||
:return: dict of credential representation
|
||||
"""
|
||||
clientsecret_url = URL_CLIENTSECRET.format(url=self.baseurl, realm=realm, id=id)
|
||||
|
||||
try:
|
||||
return json.loads(to_native(open_url(clientsecret_url, method='GET', headers=self.restheaders, timeout=self.connection_timeout,
|
||||
validate_certs=self.validate_certs).read()))
|
||||
|
||||
except HTTPError as e:
|
||||
if e.code == 404:
|
||||
return None
|
||||
else:
|
||||
self.module.fail_json(msg='Could not obtain clientsecret of client %s for realm %s: %s'
|
||||
% (id, realm, str(e)))
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg='Could not obtain clientsecret of client %s for realm %s: %s'
|
||||
% (id, realm, str(e)))
|
||||
|
||||
def get_groups(self, realm="master"):
|
||||
""" Fetch the name and ID of all groups on the Keycloak server.
|
||||
|
||||
|
|
@ -1201,7 +1250,7 @@ class KeycloakAPI(object):
|
|||
self.module.fail_json(msg="Could not fetch group %s in realm %s: %s"
|
||||
% (gid, realm, str(e)))
|
||||
|
||||
def get_group_by_name(self, name, realm="master"):
|
||||
def get_group_by_name(self, name, realm="master", parents=None):
|
||||
""" Fetch a keycloak group within a realm based on its name.
|
||||
|
||||
The Keycloak API does not allow filtering of the Groups resource by name.
|
||||
|
|
@ -1211,10 +1260,19 @@ class KeycloakAPI(object):
|
|||
If the group does not exist, None is returned.
|
||||
:param name: Name of the group to fetch.
|
||||
:param realm: Realm in which the group resides; default 'master'
|
||||
:param parents: Optional list of parents when group to look for is a subgroup
|
||||
"""
|
||||
groups_url = URL_GROUPS.format(url=self.baseurl, realm=realm)
|
||||
try:
|
||||
all_groups = self.get_groups(realm=realm)
|
||||
if parents:
|
||||
parent = self.get_subgroup_direct_parent(parents, realm)
|
||||
|
||||
if not parent:
|
||||
return None
|
||||
|
||||
all_groups = parent['subGroups']
|
||||
else:
|
||||
all_groups = self.get_groups(realm=realm)
|
||||
|
||||
for group in all_groups:
|
||||
if group['name'] == name:
|
||||
|
|
@ -1226,6 +1284,102 @@ class KeycloakAPI(object):
|
|||
self.module.fail_json(msg="Could not fetch group %s in realm %s: %s"
|
||||
% (name, realm, str(e)))
|
||||
|
||||
def _get_normed_group_parent(self, parent):
|
||||
""" Converts parent dict information into a more easy to use form.
|
||||
|
||||
:param parent: parent describing dict
|
||||
"""
|
||||
if parent['id']:
|
||||
return (parent['id'], True)
|
||||
|
||||
return (parent['name'], False)
|
||||
|
||||
def get_subgroup_by_chain(self, name_chain, realm="master"):
|
||||
""" Access a subgroup API object by walking down a given name/id chain.
|
||||
|
||||
Groups can be given either as by name or by ID, the first element
|
||||
must either be a toplvl group or given as ID, all parents must exist.
|
||||
|
||||
If the group cannot be found, None is returned.
|
||||
:param name_chain: Topdown ordered list of subgroup parent (ids or names) + its own name at the end
|
||||
:param realm: Realm in which the group resides; default 'master'
|
||||
"""
|
||||
cp = name_chain[0]
|
||||
|
||||
# for 1st parent in chain we must query the server
|
||||
cp, is_id = self._get_normed_group_parent(cp)
|
||||
|
||||
if is_id:
|
||||
tmp = self.get_group_by_groupid(cp, realm=realm)
|
||||
else:
|
||||
# given as name, assume toplvl group
|
||||
tmp = self.get_group_by_name(cp, realm=realm)
|
||||
|
||||
if not tmp:
|
||||
return None
|
||||
|
||||
for p in name_chain[1:]:
|
||||
for sg in tmp['subGroups']:
|
||||
pv, is_id = self._get_normed_group_parent(p)
|
||||
|
||||
if is_id:
|
||||
cmpkey = "id"
|
||||
else:
|
||||
cmpkey = "name"
|
||||
|
||||
if pv == sg[cmpkey]:
|
||||
tmp = sg
|
||||
break
|
||||
|
||||
if not tmp:
|
||||
return None
|
||||
|
||||
return tmp
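A hedged sketch of how the chain walk above could be called, assuming a KeycloakAPI instance named kc; the group names and the truncated ID are illustrative only:

# Walk from a top-level group down to a nested subgroup by name ...
group = kc.get_subgroup_by_chain(
    [{'name': 'engineering', 'id': None},
     {'name': 'backend', 'id': None},
     {'name': 'oncall', 'id': None}],
    realm='master')

# ... or anchor the chain with an ID so that the upper levels can be skipped.
group = kc.get_subgroup_by_chain(
    [{'name': None, 'id': 'a1b2c3d4-0000-0000-0000-000000000000'},  # illustrative parent ID
     {'name': 'oncall', 'id': None}],
    realm='master')

if group is not None:
    print(group['id'], group['name'])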
|
||||
|
||||
    def get_subgroup_direct_parent(self, parents, realm="master", children_to_resolve=None):
        """ Get the Keycloak API object of the direct parent group for a given chain of parents.

        To successfully work with the API for subgroups we do not actually need
        to "walk the whole tree" for nested groups; we only need to know
        the ID of the direct predecessor of the current subgroup. This
        method guarantees that information while doing as little work as possible.

        Note that the given parent list can be incomplete at the
        upper levels as long as it starts with an ID instead of a name.

        If the group does not exist, None is returned.
        :param parents: Topdown ordered list of subgroup parents
        :param realm: Realm in which the group resides; default 'master'
        """
        if children_to_resolve is None:
            # start recursion by reversing parents (in optimal cases
            # we don't need to walk the whole tree upwards)
            parents = list(reversed(parents))
            children_to_resolve = []
if not parents:
|
||||
# walk complete parents list to the top, all names, no id's,
|
||||
# try to resolve it assuming list is complete and 1st
|
||||
# element is a toplvl group
|
||||
return self.get_subgroup_by_chain(list(reversed(children_to_resolve)), realm=realm)
|
||||
|
||||
cp = parents[0]
|
||||
unused, is_id = self._get_normed_group_parent(cp)
|
||||
|
||||
if is_id:
|
||||
# current parent is given as ID, we can stop walking
|
||||
# upwards searching for an entry point
|
||||
return self.get_subgroup_by_chain([cp] + list(reversed(children_to_resolve)), realm=realm)
|
||||
else:
|
||||
# current parent is given as name, it must be resolved
|
||||
# later, try next parent (recurse)
|
||||
children_to_resolve.append(cp)
|
||||
return self.get_subgroup_direct_parent(
|
||||
parents[1:],
|
||||
realm=realm, children_to_resolve=children_to_resolve
|
||||
)
|
||||
|
||||
def create_group(self, grouprep, realm="master"):
|
||||
""" Create a Keycloak group.
|
||||
|
||||
|
|
@ -1240,6 +1394,34 @@ class KeycloakAPI(object):
|
|||
self.module.fail_json(msg="Could not create group %s in realm %s: %s"
|
||||
% (grouprep['name'], realm, str(e)))
|
||||
|
||||
def create_subgroup(self, parents, grouprep, realm="master"):
|
||||
""" Create a Keycloak subgroup.
|
||||
|
||||
:param parents: list of one or more parent groups
|
||||
:param grouprep: a GroupRepresentation of the group to be created. Must contain at minimum the field name.
|
||||
:return: HTTPResponse object on success
|
||||
"""
|
||||
parent_id = "---UNDETERMINED---"
|
||||
try:
|
||||
parent_id = self.get_subgroup_direct_parent(parents, realm)
|
||||
|
||||
if not parent_id:
|
||||
raise Exception(
|
||||
"Could not determine subgroup parent ID for given"
|
||||
" parent chain {0}. Assure that all parents exist"
|
||||
" already and the list is complete and properly"
|
||||
" ordered, starts with an ID or starts at the"
|
||||
" top level".format(parents)
|
||||
)
|
||||
|
||||
parent_id = parent_id["id"]
|
||||
url = URL_GROUP_CHILDREN.format(url=self.baseurl, realm=realm, groupid=parent_id)
|
||||
return open_url(url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout,
|
||||
data=json.dumps(grouprep), validate_certs=self.validate_certs)
|
||||
except Exception as e:
|
||||
self.module.fail_json(msg="Could not create subgroup %s for parent group %s in realm %s: %s"
|
||||
% (grouprep['name'], parent_id, realm, str(e)))
|
||||
|
||||
def update_group(self, grouprep, realm="master"):
|
||||
""" Update an existing group.
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,77 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2022, John Cant <a.johncant@gmail.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible.module_utils.basic import AnsibleModule
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak import \
|
||||
keycloak_argument_spec
|
||||
|
||||
|
||||
def keycloak_clientsecret_module():
|
||||
"""
|
||||
Returns an AnsibleModule definition for modules that interact with a client
|
||||
secret.
|
||||
|
||||
:return: argument_spec dict
|
||||
"""
|
||||
argument_spec = keycloak_argument_spec()
|
||||
|
||||
meta_args = dict(
|
||||
realm=dict(default='master'),
|
||||
id=dict(type='str'),
|
||||
client_id=dict(type='str', aliases=['clientId']),
|
||||
)
|
||||
|
||||
argument_spec.update(meta_args)
|
||||
|
||||
module = AnsibleModule(
|
||||
argument_spec=argument_spec,
|
||||
supports_check_mode=True,
|
||||
required_one_of=([['id', 'client_id'],
|
||||
['token', 'auth_realm', 'auth_username', 'auth_password']]),
|
||||
required_together=([['auth_realm', 'auth_username', 'auth_password']]),
|
||||
mutually_exclusive=[
|
||||
['token', 'auth_realm'],
|
||||
['token', 'auth_username'],
|
||||
['token', 'auth_password']
|
||||
])
|
||||
|
||||
return module
|
||||
|
||||
|
||||
def keycloak_clientsecret_module_resolve_params(module, kc):
|
||||
"""
|
||||
Given an AnsibleModule definition for keycloak_clientsecret_*, and a
|
||||
KeycloakAPI client, resolve the params needed to interact with the Keycloak
|
||||
client secret, looking up the client by clientId if necessary via an API
|
||||
call.
|
||||
|
||||
:return: tuple of id, realm
|
||||
"""
|
||||
|
||||
realm = module.params.get('realm')
|
||||
id = module.params.get('id')
|
||||
client_id = module.params.get('client_id')
|
||||
|
||||
# only lookup the client_id if id isn't provided.
|
||||
# in the case that both are provided, prefer the ID, since it's one
|
||||
# less lookup.
|
||||
if id is None:
|
||||
# Due to the required_one_of spec, client_id is guaranteed to not be None
|
||||
client = kc.get_client_by_clientid(client_id, realm=realm)
|
||||
|
||||
if client is None:
|
||||
module.fail_json(
|
||||
msg='Client does not exist {client_id}'.format(client_id=client_id)
|
||||
)
|
||||
|
||||
id = client['id']
|
||||
|
||||
return id, realm
|
||||
|
|
@ -85,17 +85,16 @@ class iLORedfishUtils(RedfishUtils):
|
|||
|
||||
datetime_uri = self.manager_uri + "DateTime"
|
||||
|
||||
response = self.get_request(self.root_uri + datetime_uri)
|
||||
if not response['ret']:
|
||||
return response
|
||||
listofips = mgr_attributes['mgr_attr_value'].split(" ")
|
||||
if len(listofips) > 2:
|
||||
return {'ret': False, 'changed': False, 'msg': "More than 2 NTP Servers mentioned"}
|
||||
|
||||
data = response['data']
|
||||
ntp_list = []
|
||||
for ips in listofips:
|
||||
ntp_list.append(ips)
|
||||
|
||||
ntp_list = data[setkey]
|
||||
if len(ntp_list) == 2:
|
||||
ntp_list.pop(0)
|
||||
|
||||
ntp_list.append(mgr_attributes['mgr_attr_value'])
|
||||
while len(ntp_list) < 2:
|
||||
ntp_list.append("0.0.0.0")
|
||||
|
||||
payload = {setkey: ntp_list}
|
||||
|
||||
|
|
@ -137,18 +136,16 @@ class iLORedfishUtils(RedfishUtils):
|
|||
nic_info = self.get_manager_ethernet_uri()
|
||||
uri = nic_info["nic_addr"]
|
||||
|
||||
response = self.get_request(self.root_uri + uri)
|
||||
if not response['ret']:
|
||||
return response
|
||||
listofips = attr['mgr_attr_value'].split(" ")
|
||||
if len(listofips) > 3:
|
||||
return {'ret': False, 'changed': False, 'msg': "More than 3 DNS Servers mentioned"}
|
||||
|
||||
data = response['data']
|
||||
dns_list = []
|
||||
for ips in listofips:
|
||||
dns_list.append(ips)
|
||||
|
||||
dns_list = data["Oem"]["Hpe"]["IPv4"][key]
|
||||
|
||||
if len(dns_list) == 3:
|
||||
dns_list.pop(0)
|
||||
|
||||
dns_list.append(attr['mgr_attr_value'])
|
||||
while len(dns_list) < 3:
|
||||
dns_list.append("0.0.0.0")
|
||||
|
||||
payload = {
|
||||
"Oem": {
|
||||
|
|
|
|||
|
|
@ -15,7 +15,7 @@ from ansible_collections.community.general.plugins.module_utils.version import L
|
|||
|
||||
REQUESTS_IMP_ERR = None
|
||||
try:
|
||||
import requests.exceptions
|
||||
import requests.exceptions # noqa: F401, pylint: disable=unused-import
|
||||
HAS_REQUESTS = True
|
||||
except ImportError:
|
||||
REQUESTS_IMP_ERR = traceback.format_exc()
|
||||
|
|
@ -25,7 +25,7 @@ INFLUXDB_IMP_ERR = None
|
|||
try:
|
||||
from influxdb import InfluxDBClient
|
||||
from influxdb import __version__ as influxdb_version
|
||||
from influxdb import exceptions
|
||||
from influxdb import exceptions # noqa: F401, pylint: disable=unused-import
|
||||
HAS_INFLUXDB = True
|
||||
except ImportError:
|
||||
INFLUXDB_IMP_ERR = traceback.format_exc()
|
||||
|
|
|
|||
plugins/module_utils/jenkins.py (new file, 35 lines)

@@ -0,0 +1,35 @@
# -*- coding: utf-8 -*-

# Copyright (c) 2022, Alexei Znamensky <russoz@gmail.com>
#
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import absolute_import, division, print_function
__metaclass__ = type


import os
import time


def download_updates_file(updates_expiration):
    updates_filename = 'jenkins-plugin-cache.json'
    updates_dir = os.path.expanduser('~/.ansible/tmp')
    updates_file = os.path.join(updates_dir, updates_filename)
    download_updates = True

    # Make sure the destination directory exists
    if not os.path.isdir(updates_dir):
        os.makedirs(updates_dir, 0o700)

    # Check if we need to download new updates file
    if os.path.isfile(updates_file):
        # Get timestamp when the file was changed last time
        ts_file = os.stat(updates_file).st_mtime
        ts_now = time.time()

        if ts_now - ts_file < updates_expiration:
            download_updates = False

    return updates_file, download_updates
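A small usage sketch for the helper above; the one-hour expiration value is arbitrary:

# Returns the cache path and whether it is stale enough to warrant a re-download.
updates_file, download_updates = download_updates_file(updates_expiration=3600)
if download_updates:
    pass  # fetch the Jenkins update-center data and write it to updates_file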
@@ -10,6 +10,7 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type

import re
import traceback
from ansible.module_utils.common.text.converters import to_native

@@ -39,6 +40,7 @@ def gen_specs(**specs):
        'start_tls': dict(default=False, type='bool'),
        'validate_certs': dict(default=True, type='bool'),
        'sasl_class': dict(choices=['external', 'gssapi'], default='external', type='str'),
        'xorder_discovery': dict(choices=['enable', 'auto', 'disable'], default='auto', type='str'),
    })

    return specs

@@ -55,12 +57,16 @@ class LdapGeneric(object):
        self.start_tls = self.module.params['start_tls']
        self.verify_cert = self.module.params['validate_certs']
        self.sasl_class = self.module.params['sasl_class']
        self.xorder_discovery = self.module.params['xorder_discovery']

        # Establish connection
        self.connection = self._connect_to_ldap()

        # Try to find the X_ORDERed version of the DN
        self.dn = self._find_dn()
        if self.xorder_discovery == "enable" or (self.xorder_discovery == "auto" and not self._xorder_dn()):
            # Try to find the X_ORDERed version of the DN
            self.dn = self._find_dn()
        else:
            self.dn = self.module.params['dn']

    def fail(self, msg, exn):
        self.module.fail_json(

@@ -113,3 +119,8 @@ class LdapGeneric(object):
            self.fail("Cannot bind to the server.", e)

        return connection

    def _xorder_dn(self):
        # match X_ORDERed DNs
        regex = r"\w+=\{\d+\}.+"
        return re.match(regex, self.module.params['dn']) is not None
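The _xorder_dn() regex only matches DNs whose leading RDN carries an X-ORDERED index; a quick sketch of what it accepts, with example DNs:

import re

regex = r"\w+=\{\d+\}.+"
print(bool(re.match(regex, "olcDatabase={1}mdb,cn=config")))   # True  - X_ORDERed DN
print(bool(re.match(regex, "cn=admin,dc=example,dc=com")))     # False - plain DN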
|
|||
|
|
@ -8,8 +8,10 @@ from __future__ import (absolute_import, division, print_function)
|
|||
__metaclass__ = type
|
||||
|
||||
|
||||
import os
|
||||
import socket
|
||||
import ssl
|
||||
import json
|
||||
|
||||
from ansible.module_utils.urls import generic_urlparse
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlparse
|
||||
|
|
@ -20,8 +22,6 @@ from ansible.module_utils.common.text.converters import to_text
|
|||
HTTPConnection = http_client.HTTPConnection
|
||||
HTTPSConnection = http_client.HTTPSConnection
|
||||
|
||||
import json
|
||||
|
||||
|
||||
class UnixHTTPConnection(HTTPConnection):
|
||||
def __init__(self, path):
|
||||
|
|
@ -60,7 +60,7 @@ class LXDClient(object):
|
|||
self.cert_file = cert_file
|
||||
self.key_file = key_file
|
||||
parts = generic_urlparse(urlparse(self.url))
|
||||
ctx = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
|
||||
ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
|
||||
ctx.load_cert_chain(cert_file, keyfile=key_file)
|
||||
self.connection = HTTPSConnection(parts.get('netloc'), context=ctx)
|
||||
elif url.startswith('unix:'):
|
||||
|
|
@ -124,3 +124,11 @@ class LXDClient(object):
|
|||
if err is None:
|
||||
err = resp_json.get('error', None)
|
||||
return err
|
||||
|
||||
|
||||
def default_key_file():
|
||||
return os.path.expanduser('~/.config/lxc/client.key')
|
||||
|
||||
|
||||
def default_cert_file():
|
||||
return os.path.expanduser('~/.config/lxc/client.crt')
|
||||
|
|
|
|||
|
|
@@ -37,8 +37,17 @@ def cause_changes(on_success=None, on_failure=None):


def module_fails_on_exception(func):
    conflict_list = ('msg', 'exception', 'output', 'vars', 'changed')

    @wraps(func)
    def wrapper(self, *args, **kwargs):
        def fix_var_conflicts(output):
            result = dict([
                (k if k not in conflict_list else "_" + k, v)
                for k, v in output.items()
            ])
            return result

        try:
            func(self, *args, **kwargs)
        except SystemExit:

@@ -46,12 +55,16 @@ def module_fails_on_exception(func):
        except ModuleHelperException as e:
            if e.update_output:
                self.update_output(e.update_output)
            # patchy solution to resolve conflict with output variables
            output = fix_var_conflicts(self.output)
            self.module.fail_json(msg=e.msg, exception=traceback.format_exc(),
                                  output=self.output, vars=self.vars.output(), **self.output)
                                  output=self.output, vars=self.vars.output(), **output)
        except Exception as e:
            # patchy solution to resolve conflict with output variables
            output = fix_var_conflicts(self.output)
            msg = "Module failed with exception: {0}".format(str(e).strip())
            self.module.fail_json(msg=msg, exception=traceback.format_exc(),
                                  output=self.output, vars=self.vars.output(), **self.output)
                                  output=self.output, vars=self.vars.output(), **output)
    return wrapper
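The helper only renames colliding keys before they are splatted into fail_json(); a minimal sketch of its effect on a sample output dict:

conflict_list = ('msg', 'exception', 'output', 'vars', 'changed')

def fix_var_conflicts(output):
    return {("_" + k if k in conflict_list else k): v for k, v in output.items()}

print(fix_var_conflicts({'msg': 'done', 'rc': 0}))
# {'_msg': 'done', 'rc': 0} -- 'msg' is prefixed so it cannot clash with fail_json(msg=...)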
|
|||
|
|
@ -9,7 +9,8 @@ __metaclass__ = type
|
|||
|
||||
from ansible.module_utils.common.dict_transformations import dict_merge
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.base import ModuleHelperBase, AnsibleModule
|
||||
# (TODO: remove AnsibleModule!) pylint: disable-next=unused-import
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.base import ModuleHelperBase, AnsibleModule # noqa: F401
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.cmd import CmdMixin
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.state import StateMixin
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.deps import DependencyMixin
|
||||
|
|
@ -18,7 +19,6 @@ from ansible_collections.community.general.plugins.module_utils.mh.mixins.deprec
|
|||
|
||||
|
||||
class ModuleHelper(DeprecateAttrsMixin, VarsMixin, DependencyMixin, ModuleHelperBase):
|
||||
_output_conflict_list = ('msg', 'exception', 'output', 'vars', 'changed')
|
||||
facts_name = None
|
||||
output_params = ()
|
||||
diff_params = ()
|
||||
|
|
@ -60,10 +60,6 @@ class ModuleHelper(DeprecateAttrsMixin, VarsMixin, DependencyMixin, ModuleHelper
|
|||
vars_diff = self.vars.diff() or {}
|
||||
result['diff'] = dict_merge(dict(diff), vars_diff)
|
||||
|
||||
for varname in result:
|
||||
if varname in self._output_conflict_list:
|
||||
result["_" + varname] = result[varname]
|
||||
del result[varname]
|
||||
return result
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -8,12 +8,13 @@ from __future__ import absolute_import, division, print_function
|
|||
__metaclass__ = type
|
||||
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.module_helper import (
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.module_helper import ( # noqa: F401, pylint: disable=unused-import
|
||||
ModuleHelper, StateModuleHelper, CmdModuleHelper, CmdStateModuleHelper, AnsibleModule
|
||||
)
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.cmd import CmdMixin, ArgFormat
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.state import StateMixin
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.deps import DependencyCtxMgr
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.exceptions import ModuleHelperException
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.deco import cause_changes, module_fails_on_exception
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.vars import VarMeta, VarDict
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.cmd import CmdMixin, ArgFormat # noqa: F401, pylint: disable=unused-import
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.state import StateMixin # noqa: F401, pylint: disable=unused-import
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.deps import DependencyCtxMgr # noqa: F401, pylint: disable=unused-import
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.exceptions import ModuleHelperException # noqa: F401, pylint: disable=unused-import
|
||||
# pylint: disable-next=unused-import
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.deco import cause_changes, module_fails_on_exception # noqa: F401
|
||||
from ansible_collections.community.general.plugins.module_utils.mh.mixins.vars import VarMeta, VarDict # noqa: F401, pylint: disable=unused-import
|
||||
|
|
|
|||
plugins/module_utils/ocapi_utils.py (new file, 502 lines)
@ -0,0 +1,502 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
# Copyright (c) 2022 Western Digital Corporation
|
||||
# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
import json
|
||||
import os
|
||||
import uuid
|
||||
|
||||
from ansible.module_utils.urls import open_url
|
||||
from ansible.module_utils.common.text.converters import to_native
|
||||
from ansible.module_utils.common.text.converters import to_text
|
||||
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
|
||||
from ansible.module_utils.six.moves.urllib.parse import urlparse
|
||||
|
||||
|
||||
GET_HEADERS = {'accept': 'application/json'}
|
||||
PUT_HEADERS = {'content-type': 'application/json', 'accept': 'application/json'}
|
||||
POST_HEADERS = {'content-type': 'application/json', 'accept': 'application/json'}
|
||||
DELETE_HEADERS = {'accept': 'application/json'}
|
||||
|
||||
HEALTH_OK = 5
|
||||
|
||||
|
||||
class OcapiUtils(object):
|
||||
|
||||
def __init__(self, creds, base_uri, proxy_slot_number, timeout, module):
|
||||
self.root_uri = base_uri
|
||||
self.proxy_slot_number = proxy_slot_number
|
||||
self.creds = creds
|
||||
self.timeout = timeout
|
||||
self.module = module
|
||||
|
||||
def _auth_params(self):
|
||||
"""
|
||||
Return tuple of required authentication params based on the username and password.
|
||||
|
||||
:return: tuple of username, password
|
||||
"""
|
||||
username = self.creds['user']
|
||||
password = self.creds['pswd']
|
||||
force_basic_auth = True
|
||||
return username, password, force_basic_auth
|
||||
|
||||
def get_request(self, uri):
|
||||
req_headers = dict(GET_HEADERS)
|
||||
username, password, basic_auth = self._auth_params()
|
||||
try:
|
||||
resp = open_url(uri, method="GET", headers=req_headers,
|
||||
url_username=username, url_password=password,
|
||||
force_basic_auth=basic_auth, validate_certs=False,
|
||||
follow_redirects='all',
|
||||
use_proxy=True, timeout=self.timeout)
|
||||
data = json.loads(to_native(resp.read()))
|
||||
headers = dict((k.lower(), v) for (k, v) in resp.info().items())
|
||||
except HTTPError as e:
|
||||
return {'ret': False,
|
||||
'msg': "HTTP Error %s on GET request to '%s'"
|
||||
% (e.code, uri),
|
||||
'status': e.code}
|
||||
except URLError as e:
|
||||
return {'ret': False, 'msg': "URL Error on GET request to '%s': '%s'"
|
||||
% (uri, e.reason)}
|
||||
# Almost all errors should be caught above, but just in case
|
||||
except Exception as e:
|
||||
return {'ret': False,
|
||||
'msg': "Failed GET request to '%s': '%s'" % (uri, to_text(e))}
|
||||
return {'ret': True, 'data': data, 'headers': headers}
|
||||
|
||||
def delete_request(self, uri, etag=None):
|
||||
req_headers = dict(DELETE_HEADERS)
|
||||
if etag is not None:
|
||||
req_headers['If-Match'] = etag
|
||||
username, password, basic_auth = self._auth_params()
|
||||
try:
|
||||
resp = open_url(uri, method="DELETE", headers=req_headers,
|
||||
url_username=username, url_password=password,
|
||||
force_basic_auth=basic_auth, validate_certs=False,
|
||||
follow_redirects='all',
|
||||
use_proxy=True, timeout=self.timeout)
|
||||
if resp.status != 204:
|
||||
data = json.loads(to_native(resp.read()))
|
||||
else:
|
||||
data = ""
|
||||
headers = dict((k.lower(), v) for (k, v) in resp.info().items())
|
||||
except HTTPError as e:
|
||||
return {'ret': False,
|
||||
'msg': "HTTP Error %s on DELETE request to '%s'"
|
||||
% (e.code, uri),
|
||||
'status': e.code}
|
||||
except URLError as e:
|
||||
return {'ret': False, 'msg': "URL Error on DELETE request to '%s': '%s'"
|
||||
% (uri, e.reason)}
|
||||
# Almost all errors should be caught above, but just in case
|
||||
except Exception as e:
|
||||
return {'ret': False,
|
||||
'msg': "Failed DELETE request to '%s': '%s'" % (uri, to_text(e))}
|
||||
return {'ret': True, 'data': data, 'headers': headers}
|
||||
|
||||
def put_request(self, uri, payload, etag=None):
|
||||
req_headers = dict(PUT_HEADERS)
|
||||
if etag is not None:
|
||||
req_headers['If-Match'] = etag
|
||||
username, password, basic_auth = self._auth_params()
|
||||
try:
|
||||
resp = open_url(uri, data=json.dumps(payload),
|
||||
headers=req_headers, method="PUT",
|
||||
url_username=username, url_password=password,
|
||||
force_basic_auth=basic_auth, validate_certs=False,
|
||||
follow_redirects='all',
|
||||
use_proxy=True, timeout=self.timeout)
|
||||
headers = dict((k.lower(), v) for (k, v) in resp.info().items())
|
||||
except HTTPError as e:
|
||||
return {'ret': False,
|
||||
'msg': "HTTP Error %s on PUT request to '%s'"
|
||||
% (e.code, uri),
|
||||
'status': e.code}
|
||||
except URLError as e:
|
||||
return {'ret': False, 'msg': "URL Error on PUT request to '%s': '%s'"
|
||||
% (uri, e.reason)}
|
||||
# Almost all errors should be caught above, but just in case
|
||||
except Exception as e:
|
||||
return {'ret': False,
|
||||
'msg': "Failed PUT request to '%s': '%s'" % (uri, to_text(e))}
|
||||
return {'ret': True, 'headers': headers, 'resp': resp}
|
||||
|
||||
def post_request(self, uri, payload, content_type="application/json", timeout=None):
|
||||
req_headers = dict(POST_HEADERS)
|
||||
if content_type != "application/json":
|
||||
req_headers["content-type"] = content_type
|
||||
username, password, basic_auth = self._auth_params()
|
||||
if content_type == "application/json":
|
||||
request_data = json.dumps(payload)
|
||||
else:
|
||||
request_data = payload
|
||||
try:
|
||||
resp = open_url(uri, data=request_data,
|
||||
headers=req_headers, method="POST",
|
||||
url_username=username, url_password=password,
|
||||
force_basic_auth=basic_auth, validate_certs=False,
|
||||
follow_redirects='all',
|
||||
use_proxy=True, timeout=self.timeout if timeout is None else timeout)
|
||||
headers = dict((k.lower(), v) for (k, v) in resp.info().items())
|
||||
except HTTPError as e:
|
||||
return {'ret': False,
|
||||
'msg': "HTTP Error %s on POST request to '%s'"
|
||||
% (e.code, uri),
|
||||
'status': e.code}
|
||||
except URLError as e:
|
||||
return {'ret': False, 'msg': "URL Error on POST request to '%s': '%s'"
|
||||
% (uri, e.reason)}
|
||||
# Almost all errors should be caught above, but just in case
|
||||
except Exception as e:
|
||||
return {'ret': False,
|
||||
'msg': "Failed POST request to '%s': '%s'" % (uri, to_text(e))}
|
||||
return {'ret': True, 'headers': headers, 'resp': resp}
|
||||
|
||||
def get_uri_with_slot_number_query_param(self, uri):
|
||||
"""Return the URI with proxy slot number added as a query param, if there is one.
|
||||
|
||||
If a proxy slot number is provided, to access it, we must append it as a query parameter.
|
||||
This method returns the given URI with the slotnumber query param added, if there is one.
|
||||
If there is not a proxy slot number, it just returns the URI as it was passed in.
|
||||
"""
|
||||
if self.proxy_slot_number is not None:
|
||||
parsed_url = urlparse(uri)
|
||||
return parsed_url._replace(query="slotnumber=" + str(self.proxy_slot_number)).geturl()
|
||||
else:
|
||||
return uri
|
||||
|
||||
def manage_system_power(self, command):
|
||||
"""Process a command to manage the system power.
|
||||
|
||||
:param str command: The Ansible command being processed.
|
||||
"""
|
||||
if command == "PowerGracefulRestart":
|
||||
resource_uri = self.root_uri
|
||||
resource_uri = self.get_uri_with_slot_number_query_param(resource_uri)
|
||||
|
||||
# Get the resource so that we have the Etag
|
||||
response = self.get_request(resource_uri)
|
||||
if 'etag' not in response['headers']:
|
||||
return {'ret': False, 'msg': 'Etag not found in response.'}
|
||||
etag = response['headers']['etag']
|
||||
if response['ret'] is False:
|
||||
return response
|
||||
|
||||
# Issue the PUT to do the reboot (unless we are in check mode)
|
||||
if self.module.check_mode:
|
||||
return {
|
||||
'ret': True,
|
||||
'changed': True,
|
||||
'msg': 'Update not performed in check mode.'
|
||||
}
|
||||
payload = {'Reboot': True}
|
||||
response = self.put_request(resource_uri, payload, etag)
|
||||
if response['ret'] is False:
|
||||
return response
|
||||
elif command.startswith("PowerMode"):
|
||||
return self.manage_power_mode(command)
|
||||
else:
|
||||
return {'ret': False, 'msg': 'Invalid command: ' + command}
|
||||
|
||||
return {'ret': True}
|
||||
|
||||
def manage_chassis_indicator_led(self, command):
|
||||
"""Process a command to manage the chassis indicator LED.
|
||||
|
||||
:param string command: The Ansible command being processed.
|
||||
"""
|
||||
return self.manage_indicator_led(command, self.root_uri)
|
||||
|
||||
    def manage_indicator_led(self, command, resource_uri=None):
        """Process a command to manage an indicator LED.

        :param string command: The Ansible command being processed.
        :param string resource_uri: URI of the resource whose indicator LED is being managed.
        """
        key = "IndicatorLED"
        if resource_uri is None:
            resource_uri = self.root_uri
        resource_uri = self.get_uri_with_slot_number_query_param(resource_uri)

        payloads = {
            'IndicatorLedOn': {
                'ID': 2
            },
            'IndicatorLedOff': {
                'ID': 4
            }
        }

        response = self.get_request(resource_uri)
        if response['ret'] is False:
            return response
        if 'etag' not in response['headers']:
            return {'ret': False, 'msg': 'Etag not found in response.'}
        etag = response['headers']['etag']
        data = response['data']
        if key not in data:
            return {'ret': False, 'msg': "Key %s not found" % key}
        if 'ID' not in data[key]:
            return {'ret': False, 'msg': 'IndicatorLED for resource has no ID.'}

        if command in payloads.keys():
            # See if the LED is already set as requested.
            current_led_status = data[key]['ID']
            if current_led_status == payloads[command]['ID']:
                return {'ret': True, 'changed': False}

            # Set the LED (unless we are in check mode)
            if self.module.check_mode:
                return {
                    'ret': True,
                    'changed': True,
                    'msg': 'Update not performed in check mode.'
                }
            payload = {'IndicatorLED': payloads[command]}
            response = self.put_request(resource_uri, payload, etag)
            if response['ret'] is False:
                return response
        else:
            return {'ret': False, 'msg': 'Invalid command'}

        return {'ret': True}

    def manage_power_mode(self, command):
        key = "PowerState"
        resource_uri = self.get_uri_with_slot_number_query_param(self.root_uri)

        payloads = {
            "PowerModeNormal": 2,
            "PowerModeLow": 4
        }

        response = self.get_request(resource_uri)
        if response['ret'] is False:
            return response
        if 'etag' not in response['headers']:
            return {'ret': False, 'msg': 'Etag not found in response.'}
        etag = response['headers']['etag']
        data = response['data']
        if key not in data:
            return {'ret': False, 'msg': "Key %s not found" % key}
        if 'ID' not in data[key]:
            return {'ret': False, 'msg': 'PowerState for resource has no ID.'}

        if command in payloads.keys():
            # See if the PowerState is already set as requested.
            current_power_state = data[key]['ID']
            if current_power_state == payloads[command]:
                return {'ret': True, 'changed': False}

            # Set the Power State (unless we are in check mode)
            if self.module.check_mode:
                return {
                    'ret': True,
                    'changed': True,
                    'msg': 'Update not performed in check mode.'
                }
            payload = {'PowerState': {"ID": payloads[command]}}
            response = self.put_request(resource_uri, payload, etag)
            if response['ret'] is False:
                return response
        else:
            return {'ret': False, 'msg': 'Invalid command: ' + command}

        return {'ret': True}

    def prepare_multipart_firmware_upload(self, filename):
        """Prepare a multipart/form-data body for OCAPI firmware upload.

        :arg filename: The name of the file to upload.
        :returns: tuple of (content_type, body) where ``content_type`` is
            the ``multipart/form-data`` ``Content-Type`` header including
            ``boundary`` and ``body`` is the prepared bytestring body

        Prepares the body to include "FirmwareFile" field with the contents of the file.
        Because some OCAPI targets do not support Base-64 encoding for multipart/form-data,
        this method sends the file as binary.
        """
        boundary = str(uuid.uuid4())  # Generate a random boundary
        body = "--" + boundary + '\r\n'
        body += 'Content-Disposition: form-data; name="FirmwareFile"; filename="%s"\r\n' % to_native(os.path.basename(filename))
        body += 'Content-Type: application/octet-stream\r\n\r\n'
        body_bytes = bytearray(body, 'utf-8')
        with open(filename, 'rb') as f:
            body_bytes += f.read()
        body_bytes += bytearray("\r\n--%s--" % boundary, 'utf-8')
        return ("multipart/form-data; boundary=%s" % boundary,
                body_bytes)

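    # Hedged usage sketch (caller-side code assumed, not upstream): the tuple
    # returned above plugs straight into an HTTP POST, for example via the
    # post_request() helper. The path "/tmp/fw.bin" is purely hypothetical.
    #
    #   content_type, body = utils.prepare_multipart_firmware_upload("/tmp/fw.bin")
    #   utils.post_request(url, body, content_type=content_type, timeout=300)
    #
    # The body is a raw bytearray, so no Base-64 encoding step is involved.
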
    def upload_firmware_image(self, update_image_path):
        """Perform Firmware Upload to the OCAPI storage device.

        :param str update_image_path: The path/filename of the firmware image, on the local filesystem.
        """
        if not (os.path.exists(update_image_path) and os.path.isfile(update_image_path)):
            return {'ret': False, 'msg': 'File does not exist.'}
        url = self.root_uri + "OperatingSystem"
        url = self.get_uri_with_slot_number_query_param(url)
        content_type, b_form_data = self.prepare_multipart_firmware_upload(update_image_path)

        # Post the firmware (unless we are in check mode)
        if self.module.check_mode:
            return {
                'ret': True,
                'changed': True,
                'msg': 'Update not performed in check mode.'
            }
        result = self.post_request(url, b_form_data, content_type=content_type, timeout=300)
        if result['ret'] is False:
            return result
        return {'ret': True}

    def update_firmware_image(self):
        """Perform a Firmware Update on the OCAPI storage device."""
        resource_uri = self.root_uri
        resource_uri = self.get_uri_with_slot_number_query_param(resource_uri)
        # We have to do a GET to obtain the Etag. It's required on the PUT.
        response = self.get_request(resource_uri)
        if response['ret'] is False:
            return response
        if 'etag' not in response['headers']:
            return {'ret': False, 'msg': 'Etag not found in response.'}
        etag = response['headers']['etag']

        # Issue the PUT (unless we are in check mode)
        if self.module.check_mode:
            return {
                'ret': True,
                'changed': True,
                'msg': 'Update not performed in check mode.'
            }
        payload = {'FirmwareUpdate': True}
        response = self.put_request(resource_uri, payload, etag)
        if response['ret'] is False:
            return response

        return {'ret': True, 'jobUri': response["headers"]["location"]}

    def activate_firmware_image(self):
        """Perform a Firmware Activate on the OCAPI storage device."""
        resource_uri = self.root_uri
        resource_uri = self.get_uri_with_slot_number_query_param(resource_uri)
        # We have to do a GET to obtain the Etag. It's required on the PUT.
        response = self.get_request(resource_uri)
        if response['ret'] is False:
            return response
        if 'etag' not in response['headers']:
            return {'ret': False, 'msg': 'Etag not found in response.'}
        etag = response['headers']['etag']

        # Issue the PUT (unless we are in check mode)
        if self.module.check_mode:
            return {
                'ret': True,
                'changed': True,
                'msg': 'Update not performed in check mode.'
            }
        payload = {'FirmwareActivate': True}
        response = self.put_request(resource_uri, payload, etag)
        if response['ret'] is False:
            return response

        return {'ret': True, 'jobUri': response["headers"]["location"]}

    def get_job_status(self, job_uri):
        """Get the status of a job.

        :param str job_uri: The URI of the job's status monitor.
        """
        job_uri = self.get_uri_with_slot_number_query_param(job_uri)
        response = self.get_request(job_uri)
        if response['ret'] is False:
            if response.get('status') == 404:
                # Job not found -- assume 0%
                return {
                    "ret": True,
                    "percentComplete": 0,
                    "operationStatus": "Not Available",
                    "operationStatusId": 1,
                    "operationHealth": None,
                    "operationHealthId": None,
                    "details": "Job does not exist.",
                    "jobExists": False
                }
            else:
                return response
        details = response["data"]["Status"].get("Details")
        if type(details) is str:
            details = [details]
        health_list = response["data"]["Status"]["Health"]
        return_value = {
            "ret": True,
            "percentComplete": response["data"]["PercentComplete"],
            "operationStatus": response["data"]["Status"]["State"]["Name"],
            "operationStatusId": response["data"]["Status"]["State"]["ID"],
            "operationHealth": health_list[0]["Name"] if len(health_list) > 0 else None,
            "operationHealthId": health_list[0]["ID"] if len(health_list) > 0 else None,
            "details": details,
            "jobExists": True
        }
        return return_value

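    # Illustrative polling sketch (caller-side code assumed, not part of this
    # file; `time` would have to be imported by the caller): a module can poll
    # get_job_status() until the job reports completion.
    #
    #   status = utils.get_job_status(job_uri)
    #   while status['ret'] and status['jobExists'] and status['percentComplete'] < 100:
    #       time.sleep(5)
    #       status = utils.get_job_status(job_uri)
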
    def delete_job(self, job_uri):
        """Delete the OCAPI job referenced by the specified job_uri."""
        job_uri = self.get_uri_with_slot_number_query_param(job_uri)
        # We have to do a GET to obtain the Etag. It's required on the DELETE.
        response = self.get_request(job_uri)

        if response['ret'] is True:
            if 'etag' not in response['headers']:
                return {'ret': False, 'msg': 'Etag not found in response.'}
            else:
                etag = response['headers']['etag']

            if response['data']['PercentComplete'] != 100:
                return {
                    'ret': False,
                    'changed': False,
                    'msg': 'Cannot delete job because it is in progress.'
                }

        if response['ret'] is False:
            if response['status'] == 404:
                return {
                    'ret': True,
                    'changed': False,
                    'msg': 'Job already deleted.'
                }
            return response

        if self.module.check_mode:
            return {
                'ret': True,
                'changed': True,
                'msg': 'Update not performed in check mode.'
            }

        # Do the DELETE (unless we are in check mode)
        response = self.delete_request(job_uri, etag)
        if response['ret'] is False:
            if response['status'] == 404:
                return {
                    'ret': True,
                    'changed': False
                }
            elif response['status'] == 409:
                return {
                    'ret': False,
                    'changed': False,
                    'msg': 'Cannot delete job because it is in progress.'
                }
            return response
        return {
            'ret': True,
            'changed': True
        }

@@ -16,7 +16,8 @@ __metaclass__ = type
import abc
import collections
import json
import os
# (TODO: remove next line!)
import os  # noqa: F401, pylint: disable=unused-import
import traceback

HPE_ONEVIEW_IMP_ERR = None

@@ -26,6 +26,36 @@ except ImportError:
    HAS_PYONE = False


# A helper function to mitigate https://github.com/OpenNebula/one/issues/6064.
# It allows for easily handling lists like "NIC" or "DISK" in the JSON-like template representation.
# There are either lists of dictionaries (length > 1) or just dictionaries.
def flatten(to_flatten, extract=False):
    """Flattens nested lists (with optional value extraction)."""
    def recurse(to_flatten):
        return sum(map(recurse, to_flatten), []) if isinstance(to_flatten, list) else [to_flatten]
    value = recurse(to_flatten)
    if extract and len(value) == 1:
        return value[0]
    return value


# A helper function to mitigate https://github.com/OpenNebula/one/issues/6064.
# It renders JSON-like template representation into OpenNebula's template syntax (string).
def render(to_render):
    """Converts dictionary to OpenNebula template."""
    def recurse(to_render):
        for key, value in sorted(to_render.items()):
            if isinstance(value, dict):
                yield '{0:}=[{1:}]'.format(key, ','.join(recurse(value)))
                continue
            if isinstance(value, list):
                for item in value:
                    yield '{0:}=[{1:}]'.format(key, ','.join(recurse(item)))
                continue
            yield '{0:}="{1:}"'.format(key, value)
    return '\n'.join(recurse(to_render))


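# Illustrative sketch (not part of the diff): given OpenNebula's JSON-like
# template representation, the two helpers above behave roughly like this.
#
#   flatten({'NAME': 'eth0'}, extract=True)         # -> {'NAME': 'eth0'}
#   flatten([[{'NIC': 1}], {'NIC': 2}])              # -> [{'NIC': 1}, {'NIC': 2}]
#   render({'CPU': 1, 'NIC': [{'NETWORK': 'priv'}]})
#   # -> 'CPU="1"\nNIC=[NETWORK="priv"]'

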
class OpenNebulaModule:
    """
    Base class for all OpenNebula Ansible Modules.

@@ -10,13 +10,14 @@ import logging
import logging.config
import os
import tempfile
from datetime import datetime
# (TODO: remove next line!)
from datetime import datetime  # noqa: F401, pylint: disable=unused-import
from operator import eq

import time

try:
    import yaml
    import yaml  # noqa: F401, pylint: disable=unused-import

    import oci
    from oci.constants import HEADER_NEXT_PAGE

@@ -7,9 +7,12 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type

import atexit
import time
import re
# (TODO: remove next line!)
import atexit  # noqa: F401, pylint: disable=unused-import
# (TODO: remove next line!)
import time  # noqa: F401, pylint: disable=unused-import
# (TODO: remove next line!)
import re  # noqa: F401, pylint: disable=unused-import
import traceback

PROXMOXER_IMP_ERR = None

@@ -22,7 +25,8 @@ except ImportError:


from ansible.module_utils.basic import env_fallback, missing_required_lib
from ansible.module_utils.common.text.converters import to_native
# (TODO: remove next line!)
from ansible.module_utils.common.text.converters import to_native  # noqa: F401, pylint: disable=unused-import
from ansible_collections.community.general.plugins.module_utils.version import LooseVersion

plugins/module_utils/puppet.py  (new file, 114 lines)
@@ -0,0 +1,114 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2022, Alexei Znamensky <russoz@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import absolute_import, division, print_function
__metaclass__ = type


import os

from ansible_collections.community.general.plugins.module_utils.cmd_runner import CmdRunner, cmd_runner_fmt


_PUPPET_PATH_PREFIX = ["/opt/puppetlabs/bin"]


def get_facter_dir():
    if os.getuid() == 0:
        return '/etc/facter/facts.d'
    else:
        return os.path.expanduser('~/.facter/facts.d')


def _puppet_cmd(module):
    return module.get_bin_path("puppet", False, _PUPPET_PATH_PREFIX)


# If the `timeout` CLI command feature is removed,
# Then we could add this as a fixed param to `puppet_runner`
def ensure_agent_enabled(module):
    runner = CmdRunner(
        module,
        command="puppet",
        path_prefix=_PUPPET_PATH_PREFIX,
        arg_formats=dict(
            _agent_disabled=cmd_runner_fmt.as_fixed(['config', 'print', 'agent_disabled_lockfile']),
        ),
        check_rc=False,
    )

    rc, stdout, stderr = runner("_agent_disabled").run()
    if os.path.exists(stdout.strip()):
        module.fail_json(
            msg="Puppet agent is administratively disabled.",
            disabled=True)
    elif rc != 0:
        module.fail_json(
            msg="Puppet agent state could not be determined.")


def puppet_runner(module):

    # Keeping backward compatibility, allow for running with the `timeout` CLI command.
    # If this can be replaced with ansible `timeout` parameter in playbook,
    # then this function could be removed.
    def _prepare_base_cmd():
        _tout_cmd = module.get_bin_path("timeout", False)
        if _tout_cmd:
            cmd = ["timeout", "-s", "9", module.params["timeout"], _puppet_cmd(module)]
        else:
            cmd = ["puppet"]
        return cmd

    def noop_func(v):
        _noop = cmd_runner_fmt.as_map({
            True: "--noop",
            False: "--no-noop",
        })
        return _noop(module.check_mode or v)

    _logdest_map = {
        "syslog": ["--logdest", "syslog"],
        "all": ["--logdest", "syslog", "--logdest", "console"],
    }

    @cmd_runner_fmt.unpack_args
    def execute_func(execute, manifest):
        if execute:
            return ["--execute", execute]
        else:
            return [manifest]

    runner = CmdRunner(
        module,
        command=_prepare_base_cmd(),
        path_prefix=_PUPPET_PATH_PREFIX,
        arg_formats=dict(
            _agent_fixed=cmd_runner_fmt.as_fixed([
                "agent", "--onetime", "--no-daemonize", "--no-usecacheonfailure",
                "--no-splay", "--detailed-exitcodes", "--verbose", "--color", "0",
            ]),
            _apply_fixed=cmd_runner_fmt.as_fixed(["apply", "--detailed-exitcodes"]),
            puppetmaster=cmd_runner_fmt.as_opt_val("--server"),
            show_diff=cmd_runner_fmt.as_bool("--show-diff"),
            confdir=cmd_runner_fmt.as_opt_val("--confdir"),
            environment=cmd_runner_fmt.as_opt_val("--environment"),
            tags=cmd_runner_fmt.as_func(lambda v: ["--tags", ",".join(v)]),
            certname=cmd_runner_fmt.as_opt_eq_val("--certname"),
            noop=cmd_runner_fmt.as_func(noop_func),
            use_srv_records=cmd_runner_fmt.as_map({
                True: "--usr_srv_records",
                False: "--no-usr_srv_records",
            }),
            logdest=cmd_runner_fmt.as_map(_logdest_map, default=[]),
            modulepath=cmd_runner_fmt.as_opt_eq_val("--modulepath"),
            _execute=cmd_runner_fmt.as_func(execute_func),
            summarize=cmd_runner_fmt.as_bool("--summarize"),
            debug=cmd_runner_fmt.as_bool("--debug"),
            verbose=cmd_runner_fmt.as_bool("--verbose"),
        ),
        check_rc=False,
    )
    return runner

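# Hedged usage sketch (module-side code is assumed, not shown in this diff):
# a module would typically build the runner and then execute the agent with an
# explicit argument order. The argument names below are only examples.
#
#   runner = puppet_runner(module)
#   with runner("_agent_fixed puppetmaster show_diff noop logdest") as ctx:
#       rc, stdout, stderr = ctx.run()
#
# The leading "_agent_fixed" pulls in the fixed agent arguments declared above.
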
@@ -21,13 +21,15 @@ except ImportError:

HAS_PURITY_FB = True
try:
    from purity_fb import PurityFb, FileSystem, FileSystemSnapshot, SnapshotSuffix, rest
    from purity_fb import PurityFb, FileSystem, FileSystemSnapshot, SnapshotSuffix, rest  # noqa: F401, pylint: disable=unused-import
except ImportError:
    HAS_PURITY_FB = False

from functools import wraps
# (TODO: remove next line!)
from functools import wraps  # noqa: F401, pylint: disable=unused-import
from os import environ
from os import path
# (TODO: remove next line!)
from os import path  # noqa: F401, pylint: disable=unused-import
import platform

VERSION = 1.2

@@ -314,3 +314,21 @@ def setup_rax_module(module, rax_module, region_required=True):
                             (region, ','.join(rax_module.regions)))

    return rax_module


def rax_scaling_group_personality_file(module, files):
    if not files:
        return []

    results = []
    for rpath, lpath in files.items():
        lpath = os.path.expanduser(lpath)
        try:
            with open(lpath, 'r') as f:
                results.append({
                    'path': rpath,
                    'contents': f.read(),
                })
        except Exception as e:
            module.fail_json(msg='Failed to load %s: %s' % (lpath, str(e)))
    return results

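# Illustrative sketch (not upstream code): `files` maps remote paths to local
# files, and the helper returns the personality structure the Rackspace API
# expects. The paths below are hypothetical.
#
#   rax_scaling_group_personality_file(module, {'/etc/motd': '~/motd.txt'})
#   # -> [{'path': '/etc/motd', 'contents': '<contents of ~/motd.txt>'}]
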
@@ -19,6 +19,8 @@ POST_HEADERS = {'content-type': 'application/json', 'accept': 'application/json'
                'OData-Version': '4.0'}
PATCH_HEADERS = {'content-type': 'application/json', 'accept': 'application/json',
                 'OData-Version': '4.0'}
PUT_HEADERS = {'content-type': 'application/json', 'accept': 'application/json',
               'OData-Version': '4.0'}
DELETE_HEADERS = {'accept': 'application/json', 'OData-Version': '4.0'}

FAIL_MSG = 'Issuing a data modification command without specifying the '\
@@ -36,6 +38,8 @@ class RedfishUtils(object):
        self.timeout = timeout
        self.module = module
        self.service_root = '/redfish/v1/'
        self.session_service_uri = '/redfish/v1/SessionService'
        self.sessions_uri = '/redfish/v1/SessionService/Sessions'
        self.resource_id = resource_id
        self.data_modification = data_modification
        self.strip_etag_quotes = strip_etag_quotes

@@ -123,6 +127,10 @@ class RedfishUtils(object):
        req_headers = dict(GET_HEADERS)
        username, password, basic_auth = self._auth_params(req_headers)
        try:
            # Service root is an unauthenticated resource; remove credentials
            # in case the caller will be using sessions later.
            if uri == (self.root_uri + self.service_root):
                basic_auth = False
            resp = open_url(uri, method="GET", headers=req_headers,
                            url_username=username, url_password=password,
                            force_basic_auth=basic_auth, validate_certs=False,
@@ -143,18 +151,28 @@ class RedfishUtils(object):
        except Exception as e:
            return {'ret': False,
                    'msg': "Failed GET request to '%s': '%s'" % (uri, to_text(e))}
        return {'ret': True, 'data': data, 'headers': headers}
        return {'ret': True, 'data': data, 'headers': headers, 'resp': resp}

    def post_request(self, uri, pyld):
        req_headers = dict(POST_HEADERS)
        username, password, basic_auth = self._auth_params(req_headers)
        try:
            # When performing a POST to the session collection, credentials are
            # provided in the request body. Do not provide the basic auth
            # header since this can cause conflicts with some services
            if self.sessions_uri is not None and uri == (self.root_uri + self.sessions_uri):
                basic_auth = False
            resp = open_url(uri, data=json.dumps(pyld),
                            headers=req_headers, method="POST",
                            url_username=username, url_password=password,
                            force_basic_auth=basic_auth, validate_certs=False,
                            follow_redirects='all',
                            use_proxy=True, timeout=self.timeout)
            try:
                data = json.loads(to_native(resp.read()))
            except Exception as e:
                # No response data; this is okay in many cases
                data = None
            headers = dict((k.lower(), v) for (k, v) in resp.info().items())
        except HTTPError as e:
            msg = self._get_extended_message(e)
@@ -169,7 +187,7 @@ class RedfishUtils(object):
        except Exception as e:
            return {'ret': False,
                    'msg': "Failed POST request to '%s': '%s'" % (uri, to_text(e))}
        return {'ret': True, 'headers': headers, 'resp': resp}
        return {'ret': True, 'data': data, 'headers': headers, 'resp': resp}

    def patch_request(self, uri, pyld, check_pyld=False):
        req_headers = dict(PATCH_HEADERS)
@@ -219,6 +237,41 @@ class RedfishUtils(object):
                    'msg': "Failed PATCH request to '%s': '%s'" % (uri, to_text(e))}
        return {'ret': True, 'changed': True, 'resp': resp, 'msg': 'Modified %s' % uri}

    def put_request(self, uri, pyld):
        req_headers = dict(PUT_HEADERS)
        r = self.get_request(uri)
        if r['ret']:
            # Get etag from etag header or @odata.etag property
            etag = r['headers'].get('etag')
            if not etag:
                etag = r['data'].get('@odata.etag')
            if etag:
                if self.strip_etag_quotes:
                    etag = etag.strip('"')
                req_headers['If-Match'] = etag
        username, password, basic_auth = self._auth_params(req_headers)
        try:
            resp = open_url(uri, data=json.dumps(pyld),
                            headers=req_headers, method="PUT",
                            url_username=username, url_password=password,
                            force_basic_auth=basic_auth, validate_certs=False,
                            follow_redirects='all',
                            use_proxy=True, timeout=self.timeout)
        except HTTPError as e:
            msg = self._get_extended_message(e)
            return {'ret': False,
                    'msg': "HTTP Error %s on PUT request to '%s', extended message: '%s'"
                           % (e.code, uri, msg),
                    'status': e.code}
        except URLError as e:
            return {'ret': False, 'msg': "URL Error on PUT request to '%s': '%s'"
                                         % (uri, e.reason)}
        # Almost all errors should be caught above, but just in case
        except Exception as e:
            return {'ret': False,
                    'msg': "Failed PUT request to '%s': '%s'" % (uri, to_text(e))}
        return {'ret': True, 'resp': resp}

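    # Hedged usage sketch (caller-side, not part of this hunk): put_request()
    # fetches the resource first so the PUT can carry an If-Match header with
    # the current ETag. The URI and payload below are purely illustrative.
    #
    #   result = utils.put_request(utils.root_uri + '/redfish/v1/Systems/1',
    #                              {'AssetTag': 'web01'})
    #   if result['ret'] is False:
    #       ...handle the error dict returned above...
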
    def delete_request(self, uri, pyld=None):
        req_headers = dict(DELETE_HEADERS)
        username, password, basic_auth = self._auth_params(req_headers)

@@ -321,23 +374,23 @@ class RedfishUtils(object):
        return {'ret': True}

    def _find_sessionservice_resource(self):
        # Get the service root
        response = self.get_request(self.root_uri + self.service_root)
        if response['ret'] is False:
            return response
        data = response['data']
        if 'SessionService' not in data:

        # Check for the session service and session collection. Well-known
        # defaults are provided in the constructor, but services that predate
        # Redfish 1.6.0 might contain different values.
        self.session_service_uri = data.get('SessionService', {}).get('@odata.id')
        self.sessions_uri = data.get('Links', {}).get('Sessions', {}).get('@odata.id')

        # If one isn't found, return an error
        if self.session_service_uri is None:
            return {'ret': False, 'msg': "SessionService resource not found"}
        else:
            session_service = data["SessionService"]["@odata.id"]
            self.session_service_uri = session_service
            response = self.get_request(self.root_uri + session_service)
            if response['ret'] is False:
                return response
            data = response['data']
            sessions = data['Sessions']['@odata.id']
            if sessions[-1:] == '/':
                sessions = sessions[:-1]
            self.sessions_uri = sessions
        if self.sessions_uri is None:
            return {'ret': False, 'msg': "SessionCollection resource not found"}
        return {'ret': True}

    def _get_resource_uri_by_id(self, uris, id_prop):
@@ -1384,11 +1437,82 @@ class RedfishUtils(object):
        else:
            return self._software_inventory(self.software_uri)

    def _operation_results(self, response, data, handle=None):
        """
        Builds the results for an operation from task, job, or action response.

        :param response: HTTP response object
        :param data: HTTP response data
        :param handle: The task or job handle that was last used
        :return: dict containing operation results
        """

        operation_results = {'status': None, 'messages': [], 'handle': None, 'ret': True,
                             'resets_requested': []}

        if response.status == 204:
            # No content; successful, but nothing to return
            # Use the Redfish "Completed" enum from TaskState for the operation status
            operation_results['status'] = 'Completed'
        else:
            # Parse the response body for details

            # Determine the next handle, if any
            operation_results['handle'] = handle
            if response.status == 202:
                # Task generated; get the task monitor URI
                operation_results['handle'] = response.getheader('Location', handle)

            # Pull out the status and messages based on the body format
            if data is not None:
                response_type = data.get('@odata.type', '')
                if response_type.startswith('#Task.') or response_type.startswith('#Job.'):
                    # Task and Job have similar enough structures to treat the same
                    operation_results['status'] = data.get('TaskState', data.get('JobState'))
                    operation_results['messages'] = data.get('Messages', [])
                else:
                    # Error response body, which is a bit of a misnomer since it's used in successful action responses
                    operation_results['status'] = 'Completed'
                    if response.status >= 400:
                        operation_results['status'] = 'Exception'
                    operation_results['messages'] = data.get('error', {}).get('@Message.ExtendedInfo', [])
            else:
                # No response body (or malformed); build based on status code
                operation_results['status'] = 'Completed'
                if response.status == 202:
                    operation_results['status'] = 'New'
                elif response.status >= 400:
                    operation_results['status'] = 'Exception'

            # Clear out the handle if the operation is complete
            if operation_results['status'] in ['Completed', 'Cancelled', 'Exception', 'Killed']:
                operation_results['handle'] = None

            # Scan the messages to see if next steps are needed
            for message in operation_results['messages']:
                message_id = message['MessageId']

                if message_id.startswith('Update.1.') and message_id.endswith('.OperationTransitionedToJob'):
                    # Operation rerouted to a job; update the status and handle
                    operation_results['status'] = 'New'
                    operation_results['handle'] = message['MessageArgs'][0]
                    operation_results['resets_requested'] = []
                    # No need to process other messages in this case
                    break

                if message_id.startswith('Base.1.') and message_id.endswith('.ResetRequired'):
                    # A reset to some device is needed to continue the update
                    reset = {'uri': message['MessageArgs'][0], 'type': message['MessageArgs'][1]}
                    operation_results['resets_requested'].append(reset)

        return operation_results

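    # Illustrative sketch (assumed payload, not taken from this diff): a task
    # message such as
    #
    #   {"MessageId": "Base.1.8.ResetRequired",
    #    "MessageArgs": ["/redfish/v1/Systems/1", "GracefulRestart"]}
    #
    # would be folded into operation_results['resets_requested'] as
    #   {'uri': '/redfish/v1/Systems/1', 'type': 'GracefulRestart'}.
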
    def simple_update(self, update_opts):
        image_uri = update_opts.get('update_image_uri')
        protocol = update_opts.get('update_protocol')
        targets = update_opts.get('update_targets')
        creds = update_opts.get('update_creds')
        apply_time = update_opts.get('update_apply_time')

        if not image_uri:
            return {'ret': False, 'msg':
@@ -1439,11 +1563,65 @@ class RedfishUtils(object):
            payload["Username"] = creds.get('username')
        if creds.get('password'):
            payload["Password"] = creds.get('password')
        if apply_time:
            payload["@Redfish.OperationApplyTime"] = apply_time
        response = self.post_request(self.root_uri + update_uri, payload)
        if response['ret'] is False:
            return response
        return {'ret': True, 'changed': True,
                'msg': "SimpleUpdate requested"}
                'msg': "SimpleUpdate requested",
                'update_status': self._operation_results(response['resp'], response['data'])}

    def get_update_status(self, update_handle):
        """
        Gets the status of an update operation.

        :param update_handle: The task or job handle tracking the update
        :return: dict containing the response of the update status
        """

        if not update_handle:
            return {'ret': False, 'msg': 'Must provide a handle tracking the update.'}

        # Get the task or job tracking the update
        response = self.get_request(self.root_uri + update_handle)
        if response['ret'] is False:
            return response

        # Inspect the response to build the update status
        return self._operation_results(response['resp'], response['data'], update_handle)

    def perform_requested_update_operations(self, update_handle):
        """
        Performs requested operations to allow the update to continue.

        :param update_handle: The task or job handle tracking the update
        :return: dict containing the result of the operations
        """

        # Get the current update status
        update_status = self.get_update_status(update_handle)
        if update_status['ret'] is False:
            return update_status

        changed = False

        # Perform any requested updates
        for reset in update_status['resets_requested']:
            resp = self.post_request(self.root_uri + reset['uri'], {'ResetType': reset['type']})
            if resp['ret'] is False:
                # Override the 'changed' indicator since other resets may have
                # been successful
                resp['changed'] = changed
                return resp
            changed = True

        msg = 'No operations required for the update'
        if changed:
            # Will need to consider finetuning this message if the scope of the
            # requested operations grows over time
            msg = 'One or more components reset to continue the update'
        return {'ret': True, 'changed': changed, 'msg': msg}

    def get_bios_attributes(self, systems_uri):
        result = {}
@@ -2985,3 +3163,57 @@ class RedfishUtils(object):
        if resp['ret'] and resp['changed']:
            resp['msg'] = 'Modified session service'
        return resp

    def verify_bios_attributes(self, bios_attributes):
        # This method verifies BIOS attributes against the provided input
        server_bios = self.get_multi_bios_attributes()
        if server_bios["ret"] is False:
            return server_bios

        bios_dict = {}
        wrong_param = {}

        # Verify bios_attributes with BIOS settings available in the server
        for key, value in bios_attributes.items():
            if key in server_bios["entries"][0][1]:
                if server_bios["entries"][0][1][key] != value:
                    bios_dict.update({key: value})
            else:
                wrong_param.update({key: value})

        if wrong_param:
            return {
                "ret": False,
                "msg": "Wrong parameters are provided: %s" % wrong_param
            }

        if bios_dict:
            return {
                "ret": False,
                "msg": "BIOS parameters are not matching: %s" % bios_dict
            }

        return {
            "ret": True,
            "changed": False,
            "msg": "BIOS verification completed"
        }

    def enable_secure_boot(self):
        # This function enables Secure Boot on an OOB controller

        response = self.get_request(self.root_uri + self.systems_uri)
        if response["ret"] is False:
            return response

        server_details = response["data"]
        secure_boot_url = server_details["SecureBoot"]["@odata.id"]

        response = self.get_request(self.root_uri + secure_boot_url)
        if response["ret"] is False:
            return response

        body = {}
        body["SecureBootEnable"] = True

        return self.patch_request(self.root_uri + secure_boot_url, body, check_pyld=True)

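    # Illustrative sketch (assumed values, not taken from this diff): with the
    # server reporting {"BootMode": "Uefi"} among its BIOS settings, a call like
    #
    #   utils.verify_bios_attributes({"BootMode": "Uefi"})
    #
    # returns {'ret': True, 'changed': False, 'msg': 'BIOS verification completed'},
    # while a mismatched value or an unknown attribute name makes it return ret=False.
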
@@ -84,6 +84,10 @@ def parse_pagination_link(header):


def filter_sensitive_attributes(container, attributes):
    '''
    WARNING: This function is effectively private, **do not use it**!
    It will be removed or renamed once changing its name no longer triggers a pylint bug.
    '''
    for attr in attributes:
        container[attr] = "SENSITIVE_VALUE"

plugins/module_utils/ssh.py  (new file, 21 lines)
@@ -0,0 +1,21 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Björn Andersson
# Copyright (c) 2021, Ansible Project
# Copyright (c) 2021, Abhijeet Kasurde <akasurde@redhat.com>
# Copyright (c) 2022, Alexei Znamensky <russoz@gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import absolute_import, division, print_function
__metaclass__ = type


import os


def determine_config_file(user, config_file):
    if user:
        config_file = os.path.join(os.path.expanduser('~%s' % user), '.ssh', 'config')
    elif config_file is None:
        config_file = '/etc/ssh/ssh_config'
    return config_file

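# Illustrative sketch (not part of the file): the lookup falls back to the
# system-wide config only when neither a user nor an explicit path is given.
#
#   determine_config_file('alice', None)   # -> '/home/alice/.ssh/config' on a typical Linux host
#   determine_config_file(None, None)      # -> '/etc/ssh/ssh_config'
#   determine_config_file(None, '/tmp/c')  # -> '/tmp/c'
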
@@ -13,10 +13,10 @@ __metaclass__ = type
from ansible.module_utils.six import raise_from

try:
    from ansible.module_utils.compat.version import LooseVersion
    from ansible.module_utils.compat.version import LooseVersion  # noqa: F401, pylint: disable=unused-import
except ImportError:
    try:
        from distutils.version import LooseVersion
        from distutils.version import LooseVersion  # noqa: F401, pylint: disable=unused-import
    except ImportError as exc:
        msg = 'To use this plugin or module with ansible-core 2.11, you need to use Python < 3.12 with distutils.version present'
        raise_from(ImportError(msg), exc)

@@ -19,6 +19,13 @@ description:
  - If waiting for migrations is not desired, simply poll until port 3000 is
    available or asinfo -v status returns ok.
author: "Albert Autin (@Alb0t)"
extends_documentation_fragment:
  - community.general.attributes
attributes:
  check_mode:
    support: full
  diff_mode:
    support: none
options:
  host:
    description:
@@ -18,6 +18,13 @@ author:
short_description: Notify airbrake about app deployments
description:
  - Notify airbrake about app deployments (see U(https://airbrake.io/docs/api/#deploys-v4)).
extends_documentation_fragment:
  - community.general.attributes
attributes:
  check_mode:
    support: full
  diff_mode:
    support: none
options:
  project_id:
    description:
@@ -16,6 +16,13 @@ module: aix_devices
short_description: Manages AIX devices
description:
  - This module discovers, defines, removes and modifies attributes of AIX devices.
extends_documentation_fragment:
  - community.general.attributes
attributes:
  check_mode:
    support: full
  diff_mode:
    support: none
options:
  attributes:
    description:
@@ -19,6 +19,13 @@ description:
  - This module creates, removes, mounts and unmounts LVM and NFS file systems for
    AIX using C(/etc/filesystems).
  - For LVM file systems it is possible to resize a file system.
extends_documentation_fragment:
  - community.general.attributes
attributes:
  check_mode:
    support: full
  diff_mode:
    support: none
options:
  account_subsystem:
    description:
@@ -11,11 +11,18 @@ __metaclass__ = type
DOCUMENTATION = r'''
---
author:
  - Joris Weijters (@molekuul)
  - Joris Weijters (@molekuul)
module: aix_inittab
short_description: Manages the inittab on AIX
description:
  - Manages the inittab on AIX.
extends_documentation_fragment:
  - community.general.attributes
attributes:
  check_mode:
    support: full
  diff_mode:
    support: none
options:
  name:
    description: