diff --git a/.github/BOTMETA.yml b/.github/BOTMETA.yml index bfc1ca4da9..23c2b7258b 100644 --- a/.github/BOTMETA.yml +++ b/.github/BOTMETA.yml @@ -111,6 +111,9 @@ files: $connections/lxd.py: labels: lxd maintainers: mattclay + $connections/proxmox_pct_remote.py: + labels: proxmox + maintainers: mietzen $connections/qubes.py: maintainers: kushaldas $connections/saltstack.py: @@ -161,6 +164,14 @@ files: maintainers: Ajpantuso $filters/jc.py: maintainers: kellyjonbrazil + $filters/json_diff.yml: + maintainers: numo68 + $filters/json_patch.py: + maintainers: numo68 + $filters/json_patch.yml: + maintainers: numo68 + $filters/json_patch_recipe.yml: + maintainers: numo68 $filters/json_query.py: {} $filters/keep_keys.py: maintainers: vbotka @@ -296,6 +307,8 @@ files: $lookups/onepassword_raw.py: ignore: scottsb maintainers: azenk + $lookups/onepassword_ssh_key.py: + maintainers: mohammedbabelly20 $lookups/passwordstore.py: {} $lookups/random_pet.py: maintainers: Akasurde @@ -1135,6 +1148,8 @@ files: maintainers: helldorado krauthosting $modules/proxmox_backup.py: maintainers: IamLunchbox + $modules/proxmox_backup_info.py: + maintainers: raoufnezhad mmayabi $modules/proxmox_nic.py: maintainers: Kogelvis krauthosting $modules/proxmox_node_info.py: diff --git a/changelogs/fragments/9225-proxmox-module-refactoring.yml b/changelogs/fragments/9225-proxmox-module-refactoring.yml new file mode 100644 index 0000000000..4e49037ab2 --- /dev/null +++ b/changelogs/fragments/9225-proxmox-module-refactoring.yml @@ -0,0 +1,11 @@ +minor_changes: + - proxmox - refactors the proxmox module (https://github.com/ansible-collections/community.general/pull/9225). + +bugfixes: + - proxmox - fixes idempotency of template conversions (https://github.com/ansible-collections/community.general/pull/9225, https://github.com/ansible-collections/community.general/issues/8811). 
+ - proxmox - fixes issues with disk_volume variable (https://github.com/ansible-collections/community.general/pull/9225, https://github.com/ansible-collections/community.general/issues/9065). + - proxmox - fixes incorrect parsing for bind-only mounts (https://github.com/ansible-collections/community.general/pull/9225, https://github.com/ansible-collections/community.general/issues/8982). + - proxmox module utils - fixes ignoring of ``choose_first_if_multiple`` argument in ``get_vmid`` (https://github.com/ansible-collections/community.general/pull/9225). + +deprecated_features: + - proxmox - removes default value ``false`` of ``update`` parameter. This will be changed to a default of ``true`` in community.general 11.0.0 (https://github.com/ansible-collections/community.general/pull/9225). diff --git a/changelogs/fragments/9494-keycloak-modules-retry-request-on-authentication-error.yaml b/changelogs/fragments/9494-keycloak-modules-retry-request-on-authentication-error.yaml new file mode 100644 index 0000000000..7e5c71ade1 --- /dev/null +++ b/changelogs/fragments/9494-keycloak-modules-retry-request-on-authentication-error.yaml @@ -0,0 +1,2 @@ +minor_changes: + - keycloak_* modules - ``refresh_token`` parameter added. When multiple authentication parameters are provided (``token``, ``refresh_token``, and ``auth_username``/``auth_password``), modules will now automatically retry requests upon authentication errors (401), using in order the token, refresh token, and username/password (https://github.com/ansible-collections/community.general/pull/9494). 
diff --git a/changelogs/fragments/9547-one_template-filter.yml b/changelogs/fragments/9547-one_template-filter.yml new file mode 100644 index 0000000000..f67575d640 --- /dev/null +++ b/changelogs/fragments/9547-one_template-filter.yml @@ -0,0 +1,2 @@ +minor_changes: + - one_template - adds ``filter`` option for retrieving templates which are not owned by the user (https://github.com/ansible-collections/community.general/pull/9547, https://github.com/ansible-collections/community.general/issues/9278). diff --git a/changelogs/fragments/9554-add-cpanm-option_with-recommends-and-suggests.yml b/changelogs/fragments/9554-add-cpanm-option_with-recommends-and-suggests.yml new file mode 100644 index 0000000000..5cdaf24e9e --- /dev/null +++ b/changelogs/fragments/9554-add-cpanm-option_with-recommends-and-suggests.yml @@ -0,0 +1,3 @@ +minor_changes: + - cpanm - enable usage of option ``--with-recommends`` (https://github.com/ansible-collections/community.general/issues/9554, https://github.com/ansible-collections/community.general/pull/9555). + - cpanm - enable usage of option ``--with-suggests`` (https://github.com/ansible-collections/community.general/pull/9555). diff --git a/changelogs/fragments/9570-feat-nmcli-add-fail-over-mac-parameter.yml b/changelogs/fragments/9570-feat-nmcli-add-fail-over-mac-parameter.yml new file mode 100644 index 0000000000..c46526e2f6 --- /dev/null +++ b/changelogs/fragments/9570-feat-nmcli-add-fail-over-mac-parameter.yml @@ -0,0 +1,2 @@ +minor_changes: + - nmcli - add an option ``fail_over_mac`` (https://github.com/ansible-collections/community.general/issues/9570, https://github.com/ansible-collections/community.general/pull/9571). 
\ No newline at end of file diff --git a/changelogs/fragments/9573-iocage-inventory-sudo.yml b/changelogs/fragments/9573-iocage-inventory-sudo.yml new file mode 100644 index 0000000000..f636f34055 --- /dev/null +++ b/changelogs/fragments/9573-iocage-inventory-sudo.yml @@ -0,0 +1,2 @@ +minor_changes: + - iocage inventory plugin - the new parameter ``sudo`` of the plugin lets the command ``iocage list -l`` run as root on the iocage host. This is needed to get the IPv4 of a running DHCP jail (https://github.com/ansible-collections/community.general/issues/9572, https://github.com/ansible-collections/community.general/pull/9573). diff --git a/changelogs/fragments/9577-mh-delegate-debug.yml b/changelogs/fragments/9577-mh-delegate-debug.yml new file mode 100644 index 0000000000..9d50f68806 --- /dev/null +++ b/changelogs/fragments/9577-mh-delegate-debug.yml @@ -0,0 +1,6 @@ +minor_changes: + - MH module utils - delegate ``debug`` to the underlying ``AnsibleModule`` instance or issue a warning if an attribute already exists with that name (https://github.com/ansible-collections/community.general/pull/9577). +deprecated_features: + - > + MH module utils - attribute ``debug`` definition in subclasses of MH is now deprecated, as that name will become a delegation to ``AnsibleModule`` in + community.general 12.0.0, and any such attribute will be overridden by that delegation in that version (https://github.com/ansible-collections/community.general/pull/9577). diff --git a/changelogs/fragments/9578-redhat_subscription-no-remove-on-unregister.yml b/changelogs/fragments/9578-redhat_subscription-no-remove-on-unregister.yml new file mode 100644 index 0000000000..1403b2faa1 --- /dev/null +++ b/changelogs/fragments/9578-redhat_subscription-no-remove-on-unregister.yml @@ -0,0 +1,7 @@ +bugfixes: + - | + redhat_subscription - do not try to unsubscribe (i.e. 
remove subscriptions) + when unregistering a system: newer versions of subscription-manager, as + available in EL 10 and Fedora 41+, do not support entitlements anymore, and + thus unsubscribing will fail + (https://github.com/ansible-collections/community.general/pull/9578). diff --git a/changelogs/fragments/9579-with-open.yml b/changelogs/fragments/9579-with-open.yml new file mode 100644 index 0000000000..449ba8b1b1 --- /dev/null +++ b/changelogs/fragments/9579-with-open.yml @@ -0,0 +1,11 @@ +minor_changes: + - known_hosts - open file using ``open()`` as a context manager (https://github.com/ansible-collections/community.general/pull/9579). + - cloud_init_data_facts - open file using ``open()`` as a context manager (https://github.com/ansible-collections/community.general/pull/9579). + - cronvar - open file using ``open()`` as a context manager (https://github.com/ansible-collections/community.general/pull/9579). + - crypttab - open file using ``open()`` as a context manager (https://github.com/ansible-collections/community.general/pull/9579). + - parted - open file using ``open()`` as a context manager (https://github.com/ansible-collections/community.general/pull/9579). + - pulp_repo - open file using ``open()`` as a context manager (https://github.com/ansible-collections/community.general/pull/9579). + - redhat_subscription - open file using ``open()`` as a context manager (https://github.com/ansible-collections/community.general/pull/9579). + - solaris_zone - open file using ``open()`` as a context manager (https://github.com/ansible-collections/community.general/pull/9579). + - sorcery - open file using ``open()`` as a context manager (https://github.com/ansible-collections/community.general/pull/9579). + - timezone - open file using ``open()`` as a context manager (https://github.com/ansible-collections/community.general/pull/9579). 
diff --git a/changelogs/fragments/9582-add-support-for-vrrp.yml b/changelogs/fragments/9582-add-support-for-vrrp.yml new file mode 100644 index 0000000000..473ccbf258 --- /dev/null +++ b/changelogs/fragments/9582-add-support-for-vrrp.yml @@ -0,0 +1,2 @@ +minor_changes: + - ufw - add support for ``vrrp`` protocol (https://github.com/ansible-collections/community.general/issues/9562, https://github.com/ansible-collections/community.general/pull/9582). diff --git a/changelogs/fragments/9583-py3-imports-actionbecomecachecallback.yml b/changelogs/fragments/9583-py3-imports-actionbecomecachecallback.yml new file mode 100644 index 0000000000..3838e52ae9 --- /dev/null +++ b/changelogs/fragments/9583-py3-imports-actionbecomecachecallback.yml @@ -0,0 +1,43 @@ +minor_changes: + - iptables_state action plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - shutdown action plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - doas become plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - dzdo become plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - ksu become plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - machinectl become plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - pbrun become plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - pfexec become plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - pmrun become plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). 
+ - run0 become plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - sesu become plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - sudosu become plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - memcached cache plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - pickle cache plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - redis cache plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - yaml cache plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - cgroup_memory_recap callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - context_demo callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - counter_enabled callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - default_without_diff callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - dense callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - diy callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - elastic callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). 
+ - jabber callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - log_plays callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - loganalytics callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - logdna callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - logentries callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - logstash callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - mail callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - nrdp callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - null callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - opentelemetry callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - say callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - selective callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - slack callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - splunk callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). 
+ - sumologic callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - syslog_json callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - timestamp callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - unixy callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). + - yaml callback plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9583). diff --git a/changelogs/fragments/9584-py3-imports-connectioninventory.yml b/changelogs/fragments/9584-py3-imports-connectioninventory.yml new file mode 100644 index 0000000000..0b50cdf052 --- /dev/null +++ b/changelogs/fragments/9584-py3-imports-connectioninventory.yml @@ -0,0 +1,26 @@ +minor_changes: + - chroot connection plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - funcd connection plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - incus connection plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - iocage connection plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - jail connection plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - lxc connection plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - lxd connection plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). 
+ - proxmox_pct_remote connection plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - qubes connection plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - saltstack connection plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - zone connection plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - cobbler inventory plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - gitlab_runners inventory plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - icinga2 inventory plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - iocage inventory plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - linode inventory plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - lxd inventory plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - nmap inventory plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - online inventory plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - opennebula inventory plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - proxmox inventory plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). 
+ - scaleway inventory plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - stackpath_compute inventory plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - virtualbox inventory plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). + - xen_orchestra inventory plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9584). diff --git a/changelogs/fragments/9585-py3-imports-filter.yml b/changelogs/fragments/9585-py3-imports-filter.yml new file mode 100644 index 0000000000..88644996b2 --- /dev/null +++ b/changelogs/fragments/9585-py3-imports-filter.yml @@ -0,0 +1,22 @@ +minor_changes: + - counter filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - crc32 filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - dict filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - dict_kv filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - from_csv filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - from_ini filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - groupby_as_dict filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - hashids filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). 
+ - jc filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - json_query filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - keep_keys filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - lists filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - lists_mergeby filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - random_mac filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - remove_keys filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - replace_keys filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - reveal_ansible_type filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - time filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - to_ini filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - unicode_normalize filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). + - version_sort filter plugin - adjust standard preamble for Python 3 (https://github.com/ansible-collections/community.general/pull/9585). 
diff --git a/changelogs/fragments/9586-allow-transition-id-jira.yml b/changelogs/fragments/9586-allow-transition-id-jira.yml new file mode 100644 index 0000000000..3c10231376 --- /dev/null +++ b/changelogs/fragments/9586-allow-transition-id-jira.yml @@ -0,0 +1,2 @@ +minor_changes: + - jira - transition operation now has ``status_id`` to directly reference wanted transition (https://github.com/ansible-collections/community.general/pull/9602). diff --git a/changelogs/fragments/9598-snap-version.yml b/changelogs/fragments/9598-snap-version.yml new file mode 100644 index 0000000000..d6b5ebb67c --- /dev/null +++ b/changelogs/fragments/9598-snap-version.yml @@ -0,0 +1,3 @@ +minor_changes: + - snap - add return value ``version`` (https://github.com/ansible-collections/community.general/pull/9598). + - snap_alias - add return value ``version`` (https://github.com/ansible-collections/community.general/pull/9598). diff --git a/changelogs/fragments/9599-apache2-mod-proxy-revamp1.yml b/changelogs/fragments/9599-apache2-mod-proxy-revamp1.yml new file mode 100644 index 0000000000..8f72117d52 --- /dev/null +++ b/changelogs/fragments/9599-apache2-mod-proxy-revamp1.yml @@ -0,0 +1,2 @@ +minor_changes: + - apache2_mod_proxy - refactor repeated code into method (https://github.com/ansible-collections/community.general/pull/9599). diff --git a/changelogs/fragments/9600-apache2-mod-proxy-revamp2.yml b/changelogs/fragments/9600-apache2-mod-proxy-revamp2.yml new file mode 100644 index 0000000000..c4e5eea2d3 --- /dev/null +++ b/changelogs/fragments/9600-apache2-mod-proxy-revamp2.yml @@ -0,0 +1,2 @@ +minor_changes: + - apache2_mod_proxy - change type of ``state`` to a list of strings. No change for the users (https://github.com/ansible-collections/community.general/pull/9600). 
diff --git a/changelogs/fragments/9601-proxmox-template-support-for-checksums.yml b/changelogs/fragments/9601-proxmox-template-support-for-checksums.yml new file mode 100644 index 0000000000..40ffbfbec0 --- /dev/null +++ b/changelogs/fragments/9601-proxmox-template-support-for-checksums.yml @@ -0,0 +1,2 @@ +minor_changes: + - proxmox_template - add support for checksum validation with new options ``checksum_algorithm`` and ``checksum`` (https://github.com/ansible-collections/community.general/issues/9553, https://github.com/ansible-collections/community.general/pull/9601). \ No newline at end of file diff --git a/changelogs/fragments/9608-apache2-mod-proxy-revamp3.yml b/changelogs/fragments/9608-apache2-mod-proxy-revamp3.yml new file mode 100644 index 0000000000..4641ae1d9e --- /dev/null +++ b/changelogs/fragments/9608-apache2-mod-proxy-revamp3.yml @@ -0,0 +1,2 @@ +minor_changes: + - apache2_mod_proxy - improve readability when using results from ``fetch_url()`` (https://github.com/ansible-collections/community.general/pull/9608). diff --git a/changelogs/fragments/9609-apache2-mod-proxy-revamp4.yml b/changelogs/fragments/9609-apache2-mod-proxy-revamp4.yml new file mode 100644 index 0000000000..009a2a582d --- /dev/null +++ b/changelogs/fragments/9609-apache2-mod-proxy-revamp4.yml @@ -0,0 +1,2 @@ +minor_changes: + - apache2_mod_proxy - better handling of regexp extraction (https://github.com/ansible-collections/community.general/pull/9609). diff --git a/changelogs/fragments/9612-apache2-mod-proxy-revamp5.yml b/changelogs/fragments/9612-apache2-mod-proxy-revamp5.yml new file mode 100644 index 0000000000..57fdccbf3e --- /dev/null +++ b/changelogs/fragments/9612-apache2-mod-proxy-revamp5.yml @@ -0,0 +1,2 @@ +minor_changes: + - apache2_mod_proxy - use ``deps`` to handle dependencies (https://github.com/ansible-collections/community.general/pull/9612). 
diff --git a/changelogs/fragments/9614-apache2-mod-proxy-revamp7.yml b/changelogs/fragments/9614-apache2-mod-proxy-revamp7.yml new file mode 100644 index 0000000000..e808413318 --- /dev/null +++ b/changelogs/fragments/9614-apache2-mod-proxy-revamp7.yml @@ -0,0 +1,3 @@ +minor_changes: + - apache2_mod_proxy - simplified and improved string manipulation (https://github.com/ansible-collections/community.general/pull/9614). + - apache2_mod_proxy - remove unused parameter and code from ``Balancer`` constructor (https://github.com/ansible-collections/community.general/pull/9614). diff --git a/changelogs/fragments/9621-keycloak_client-sanitize-saml-encryption-key.yml b/changelogs/fragments/9621-keycloak_client-sanitize-saml-encryption-key.yml new file mode 100644 index 0000000000..5d3401e430 --- /dev/null +++ b/changelogs/fragments/9621-keycloak_client-sanitize-saml-encryption-key.yml @@ -0,0 +1,2 @@ +security_fixes: + - keycloak_client - Sanitize ``saml.encryption.private.key`` so it does not show in the logs (https://github.com/ansible-collections/community.general/pull/9621). \ No newline at end of file diff --git a/changelogs/fragments/9623-pipx-global-latest.yml b/changelogs/fragments/9623-pipx-global-latest.yml new file mode 100644 index 0000000000..c939ea5847 --- /dev/null +++ b/changelogs/fragments/9623-pipx-global-latest.yml @@ -0,0 +1,2 @@ +bugfixes: + - pipx - honor option ``global`` when ``state=latest`` (https://github.com/ansible-collections/community.general/pull/9623). diff --git a/changelogs/fragments/9633-onepassword_ssh_key.yml b/changelogs/fragments/9633-onepassword_ssh_key.yml new file mode 100644 index 0000000000..bd4676319e --- /dev/null +++ b/changelogs/fragments/9633-onepassword_ssh_key.yml @@ -0,0 +1,2 @@ +minor_changes: + - "onepassword_ssh_key - refactor to move code to lookup class (https://github.com/ansible-collections/community.general/pull/9633)." 
diff --git a/changelogs/fragments/9645-proxmox-fix-pubkey.yml b/changelogs/fragments/9645-proxmox-fix-pubkey.yml new file mode 100644 index 0000000000..786daf88df --- /dev/null +++ b/changelogs/fragments/9645-proxmox-fix-pubkey.yml @@ -0,0 +1,3 @@ +bugfixes: + - proxmox - fixes a typo in the translation of the ``pubkey`` parameter to proxmox' ``ssh-public-keys`` (https://github.com/ansible-collections/community.general/issues/9642, https://github.com/ansible-collections/community.general/pull/9645). + - proxmox - adds the ``pubkey`` parameter (back to) the ``update`` state (https://github.com/ansible-collections/community.general/issues/9642, https://github.com/ansible-collections/community.general/pull/9645). diff --git a/changelogs/fragments/9649-cloudflare_dns-fix-crash-when-deleting-record.yml b/changelogs/fragments/9649-cloudflare_dns-fix-crash-when-deleting-record.yml new file mode 100644 index 0000000000..c345947575 --- /dev/null +++ b/changelogs/fragments/9649-cloudflare_dns-fix-crash-when-deleting-record.yml @@ -0,0 +1,2 @@ +bugfixes: + - cloudflare_dns - fix crash when deleting a DNS record or when updating a record with ``solo=true`` (https://github.com/ansible-collections/community.general/issues/9652, https://github.com/ansible-collections/community.general/pull/9649). diff --git a/docs/docsite/rst/guide_modulehelper.rst b/docs/docsite/rst/guide_modulehelper.rst index e3c7a124cf..1f8d305643 100644 --- a/docs/docsite/rst/guide_modulehelper.rst +++ b/docs/docsite/rst/guide_modulehelper.rst @@ -468,6 +468,11 @@ Additionally, MH will also delegate: - ``diff_mode`` to ``self.module._diff`` - ``verbosity`` to ``self.module._verbosity`` +Starting in community.general 10.3.0, MH will also delegate the method ``debug`` to ``self.module``. +If any existing module already has a ``debug`` attribute defined, a warning message will be generated, +requesting it to be renamed. 
Upon the release of community.general 12.0.0, the delegation will be +preemptive and will override any existing method or property in the subclasses. + Decorators """""""""" diff --git a/galaxy.yml b/galaxy.yml index 2373f46167..fa2246c419 100644 --- a/galaxy.yml +++ b/galaxy.yml @@ -5,7 +5,7 @@ namespace: community name: general -version: 10.3.0 +version: 10.4.0 readme: README.md authors: - Ansible (https://github.com/ansible) diff --git a/meta/runtime.yml b/meta/runtime.yml index 1106260176..d6ffdbec57 100644 --- a/meta/runtime.yml +++ b/meta/runtime.yml @@ -17,6 +17,7 @@ action_groups: proxmox: - proxmox - proxmox_backup + - proxmox_backup_info - proxmox_disk - proxmox_domain_info - proxmox_group_info diff --git a/plugins/action/iptables_state.py b/plugins/action/iptables_state.py index 39ee85d778..595d0ece66 100644 --- a/plugins/action/iptables_state.py +++ b/plugins/action/iptables_state.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import absolute_import, division, print_function -__metaclass__ = type +from __future__ import annotations import time diff --git a/plugins/action/shutdown.py b/plugins/action/shutdown.py index e5c2d15a5c..d5db878812 100644 --- a/plugins/action/shutdown.py +++ b/plugins/action/shutdown.py @@ -5,9 +5,8 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) +from __future__ import annotations -__metaclass__ = type from ansible.errors import AnsibleError, AnsibleConnectionFailure from ansible.module_utils.common.text.converters import to_native, to_text diff --git a/plugins/become/doas.py b/plugins/become/doas.py index 9011fa69e9..13aef19874 100644 --- a/plugins/become/doas.py +++ b/plugins/become/doas.py @@ -2,8 
+2,7 @@ # Copyright (c) 2018, Ansible Project # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: doas diff --git a/plugins/become/dzdo.py b/plugins/become/dzdo.py index 70e2e0d777..d890bede09 100644 --- a/plugins/become/dzdo.py +++ b/plugins/become/dzdo.py @@ -2,8 +2,7 @@ # Copyright (c) 2018, Ansible Project # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: dzdo diff --git a/plugins/become/ksu.py b/plugins/become/ksu.py index 88a29e7362..1c936e46da 100644 --- a/plugins/become/ksu.py +++ b/plugins/become/ksu.py @@ -2,8 +2,7 @@ # Copyright (c) 2018, Ansible Project # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: ksu diff --git a/plugins/become/machinectl.py b/plugins/become/machinectl.py index 1dd80bc80f..81a9d06f86 100644 --- a/plugins/become/machinectl.py +++ b/plugins/become/machinectl.py @@ -2,8 +2,7 @@ # Copyright (c) 2018, Ansible Project # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: machinectl diff --git 
a/plugins/become/pbrun.py b/plugins/become/pbrun.py index 56f3b2c315..92a49fe349 100644 --- a/plugins/become/pbrun.py +++ b/plugins/become/pbrun.py @@ -2,8 +2,7 @@ # Copyright (c) 2018, Ansible Project # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: pbrun diff --git a/plugins/become/pfexec.py b/plugins/become/pfexec.py index b23509281c..65690f359b 100644 --- a/plugins/become/pfexec.py +++ b/plugins/become/pfexec.py @@ -2,8 +2,7 @@ # Copyright (c) 2018, Ansible Project # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: pfexec diff --git a/plugins/become/pmrun.py b/plugins/become/pmrun.py index 64820ecde5..a2432d92ee 100644 --- a/plugins/become/pmrun.py +++ b/plugins/become/pmrun.py @@ -2,8 +2,7 @@ # Copyright (c) 2018, Ansible Project # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: pmrun diff --git a/plugins/become/run0.py b/plugins/become/run0.py index 0c0d6bfffb..39e4667e7a 100644 --- a/plugins/become/run0.py +++ b/plugins/become/run0.py @@ -3,9 +3,8 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import absolute_import, division, print_function +from __future__ 
import annotations -__metaclass__ = type DOCUMENTATION = r""" name: run0 diff --git a/plugins/become/sesu.py b/plugins/become/sesu.py index 6fe64e41f8..cf921e2e47 100644 --- a/plugins/become/sesu.py +++ b/plugins/become/sesu.py @@ -2,8 +2,7 @@ # Copyright (c) 2018, Ansible Project # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: sesu diff --git a/plugins/become/sudosu.py b/plugins/become/sudosu.py index fe85c9ee91..509b2725df 100644 --- a/plugins/become/sudosu.py +++ b/plugins/become/sudosu.py @@ -2,8 +2,7 @@ # Copyright (c) 2021, Ansible Project # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: sudosu diff --git a/plugins/cache/memcached.py b/plugins/cache/memcached.py index 94cc7058d8..9c4fbec595 100644 --- a/plugins/cache/memcached.py +++ b/plugins/cache/memcached.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Unknown (!UNKNOWN) diff --git a/plugins/cache/pickle.py b/plugins/cache/pickle.py index 60b1ea74e0..2f4b2b7b02 100644 --- a/plugins/cache/pickle.py +++ b/plugins/cache/pickle.py @@ -5,8 +5,7 @@ # SPDX-License-Identifier: GPL-3.0-or-later # Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type 
+from __future__ import annotations DOCUMENTATION = r""" name: pickle diff --git a/plugins/cache/redis.py b/plugins/cache/redis.py index 30d5364032..41f69d659f 100644 --- a/plugins/cache/redis.py +++ b/plugins/cache/redis.py @@ -3,8 +3,7 @@ # Copyright (c) 2017 Ansible Project # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Unknown (!UNKNOWN) diff --git a/plugins/cache/yaml.py b/plugins/cache/yaml.py index 88cdad2acb..676423d3b6 100644 --- a/plugins/cache/yaml.py +++ b/plugins/cache/yaml.py @@ -5,8 +5,7 @@ # SPDX-License-Identifier: GPL-3.0-or-later # Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: yaml diff --git a/plugins/callback/cgroup_memory_recap.py b/plugins/callback/cgroup_memory_recap.py index 079d1ccd08..b4099eae49 100644 --- a/plugins/callback/cgroup_memory_recap.py +++ b/plugins/callback/cgroup_memory_recap.py @@ -4,8 +4,7 @@ # SPDX-License-Identifier: GPL-3.0-or-later # Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Unknown (!UNKNOWN) diff --git a/plugins/callback/context_demo.py b/plugins/callback/context_demo.py index 96acd2f947..28be2882b6 100644 --- a/plugins/callback/context_demo.py +++ b/plugins/callback/context_demo.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations 
DOCUMENTATION = r""" author: Unknown (!UNKNOWN) diff --git a/plugins/callback/counter_enabled.py b/plugins/callback/counter_enabled.py index 845a7823e0..15fc85a01b 100644 --- a/plugins/callback/counter_enabled.py +++ b/plugins/callback/counter_enabled.py @@ -6,8 +6,7 @@ Counter enabled Ansible callback plugin (See DOCUMENTATION for more information) ''' -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Unknown (!UNKNOWN) diff --git a/plugins/callback/default_without_diff.py b/plugins/callback/default_without_diff.py index b6ef75ce91..3ea55100bf 100644 --- a/plugins/callback/default_without_diff.py +++ b/plugins/callback/default_without_diff.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: default_without_diff diff --git a/plugins/callback/dense.py b/plugins/callback/dense.py index cf1130e3d1..67cad4fd8f 100644 --- a/plugins/callback/dense.py +++ b/plugins/callback/dense.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: dense diff --git a/plugins/callback/diy.py b/plugins/callback/diy.py index 5e46563aa4..b3cd0cdbce 100644 --- a/plugins/callback/diy.py +++ b/plugins/callback/diy.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, 
print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: diy diff --git a/plugins/callback/elastic.py b/plugins/callback/elastic.py index 6866e52712..cfa66e53b9 100644 --- a/plugins/callback/elastic.py +++ b/plugins/callback/elastic.py @@ -2,8 +2,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Victor Martinez (@v1v) diff --git a/plugins/callback/jabber.py b/plugins/callback/jabber.py index 8f9d7cd833..10aa866142 100644 --- a/plugins/callback/jabber.py +++ b/plugins/callback/jabber.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Unknown (!UNKNOWN) diff --git a/plugins/callback/log_plays.py b/plugins/callback/log_plays.py index ed1ed39a72..483976acae 100644 --- a/plugins/callback/log_plays.py +++ b/plugins/callback/log_plays.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Unknown (!UNKNOWN) diff --git a/plugins/callback/loganalytics.py b/plugins/callback/loganalytics.py index fa891bd10c..224ce7efd8 100644 --- a/plugins/callback/loganalytics.py +++ b/plugins/callback/loganalytics.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or 
https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: loganalytics diff --git a/plugins/callback/logdna.py b/plugins/callback/logdna.py index 35c5b86c1e..90fe6d4465 100644 --- a/plugins/callback/logdna.py +++ b/plugins/callback/logdna.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Unknown (!UNKNOWN) diff --git a/plugins/callback/logentries.py b/plugins/callback/logentries.py index 0b3e2baaf0..bc5d7e03ce 100644 --- a/plugins/callback/logentries.py +++ b/plugins/callback/logentries.py @@ -3,8 +3,7 @@ # Copyright (c) 2017 Ansible Project # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Unknown (!UNKNOWN) diff --git a/plugins/callback/logstash.py b/plugins/callback/logstash.py index 088a84bf78..9d299e50ed 100644 --- a/plugins/callback/logstash.py +++ b/plugins/callback/logstash.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Yevhen Khmelenko (@ujenmr) diff --git a/plugins/callback/mail.py b/plugins/callback/mail.py index 7571993ea4..80bef26044 100644 --- 
a/plugins/callback/mail.py +++ b/plugins/callback/mail.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: mail diff --git a/plugins/callback/nrdp.py b/plugins/callback/nrdp.py index fa5d7cfd05..375876973a 100644 --- a/plugins/callback/nrdp.py +++ b/plugins/callback/nrdp.py @@ -4,8 +4,7 @@ # SPDX-License-Identifier: GPL-3.0-or-later # Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: nrdp diff --git a/plugins/callback/null.py b/plugins/callback/null.py index 0cc722f63b..b59389e39a 100644 --- a/plugins/callback/null.py +++ b/plugins/callback/null.py @@ -4,8 +4,7 @@ # SPDX-License-Identifier: GPL-3.0-or-later # Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Unknown (!UNKNOWN) diff --git a/plugins/callback/opentelemetry.py b/plugins/callback/opentelemetry.py index 38388e8270..039408f301 100644 --- a/plugins/callback/opentelemetry.py +++ b/plugins/callback/opentelemetry.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Victor Martinez (@v1v) diff --git a/plugins/callback/say.py b/plugins/callback/say.py index 94f49cc822..e6da490ec7 100644 --- a/plugins/callback/say.py +++ b/plugins/callback/say.py @@ -5,8 +5,7 @@ # SPDX-License-Identifier: 
GPL-3.0-or-later # Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Unknown (!UNKNOWN) diff --git a/plugins/callback/selective.py b/plugins/callback/selective.py index 27ac63658c..9cc805d3cd 100644 --- a/plugins/callback/selective.py +++ b/plugins/callback/selective.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Unknown (!UNKNOWN) diff --git a/plugins/callback/slack.py b/plugins/callback/slack.py index fda430b778..a9290f4a93 100644 --- a/plugins/callback/slack.py +++ b/plugins/callback/slack.py @@ -5,8 +5,7 @@ # SPDX-License-Identifier: GPL-3.0-or-later # Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Unknown (!UNKNOWN) diff --git a/plugins/callback/splunk.py b/plugins/callback/splunk.py index 05cca87a69..1d4534892a 100644 --- a/plugins/callback/splunk.py +++ b/plugins/callback/splunk.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: splunk diff --git a/plugins/callback/sumologic.py b/plugins/callback/sumologic.py index 108f324b29..5c310d1c50 100644 --- a/plugins/callback/sumologic.py +++ b/plugins/callback/sumologic.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or 
https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: sumologic diff --git a/plugins/callback/syslog_json.py b/plugins/callback/syslog_json.py index d1797455ac..9e5c78c90c 100644 --- a/plugins/callback/syslog_json.py +++ b/plugins/callback/syslog_json.py @@ -4,8 +4,7 @@ # SPDX-License-Identifier: GPL-3.0-or-later # Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Unknown (!UNKNOWN) diff --git a/plugins/callback/timestamp.py b/plugins/callback/timestamp.py index 89249c6562..a43ddcbef9 100644 --- a/plugins/callback/timestamp.py +++ b/plugins/callback/timestamp.py @@ -5,9 +5,8 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import absolute_import, division, print_function +from __future__ import annotations -__metaclass__ = type DOCUMENTATION = r""" name: timestamp diff --git a/plugins/callback/unixy.py b/plugins/callback/unixy.py index 8f80bf8f12..48f9b2d1f0 100644 --- a/plugins/callback/unixy.py +++ b/plugins/callback/unixy.py @@ -5,8 +5,7 @@ # SPDX-License-Identifier: GPL-3.0-or-later # Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: unixy diff --git a/plugins/callback/yaml.py b/plugins/callback/yaml.py index a68c590cf7..25c797e236 100644 --- a/plugins/callback/yaml.py +++ b/plugins/callback/yaml.py @@ -4,8 +4,7 @@ # SPDX-License-Identifier: GPL-3.0-or-later # Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from 
__future__ import annotations DOCUMENTATION = r""" author: Unknown (!UNKNOWN) diff --git a/plugins/connection/chroot.py b/plugins/connection/chroot.py index 7c4000ec5c..842c3f05d3 100644 --- a/plugins/connection/chroot.py +++ b/plugins/connection/chroot.py @@ -7,8 +7,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Maykel Moya (!UNKNOWN) diff --git a/plugins/connection/funcd.py b/plugins/connection/funcd.py index 31a9431ce1..ad01326aff 100644 --- a/plugins/connection/funcd.py +++ b/plugins/connection/funcd.py @@ -6,8 +6,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Michael Scherer (@mscherer) diff --git a/plugins/connection/incus.py b/plugins/connection/incus.py index 9d5a3e7a57..326e91ec38 100644 --- a/plugins/connection/incus.py +++ b/plugins/connection/incus.py @@ -5,8 +5,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Stéphane Graber (@stgraber) diff --git a/plugins/connection/iocage.py b/plugins/connection/iocage.py index 4d3f415194..35d5ab0658 100644 --- a/plugins/connection/iocage.py +++ b/plugins/connection/iocage.py @@ -7,8 +7,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # 
SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Stephan Lohse (!UNKNOWN) diff --git a/plugins/connection/jail.py b/plugins/connection/jail.py index 6e6c156330..6f06c96774 100644 --- a/plugins/connection/jail.py +++ b/plugins/connection/jail.py @@ -7,8 +7,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Ansible Core Team diff --git a/plugins/connection/lxc.py b/plugins/connection/lxc.py index 0744136192..a9e46cf56f 100644 --- a/plugins/connection/lxc.py +++ b/plugins/connection/lxc.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Joerg Thalheim (!UNKNOWN) diff --git a/plugins/connection/lxd.py b/plugins/connection/lxd.py index 1a071e1d8d..fc8b4ae474 100644 --- a/plugins/connection/lxd.py +++ b/plugins/connection/lxd.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Matt Clay (@mattclay) diff --git a/plugins/connection/proxmox_pct_remote.py b/plugins/connection/proxmox_pct_remote.py new file mode 100644 index 0000000000..c46090083e --- /dev/null +++ b/plugins/connection/proxmox_pct_remote.py @@ -0,0 
+1,857 @@ +# -*- coding: utf-8 -*- +# Derived from ansible/plugins/connection/paramiko_ssh.py (c) 2012, Michael DeHaan +# Copyright (c) 2024 Nils Stein (@mietzen) +# Copyright (c) 2024 Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import annotations + +DOCUMENTATION = r""" +author: Nils Stein (@mietzen) +name: proxmox_pct_remote +short_description: Run tasks in Proxmox LXC container instances using pct CLI via SSH +requirements: + - paramiko +description: + - Run commands or put/fetch files to an existing Proxmox LXC container using pct CLI via SSH. + - Uses the Python SSH implementation (Paramiko) to connect to the Proxmox host. +version_added: "10.3.0" +options: + remote_addr: + description: + - Address of the remote target. + default: inventory_hostname + type: string + vars: + - name: inventory_hostname + - name: ansible_host + - name: ansible_ssh_host + - name: ansible_paramiko_host + port: + description: Remote port to connect to. + type: int + default: 22 + ini: + - section: defaults + key: remote_port + - section: paramiko_connection + key: remote_port + env: + - name: ANSIBLE_REMOTE_PORT + - name: ANSIBLE_REMOTE_PARAMIKO_PORT + vars: + - name: ansible_port + - name: ansible_ssh_port + - name: ansible_paramiko_port + keyword: + - name: port + remote_user: + description: + - User to login/authenticate as. + - Can be set from the CLI via the C(--user) or C(-u) options. + type: string + vars: + - name: ansible_user + - name: ansible_ssh_user + - name: ansible_paramiko_user + env: + - name: ANSIBLE_REMOTE_USER + - name: ANSIBLE_PARAMIKO_REMOTE_USER + ini: + - section: defaults + key: remote_user + - section: paramiko_connection + key: remote_user + keyword: + - name: remote_user + password: + description: + - Secret used to either login the SSH server or as a passphrase for SSH keys that require it. 
+ - Can be set from the CLI via the C(--ask-pass) option. + type: string + vars: + - name: ansible_password + - name: ansible_ssh_pass + - name: ansible_ssh_password + - name: ansible_paramiko_pass + - name: ansible_paramiko_password + use_rsa_sha2_algorithms: + description: + - Whether or not to enable RSA SHA2 algorithms for pubkeys and hostkeys. + - On paramiko versions older than 2.9, this only affects hostkeys. + - For behavior matching paramiko<2.9 set this to V(false). + vars: + - name: ansible_paramiko_use_rsa_sha2_algorithms + ini: + - {key: use_rsa_sha2_algorithms, section: paramiko_connection} + env: + - {name: ANSIBLE_PARAMIKO_USE_RSA_SHA2_ALGORITHMS} + default: true + type: boolean + host_key_auto_add: + description: "Automatically add host keys to C(~/.ssh/known_hosts)." + env: + - name: ANSIBLE_PARAMIKO_HOST_KEY_AUTO_ADD + ini: + - key: host_key_auto_add + section: paramiko_connection + type: boolean + look_for_keys: + default: True + description: "Set to V(false) to disable searching for private key files in C(~/.ssh/)." + env: + - name: ANSIBLE_PARAMIKO_LOOK_FOR_KEYS + ini: + - {key: look_for_keys, section: paramiko_connection} + type: boolean + proxy_command: + default: "" + description: + - Proxy information for running the connection via a jumphost. + type: string + env: + - name: ANSIBLE_PARAMIKO_PROXY_COMMAND + ini: + - {key: proxy_command, section: paramiko_connection} + vars: + - name: ansible_paramiko_proxy_command + pty: + default: True + description: "C(sudo) usually requires a PTY, V(true) to give a PTY and V(false) to not give a PTY." + env: + - name: ANSIBLE_PARAMIKO_PTY + ini: + - section: paramiko_connection + key: pty + type: boolean + record_host_keys: + default: True + description: "Save the host keys to a file." 
+ env: + - name: ANSIBLE_PARAMIKO_RECORD_HOST_KEYS + ini: + - section: paramiko_connection + key: record_host_keys + type: boolean + host_key_checking: + description: "Set this to V(false) if you want to avoid host key checking by the underlying tools Ansible uses to connect to the host." + type: boolean + default: true + env: + - name: ANSIBLE_HOST_KEY_CHECKING + - name: ANSIBLE_SSH_HOST_KEY_CHECKING + - name: ANSIBLE_PARAMIKO_HOST_KEY_CHECKING + ini: + - section: defaults + key: host_key_checking + - section: paramiko_connection + key: host_key_checking + vars: + - name: ansible_host_key_checking + - name: ansible_ssh_host_key_checking + - name: ansible_paramiko_host_key_checking + use_persistent_connections: + description: "Toggles the use of persistence for connections." + type: boolean + default: False + env: + - name: ANSIBLE_USE_PERSISTENT_CONNECTIONS + ini: + - section: defaults + key: use_persistent_connections + banner_timeout: + type: float + default: 30 + description: + - Configures, in seconds, the amount of time to wait for the SSH + banner to be presented. This option is supported by paramiko + version 1.15.0 or newer. + ini: + - section: paramiko_connection + key: banner_timeout + env: + - name: ANSIBLE_PARAMIKO_BANNER_TIMEOUT + timeout: + type: int + default: 10 + description: Number of seconds until the plugin gives up on failing to establish a TCP connection. + ini: + - section: defaults + key: timeout + - section: ssh_connection + key: timeout + - section: paramiko_connection + key: timeout + env: + - name: ANSIBLE_TIMEOUT + - name: ANSIBLE_SSH_TIMEOUT + - name: ANSIBLE_PARAMIKO_TIMEOUT + vars: + - name: ansible_ssh_timeout + - name: ansible_paramiko_timeout + cli: + - name: timeout + lock_file_timeout: + type: int + default: 60 + description: Number of seconds until the plugin gives up on trying to write a lock file when writing SSH known host keys. 
+ vars: + - name: ansible_lock_file_timeout + env: + - name: ANSIBLE_LOCK_FILE_TIMEOUT + private_key_file: + description: + - Path to private key file to use for authentication. + type: string + ini: + - section: defaults + key: private_key_file + - section: paramiko_connection + key: private_key_file + env: + - name: ANSIBLE_PRIVATE_KEY_FILE + - name: ANSIBLE_PARAMIKO_PRIVATE_KEY_FILE + vars: + - name: ansible_private_key_file + - name: ansible_ssh_private_key_file + - name: ansible_paramiko_private_key_file + cli: + - name: private_key_file + option: "--private-key" + vmid: + description: + - LXC container ID. + type: int + vars: + - name: proxmox_vmid + proxmox_become_method: + description: + - Become command used on the Proxmox host. + type: str + default: sudo + vars: + - name: proxmox_become_method +notes: + - > + When NOT using this plugin as root, you need to have a become mechanism, + e.g. C(sudo), installed on Proxmox and set up so we can run it without prompting for the password. + Inside the container, we need a shell, for example C(sh) and the C(cat) command to be available in the C(PATH) for this plugin to work. 
+""" + +EXAMPLES = r""" +# -------------------------------------------------------------- +# Setup sudo with passwordless access to pct for user 'ansible': +# -------------------------------------------------------------- +# +# Open a Proxmox root shell and execute: +# $ useradd -d /opt/ansible-pct -r -m -s /bin/sh ansible +# $ mkdir -p /opt/ansible-pct/.ssh +# $ ssh-keygen -t ed25519 -C 'ansible' -N "" -f /opt/ansible-pct/.ssh/ansible <<< y > /dev/null +# $ cat /opt/ansible-pct/.ssh/ansible +# $ mv /opt/ansible-pct/.ssh/ansible.pub /opt/ansible-pct/.ssh/authorized_keys +# $ rm /opt/ansible-pct/.ssh/ansible* +# $ chown -R ansible:ansible /opt/ansible-pct/.ssh +# $ chmod 700 /opt/ansible-pct/.ssh +# $ chmod 600 /opt/ansible-pct/.ssh/authorized_keys +# $ echo 'ansible ALL = (root) NOPASSWD: /usr/sbin/pct' > /etc/sudoers.d/ansible_pct +# +# Save the displayed private key and add it to your ssh-agent +# +# Or use ansible: +# --- +# - name: Setup ansible-pct user and configure environment on Proxmox host +# hosts: proxmox +# become: true +# gather_facts: false +# +# tasks: +# - name: Create ansible user +# ansible.builtin.user: +# name: ansible +# comment: Ansible User +# home: /opt/ansible-pct +# shell: /bin/sh +# create_home: true +# system: true +# +# - name: Create .ssh directory +# ansible.builtin.file: +# path: /opt/ansible-pct/.ssh +# state: directory +# owner: ansible +# group: ansible +# mode: '0700' +# +# - name: Generate SSH key for ansible user +# community.crypto.openssh_keypair: +# path: /opt/ansible-pct/.ssh/ansible +# type: ed25519 +# comment: 'ansible' +# force: true +# mode: '0600' +# owner: ansible +# group: ansible +# +# - name: Set public key as authorized key +# ansible.builtin.copy: +# src: /opt/ansible-pct/.ssh/ansible.pub +# dest: /opt/ansible-pct/.ssh/authorized_keys +# remote_src: yes +# owner: ansible +# group: ansible +# mode: '0600' +# +# - name: Add sudoers entry for ansible user +# ansible.builtin.copy: +# content: 'ansible ALL = (root) 
NOPASSWD: /usr/sbin/pct' +# dest: /etc/sudoers.d/ansible_pct +# owner: root +# group: root +# mode: '0440' +# +# - name: Fetch private SSH key to localhost +# ansible.builtin.fetch: +# src: /opt/ansible-pct/.ssh/ansible +# dest: ~/.ssh/proxmox_ansible_private_key +# flat: yes +# fail_on_missing: true +# +# - name: Clean up generated SSH keys +# ansible.builtin.file: +# path: /opt/ansible-pct/.ssh/ansible* +# state: absent +# +# - name: Configure private key permissions on localhost +# hosts: localhost +# tasks: +# - name: Set permissions for fetched private key +# ansible.builtin.file: +# path: ~/.ssh/proxmox_ansible_private_key +# mode: '0600' +# +# -------------------------------- +# Static inventory file: hosts.yml +# -------------------------------- +# all: +# children: +# lxc: +# hosts: +# container-1: +# ansible_host: 10.0.0.10 +# proxmox_vmid: 100 +# ansible_connection: community.general.proxmox_pct_remote +# ansible_user: ansible +# container-2: +# ansible_host: 10.0.0.10 +# proxmox_vmid: 200 +# ansible_connection: community.general.proxmox_pct_remote +# ansible_user: ansible +# proxmox: +# hosts: +# proxmox-1: +# ansible_host: 10.0.0.10 +# +# +# --------------------------------------------- +# Dynamic inventory file: inventory.proxmox.yml +# --------------------------------------------- +# plugin: community.general.proxmox +# url: https://10.0.0.10:8006 +# validate_certs: false +# user: ansible@pam +# token_id: ansible +# token_secret: !vault | +# $ANSIBLE_VAULT;1.1;AES256 +# ... 
+ +# want_facts: true +# exclude_nodes: true +# filters: +# - proxmox_vmtype == "lxc" +# want_proxmox_nodes_ansible_host: false +# compose: +# ansible_host: "'10.0.0.10'" +# ansible_connection: "'community.general.proxmox_pct_remote'" +# ansible_user: "'ansible'" +# +# +# ---------------------- +# Playbook: playbook.yml +# ---------------------- +--- +- hosts: lxc + # On nodes with many containers you might want to deactivate the devices facts + # or set `gather_facts: false` if you don't need them. + # More info on gathering fact subsets: + # https://docs.ansible.com/ansible/latest/collections/ansible/builtin/setup_module.html + # + # gather_facts: true + # gather_subset: + # - "!devices" + tasks: + - name: Ping LXC container + ansible.builtin.ping: +""" + +import os +import pathlib +import socket +import tempfile +import typing as t + +from ansible.errors import ( + AnsibleAuthenticationFailure, + AnsibleConnectionFailure, + AnsibleError, +) +from ansible_collections.community.general.plugins.module_utils._filelock import FileLock, LockTimeout +from ansible.module_utils.common.text.converters import to_bytes, to_native, to_text +from ansible.module_utils.compat.paramiko import PARAMIKO_IMPORT_ERR, paramiko +from ansible.module_utils.compat.version import LooseVersion +from ansible.plugins.connection import ConnectionBase +from ansible.utils.display import Display +from ansible.utils.path import makedirs_safe +from binascii import hexlify + + +display = Display() + + +def authenticity_msg(hostname: str, ktype: str, fingerprint: str) -> str: + msg = f""" + paramiko: The authenticity of host '{hostname}' can't be established. + The {ktype} key fingerprint is {fingerprint}. + Are you sure you want to continue connecting (yes/no)? 
+ """ + return msg + + +MissingHostKeyPolicy: type = object +if paramiko: + MissingHostKeyPolicy = paramiko.MissingHostKeyPolicy + + +class MyAddPolicy(MissingHostKeyPolicy): + """ + Based on AutoAddPolicy in paramiko so we can determine when keys are added + + and also prompt for input. + + Policy for automatically adding the hostname and new host key to the + local L{HostKeys} object, and saving it. This is used by L{SSHClient}. + """ + + def __init__(self, connection: Connection) -> None: + self.connection = connection + self._options = connection._options + + def missing_host_key(self, client, hostname, key) -> None: + + if all((self.connection.get_option('host_key_checking'), not self.connection.get_option('host_key_auto_add'))): + + fingerprint = hexlify(key.get_fingerprint()) + ktype = key.get_name() + + if self.connection.get_option('use_persistent_connections') or self.connection.force_persistence: + # don't print the prompt string since the user cannot respond + # to the question anyway + raise AnsibleError(authenticity_msg(hostname, ktype, fingerprint)[1:92]) + + inp = to_text( + display.prompt_until(authenticity_msg(hostname, ktype, fingerprint), private=False), + errors='surrogate_or_strict' + ) + + if inp.lower() not in ['yes', 'y', '']: + raise AnsibleError('host connection rejected by user') + + key._added_by_ansible_this_time = True + + # existing implementation below: + client._host_keys.add(hostname, key.get_name(), key) + + # host keys are actually saved in close() function below + # in order to control ordering. 
+ + +class Connection(ConnectionBase): + """ SSH based connections (paramiko) to Proxmox pct """ + + transport = 'community.general.proxmox_pct_remote' + _log_channel: str | None = None + + def __init__(self, play_context, new_stdin, *args, **kwargs): + super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs) + + def _set_log_channel(self, name: str) -> None: + """ Mimic paramiko.SSHClient.set_log_channel """ + self._log_channel = name + + def _parse_proxy_command(self, port: int = 22) -> dict[str, t.Any]: + proxy_command = self.get_option('proxy_command') or None + + sock_kwarg = {} + if proxy_command: + replacers = { + '%h': self.get_option('remote_addr'), + '%p': port, + '%r': self.get_option('remote_user') + } + for find, replace in replacers.items(): + proxy_command = proxy_command.replace(find, str(replace)) + try: + sock_kwarg = {'sock': paramiko.ProxyCommand(proxy_command)} + display.vvv(f'CONFIGURE PROXY COMMAND FOR CONNECTION: {proxy_command}', host=self.get_option('remote_addr')) + except AttributeError: + display.warning('Paramiko ProxyCommand support unavailable. ' + 'Please upgrade to Paramiko 1.9.0 or newer. 
' + 'Not using configured ProxyCommand') + + return sock_kwarg + + def _connect(self) -> Connection: + """ activates the connection object """ + + if paramiko is None: + raise AnsibleError(f'paramiko is not installed: {to_native(PARAMIKO_IMPORT_ERR)}') + + port = self.get_option('port') + display.vvv(f'ESTABLISH PARAMIKO SSH CONNECTION FOR USER: {self.get_option("remote_user")} on PORT {to_text(port)} TO {self.get_option("remote_addr")}', + host=self.get_option('remote_addr')) + + ssh = paramiko.SSHClient() + + # Set pubkey and hostkey algorithms to disable, the only manipulation allowed currently + # is keeping or omitting rsa-sha2 algorithms + # default_keys: t.Tuple[str] = () + paramiko_preferred_pubkeys = getattr(paramiko.Transport, '_preferred_pubkeys', ()) + paramiko_preferred_hostkeys = getattr(paramiko.Transport, '_preferred_keys', ()) + use_rsa_sha2_algorithms = self.get_option('use_rsa_sha2_algorithms') + disabled_algorithms: t.Dict[str, t.Iterable[str]] = {} + if not use_rsa_sha2_algorithms: + if paramiko_preferred_pubkeys: + disabled_algorithms['pubkeys'] = tuple(a for a in paramiko_preferred_pubkeys if 'rsa-sha2' in a) + if paramiko_preferred_hostkeys: + disabled_algorithms['keys'] = tuple(a for a in paramiko_preferred_hostkeys if 'rsa-sha2' in a) + + # override paramiko's default logger name + if self._log_channel is not None: + ssh.set_log_channel(self._log_channel) + + self.keyfile = os.path.expanduser('~/.ssh/known_hosts') + + if self.get_option('host_key_checking'): + for ssh_known_hosts in ('/etc/ssh/ssh_known_hosts', '/etc/openssh/ssh_known_hosts'): + try: + ssh.load_system_host_keys(ssh_known_hosts) + break + except IOError: + pass # file was not found, but not required to function + except paramiko.hostkeys.InvalidHostKey as e: + raise AnsibleConnectionFailure(f'Invalid host key: {to_text(e.line)}') + try: + ssh.load_system_host_keys() + except paramiko.hostkeys.InvalidHostKey as e: + raise AnsibleConnectionFailure(f'Invalid host key: 
{to_text(e.line)}') + + ssh_connect_kwargs = self._parse_proxy_command(port) + ssh.set_missing_host_key_policy(MyAddPolicy(self)) + conn_password = self.get_option('password') + allow_agent = True + + if conn_password is not None: + allow_agent = False + + try: + key_filename = None + if self.get_option('private_key_file'): + key_filename = os.path.expanduser(self.get_option('private_key_file')) + + # paramiko 2.2 introduced auth_timeout parameter + if LooseVersion(paramiko.__version__) >= LooseVersion('2.2.0'): + ssh_connect_kwargs['auth_timeout'] = self.get_option('timeout') + + # paramiko 1.15 introduced banner timeout parameter + if LooseVersion(paramiko.__version__) >= LooseVersion('1.15.0'): + ssh_connect_kwargs['banner_timeout'] = self.get_option('banner_timeout') + + ssh.connect( + self.get_option('remote_addr').lower(), + username=self.get_option('remote_user'), + allow_agent=allow_agent, + look_for_keys=self.get_option('look_for_keys'), + key_filename=key_filename, + password=conn_password, + timeout=self.get_option('timeout'), + port=port, + disabled_algorithms=disabled_algorithms, + **ssh_connect_kwargs, + ) + except paramiko.ssh_exception.BadHostKeyException as e: + raise AnsibleConnectionFailure(f'host key mismatch for {to_text(e.hostname)}') + except paramiko.ssh_exception.AuthenticationException as e: + msg = f'Failed to authenticate: {e}' + raise AnsibleAuthenticationFailure(msg) + except Exception as e: + msg = to_text(e) + if u'PID check failed' in msg: + raise AnsibleError('paramiko version issue, please upgrade paramiko on the machine running ansible') + elif u'Private key file is encrypted' in msg: + msg = f'ssh {self.get_option("remote_user")}@{self.get_options("remote_addr")}:{port} : ' + \ + f'{msg}\nTo connect as a different user, use -u .' 
+ raise AnsibleConnectionFailure(msg) + else: + raise AnsibleConnectionFailure(msg) + self.ssh = ssh + self._connected = True + return self + + def _any_keys_added(self) -> bool: + for hostname, keys in self.ssh._host_keys.items(): + for keytype, key in keys.items(): + added_this_time = getattr(key, '_added_by_ansible_this_time', False) + if added_this_time: + return True + return False + + def _save_ssh_host_keys(self, filename: str) -> None: + """ + not using the paramiko save_ssh_host_keys function as we want to add new SSH keys at the bottom so folks + don't complain about it :) + """ + + if not self._any_keys_added(): + return + + path = os.path.expanduser('~/.ssh') + makedirs_safe(path) + + with open(filename, 'w') as f: + for hostname, keys in self.ssh._host_keys.items(): + for keytype, key in keys.items(): + # was f.write + added_this_time = getattr(key, '_added_by_ansible_this_time', False) + if not added_this_time: + f.write(f'{hostname} {keytype} {key.get_base64()}\n') + + for hostname, keys in self.ssh._host_keys.items(): + for keytype, key in keys.items(): + added_this_time = getattr(key, '_added_by_ansible_this_time', False) + if added_this_time: + f.write(f'{hostname} {keytype} {key.get_base64()}\n') + + def _build_pct_command(self, cmd: str) -> str: + cmd = ['/usr/sbin/pct', 'exec', str(self.get_option('vmid')), '--', cmd] + if self.get_option('remote_user') != 'root': + cmd = [self.get_option('proxmox_become_method')] + cmd + display.vvv(f'INFO Running as non root user: {self.get_option("remote_user")}, trying to run pct with become method: ' + + f'{self.get_option("proxmox_become_method")}', + host=self.get_option('remote_addr')) + return ' '.join(cmd) + + def exec_command(self, cmd: str, in_data: bytes | None = None, sudoable: bool = True) -> tuple[int, bytes, bytes]: + """ run a command on inside the LXC container """ + + cmd = self._build_pct_command(cmd) + + super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable) + + 
bufsize = 4096 + + try: + self.ssh.get_transport().set_keepalive(5) + chan = self.ssh.get_transport().open_session() + except Exception as e: + text_e = to_text(e) + msg = 'Failed to open session' + if text_e: + msg += f': {text_e}' + raise AnsibleConnectionFailure(to_native(msg)) + + # sudo usually requires a PTY (cf. requiretty option), therefore + # we give it one by default (pty=True in ansible.cfg), and we try + # to initialise from the calling environment when sudoable is enabled + if self.get_option('pty') and sudoable: + chan.get_pty(term=os.getenv('TERM', 'vt100'), width=int(os.getenv('COLUMNS', 0)), height=int(os.getenv('LINES', 0))) + + display.vvv(f'EXEC {cmd}', host=self.get_option('remote_addr')) + + cmd = to_bytes(cmd, errors='surrogate_or_strict') + + no_prompt_out = b'' + no_prompt_err = b'' + become_output = b'' + + try: + chan.exec_command(cmd) + if self.become and self.become.expect_prompt(): + password_prompt = False + become_success = False + while not (become_success or password_prompt): + display.debug('Waiting for Privilege Escalation input') + + chunk = chan.recv(bufsize) + display.debug(f'chunk is: {to_text(chunk)}') + if not chunk: + if b'unknown user' in become_output: + n_become_user = to_native(self.become.get_option('become_user')) + raise AnsibleError(f'user {n_become_user} does not exist') + else: + break + # raise AnsibleError('ssh connection closed waiting for password prompt') + become_output += chunk + + # need to check every line because we might get lectured + # and we might get the middle of a line in a chunk + for line in become_output.splitlines(True): + if self.become.check_success(line): + become_success = True + break + elif self.become.check_password_prompt(line): + password_prompt = True + break + + if password_prompt: + if self.become: + become_pass = self.become.get_option('become_pass') + chan.sendall(to_bytes(become_pass, errors='surrogate_or_strict') + b'\n') + else: + raise AnsibleError('A password is required 
but none was supplied') + else: + no_prompt_out += become_output + no_prompt_err += become_output + + if in_data: + for i in range(0, len(in_data), bufsize): + chan.send(in_data[i:i + bufsize]) + chan.shutdown_write() + elif in_data == b'': + chan.shutdown_write() + + except socket.timeout: + raise AnsibleError('ssh timed out waiting for privilege escalation.\n' + to_text(become_output)) + + stdout = b''.join(chan.makefile('rb', bufsize)) + stderr = b''.join(chan.makefile_stderr('rb', bufsize)) + returncode = chan.recv_exit_status() + + if 'pct: not found' in stderr.decode('utf-8'): + raise AnsibleError( + f'pct not found in path of host: {to_text(self.get_option("remote_addr"))}') + + return (returncode, no_prompt_out + stdout, no_prompt_out + stderr) + + def put_file(self, in_path: str, out_path: str) -> None: + """ transfer a file from local to remote """ + + display.vvv(f'PUT {in_path} TO {out_path}', host=self.get_option('remote_addr')) + try: + with open(in_path, 'rb') as f: + data = f.read() + returncode, stdout, stderr = self.exec_command( + ' '.join([ + self._shell.executable, '-c', + self._shell.quote(f'cat > {out_path}')]), + in_data=data, + sudoable=False) + if returncode != 0: + if 'cat: not found' in stderr.decode('utf-8'): + raise AnsibleError( + f'cat not found in path of container: {to_text(self.get_option("vmid"))}') + raise AnsibleError( + f'{to_text(stdout)}\n{to_text(stderr)}') + except Exception as e: + raise AnsibleError( + f'error occurred while putting file from {in_path} to {out_path}!\n{to_text(e)}') + + def fetch_file(self, in_path: str, out_path: str) -> None: + """ save a remote file to the specified path """ + + display.vvv(f'FETCH {in_path} TO {out_path}', host=self.get_option('remote_addr')) + try: + returncode, stdout, stderr = self.exec_command( + ' '.join([ + self._shell.executable, '-c', + self._shell.quote(f'cat {in_path}')]), + sudoable=False) + if returncode != 0: + if 'cat: not found' in stderr.decode('utf-8'): + raise 
AnsibleError( + f'cat not found in path of container: {to_text(self.get_option("vmid"))}') + raise AnsibleError( + f'{to_text(stdout)}\n{to_text(stderr)}') + with open(out_path, 'wb') as f: + f.write(stdout) + except Exception as e: + raise AnsibleError( + f'error occurred while fetching file from {in_path} to {out_path}!\n{to_text(e)}') + + def reset(self) -> None: + """ reset the connection """ + + if not self._connected: + return + self.close() + self._connect() + + def close(self) -> None: + """ terminate the connection """ + + if self.get_option('host_key_checking') and self.get_option('record_host_keys') and self._any_keys_added(): + # add any new SSH host keys -- warning -- this could be slow + # (This doesn't acquire the connection lock because it needs + # to exclude only other known_hosts writers, not connections + # that are starting up.) + lockfile = os.path.basename(self.keyfile) + dirname = os.path.dirname(self.keyfile) + makedirs_safe(dirname) + tmp_keyfile_name = None + try: + with FileLock().lock_file(lockfile, dirname, self.get_option('lock_file_timeout')): + # just in case any were added recently + + self.ssh.load_system_host_keys() + self.ssh._host_keys.update(self.ssh._system_host_keys) + + # gather information about the current key file, so + # we can ensure the new file has the correct mode/owner + + key_dir = os.path.dirname(self.keyfile) + if os.path.exists(self.keyfile): + key_stat = os.stat(self.keyfile) + mode = key_stat.st_mode & 0o777 + uid = key_stat.st_uid + gid = key_stat.st_gid + else: + mode = 0o644 + uid = os.getuid() + gid = os.getgid() + + # Save the new keys to a temporary file and move it into place + # rather than rewriting the file. We set delete=False because + # the file will be moved into place rather than cleaned up. 
+ + with tempfile.NamedTemporaryFile(dir=key_dir, delete=False) as tmp_keyfile: + tmp_keyfile_name = tmp_keyfile.name + os.chmod(tmp_keyfile_name, mode) + os.chown(tmp_keyfile_name, uid, gid) + self._save_ssh_host_keys(tmp_keyfile_name) + + os.rename(tmp_keyfile_name, self.keyfile) + except LockTimeout: + raise AnsibleError( + f'writing lock file for {self.keyfile} ran in to the timeout of {self.get_option("lock_file_timeout")}s') + except paramiko.hostkeys.InvalidHostKey as e: + raise AnsibleConnectionFailure(f'Invalid host key: {e.line}') + except Exception as e: + # unable to save keys, including scenario when key was invalid + # and caught earlier + raise AnsibleError( + f'error occurred while writing SSH host keys!\n{to_text(e)}') + finally: + if tmp_keyfile_name is not None: + pathlib.Path(tmp_keyfile_name).unlink(missing_ok=True) + + self.ssh.close() + self._connected = False diff --git a/plugins/connection/qubes.py b/plugins/connection/qubes.py index dee476308c..5a9963df2d 100644 --- a/plugins/connection/qubes.py +++ b/plugins/connection/qubes.py @@ -8,8 +8,7 @@ # # Written by: Kushal Das (https://github.com/kushaldas) -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" diff --git a/plugins/connection/saltstack.py b/plugins/connection/saltstack.py index d9e5d3b1d9..f826741926 100644 --- a/plugins/connection/saltstack.py +++ b/plugins/connection/saltstack.py @@ -7,8 +7,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Michael Scherer (@mscherer) diff --git a/plugins/connection/zone.py b/plugins/connection/zone.py index aa5442f28e..baca9312b3 100644 --- a/plugins/connection/zone.py +++ 
b/plugins/connection/zone.py @@ -8,8 +8,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" author: Ansible Core Team diff --git a/plugins/doc_fragments/keycloak.py b/plugins/doc_fragments/keycloak.py index 102a60ab33..75c458d5fc 100644 --- a/plugins/doc_fragments/keycloak.py +++ b/plugins/doc_fragments/keycloak.py @@ -57,6 +57,12 @@ options: type: str version_added: 3.0.0 + refresh_token: + description: + - Authentication refresh token for Keycloak API. + type: str + version_added: 10.3.0 + validate_certs: description: - Verify TLS certificates (do not disable this in production). diff --git a/plugins/filter/counter.py b/plugins/filter/counter.py index 93ffa64d01..bd4b5d4448 100644 --- a/plugins/filter/counter.py +++ b/plugins/filter/counter.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: counter diff --git a/plugins/filter/crc32.py b/plugins/filter/crc32.py index bdf6d51614..e394d23732 100644 --- a/plugins/filter/crc32.py +++ b/plugins/filter/crc32.py @@ -2,8 +2,7 @@ # Copyright (c) 2022, Julien Riou # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations from ansible.errors import AnsibleFilterError from ansible.module_utils.common.text.converters import to_bytes diff --git a/plugins/filter/dict.py 
b/plugins/filter/dict.py index b3e81bd4ab..23c977dfd6 100644 --- a/plugins/filter/dict.py +++ b/plugins/filter/dict.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import absolute_import, division, print_function -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: dict diff --git a/plugins/filter/dict_kv.py b/plugins/filter/dict_kv.py index 8c4fb01752..1d73bde301 100644 --- a/plugins/filter/dict_kv.py +++ b/plugins/filter/dict_kv.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: dict_kv diff --git a/plugins/filter/from_csv.py b/plugins/filter/from_csv.py index 3a05769365..e9a5d73e53 100644 --- a/plugins/filter/from_csv.py +++ b/plugins/filter/from_csv.py @@ -5,8 +5,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import absolute_import, division, print_function -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: from_csv diff --git a/plugins/filter/from_ini.py b/plugins/filter/from_ini.py index 01ae150d08..d77338df99 100644 --- a/plugins/filter/from_ini.py +++ b/plugins/filter/from_ini.py @@ -4,7 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import absolute_import, division, print_function +from __future__ import annotations DOCUMENTATION = r""" name: from_ini @@ -44,7 +44,6 @@ _value: type: dictionary """ 
-__metaclass__ = type from ansible.errors import AnsibleFilterError from ansible.module_utils.six import string_types diff --git a/plugins/filter/groupby_as_dict.py b/plugins/filter/groupby_as_dict.py index 80c7ad7885..81a24a1e9f 100644 --- a/plugins/filter/groupby_as_dict.py +++ b/plugins/filter/groupby_as_dict.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: groupby_as_dict diff --git a/plugins/filter/hashids.py b/plugins/filter/hashids.py index ac771e6219..6ec64d5f59 100644 --- a/plugins/filter/hashids.py +++ b/plugins/filter/hashids.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import absolute_import, division, print_function -__metaclass__ = type +from __future__ import annotations from ansible.errors import ( AnsibleError, diff --git a/plugins/filter/jc.py b/plugins/filter/jc.py index 388fcf0d3f..48d53bcbd3 100644 --- a/plugins/filter/jc.py +++ b/plugins/filter/jc.py @@ -5,8 +5,7 @@ # # contributed by Kelly Brazil -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: jc diff --git a/plugins/filter/json_diff.yml b/plugins/filter/json_diff.yml new file mode 100644 index 0000000000..a370564d7a --- /dev/null +++ b/plugins/filter/json_diff.yml @@ -0,0 +1,56 @@ +--- +# Copyright (c) Stanislav Meduna (@numo68) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +DOCUMENTATION: + name: json_diff + short_description: Create a JSON patch by 
comparing two JSON files + description: + - This filter compares the input with the argument and computes a list of operations + that can be consumed by the P(community.general.json_patch_recipe#filter) to change the input + to the argument. + requirements: + - jsonpatch + version_added: 10.3.0 + author: + - Stanislav Meduna (@numo68) + positional: target + options: + _input: + description: A list or a dictionary representing a source JSON object, or a string containing a JSON object. + type: raw + required: true + target: + description: A list or a dictionary representing a target JSON object, or a string containing a JSON object. + type: raw + required: true + seealso: + - name: RFC 6902 + description: JavaScript Object Notation (JSON) Patch + link: https://datatracker.ietf.org/doc/html/rfc6902 + - name: RFC 6901 + description: JavaScript Object Notation (JSON) Pointer + link: https://datatracker.ietf.org/doc/html/rfc6901 + - name: jsonpatch Python Package + description: A Python library for applying JSON patches + link: https://pypi.org/project/jsonpatch/ + +RETURN: + _value: + description: A list of JSON patch operations to apply. 
+ type: list + elements: dict + +EXAMPLES: | + - name: Compute a difference + ansible.builtin.debug: + msg: "{{ input | community.general.json_diff(target) }}" + vars: + input: {"foo": 1, "bar":{"baz": 2}, "baw": [1, 2, 3], "hello": "day"} + target: {"foo": 1, "bar": {"baz": 2}, "baw": [1, 3], "baq": {"baz": 2}, "hello": "night"} + # => [ + # {"op": "add", "path": "/baq", "value": {"baz": 2}}, + # {"op": "remove", "path": "/baw/1"}, + # {"op": "replace", "path": "/hello", "value": "night"} + # ] diff --git a/plugins/filter/json_patch.py b/plugins/filter/json_patch.py new file mode 100644 index 0000000000..4600bfaf92 --- /dev/null +++ b/plugins/filter/json_patch.py @@ -0,0 +1,195 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Stanislav Meduna (@numo68) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import annotations +from json import loads +from typing import TYPE_CHECKING +from ansible.errors import AnsibleFilterError + +__metaclass__ = type # pylint: disable=C0103 + +if TYPE_CHECKING: + from typing import Any, Callable, Union + +try: + import jsonpatch + +except ImportError as exc: + HAS_LIB = False + JSONPATCH_IMPORT_ERROR = exc +else: + HAS_LIB = True + JSONPATCH_IMPORT_ERROR = None + +OPERATIONS_AVAILABLE = ["add", "copy", "move", "remove", "replace", "test"] +OPERATIONS_NEEDING_FROM = ["copy", "move"] +OPERATIONS_NEEDING_VALUE = ["add", "replace", "test"] + + +class FilterModule: + """Filter plugin.""" + + def check_json_object(self, filter_name: str, object_name: str, inp: Any): + if isinstance(inp, (str, bytes, bytearray)): + try: + return loads(inp) + except Exception as e: + raise AnsibleFilterError( + f"{filter_name}: could not decode JSON from {object_name}: {e}" + ) from e + + if not isinstance(inp, (list, dict)): + raise AnsibleFilterError( + f"{filter_name}: {object_name} is not dictionary, list or string" + ) + + return 
inp + + def check_patch_arguments(self, filter_name: str, args: dict): + + if "op" not in args or not isinstance(args["op"], str): + raise AnsibleFilterError(f"{filter_name}: 'op' argument is not a string") + + if args["op"] not in OPERATIONS_AVAILABLE: + raise AnsibleFilterError( + f"{filter_name}: unsupported 'op' argument: {args['op']}" + ) + + if "path" not in args or not isinstance(args["path"], str): + raise AnsibleFilterError(f"{filter_name}: 'path' argument is not a string") + + if args["op"] in OPERATIONS_NEEDING_FROM: + if "from" not in args: + raise AnsibleFilterError( + f"{filter_name}: 'from' argument missing for '{args['op']}' operation" + ) + if not isinstance(args["from"], str): + raise AnsibleFilterError( + f"{filter_name}: 'from' argument is not a string" + ) + + def json_patch( + self, + inp: Union[str, list, dict, bytes, bytearray], + op: str, + path: str, + value: Any = None, + **kwargs: dict, + ) -> Any: + + if not HAS_LIB: + raise AnsibleFilterError( + "You need to install 'jsonpatch' package prior to running 'json_patch' filter" + ) from JSONPATCH_IMPORT_ERROR + + args = {"op": op, "path": path} + from_arg = kwargs.pop("from", None) + fail_test = kwargs.pop("fail_test", False) + + if kwargs: + raise AnsibleFilterError( + f"json_patch: unexpected keywords arguments: {', '.join(sorted(kwargs))}" + ) + + if not isinstance(fail_test, bool): + raise AnsibleFilterError("json_patch: 'fail_test' argument is not a bool") + + if op in OPERATIONS_NEEDING_VALUE: + args["value"] = value + if op in OPERATIONS_NEEDING_FROM and from_arg is not None: + args["from"] = from_arg + + inp = self.check_json_object("json_patch", "input", inp) + self.check_patch_arguments("json_patch", args) + + result = None + + try: + result = jsonpatch.apply_patch(inp, [args]) + except jsonpatch.JsonPatchTestFailed as e: + if fail_test: + raise AnsibleFilterError( + f"json_patch: test operation failed: {e}" + ) from e + else: + pass + except Exception as e: + raise 
AnsibleFilterError(f"json_patch: patch failed: {e}") from e + + return result + + def json_patch_recipe( + self, + inp: Union[str, list, dict, bytes, bytearray], + operations: list, + /, + fail_test: bool = False, + ) -> Any: + + if not HAS_LIB: + raise AnsibleFilterError( + "You need to install 'jsonpatch' package prior to running 'json_patch_recipe' filter" + ) from JSONPATCH_IMPORT_ERROR + + if not isinstance(operations, list): + raise AnsibleFilterError( + "json_patch_recipe: 'operations' needs to be a list" + ) + + if not isinstance(fail_test, bool): + raise AnsibleFilterError("json_patch: 'fail_test' argument is not a bool") + + result = None + + inp = self.check_json_object("json_patch_recipe", "input", inp) + for args in operations: + self.check_patch_arguments("json_patch_recipe", args) + + try: + result = jsonpatch.apply_patch(inp, operations) + except jsonpatch.JsonPatchTestFailed as e: + if fail_test: + raise AnsibleFilterError( + f"json_patch_recipe: test operation failed: {e}" + ) from e + else: + pass + except Exception as e: + raise AnsibleFilterError(f"json_patch_recipe: patch failed: {e}") from e + + return result + + def json_diff( + self, + inp: Union[str, list, dict, bytes, bytearray], + target: Union[str, list, dict, bytes, bytearray], + ) -> list: + + if not HAS_LIB: + raise AnsibleFilterError( + "You need to install 'jsonpatch' package prior to running 'json_diff' filter" + ) from JSONPATCH_IMPORT_ERROR + + inp = self.check_json_object("json_diff", "input", inp) + target = self.check_json_object("json_diff", "target", target) + + try: + result = list(jsonpatch.make_patch(inp, target)) + except Exception as e: + raise AnsibleFilterError(f"JSON diff failed: {e}") from e + + return result + + def filters(self) -> dict[str, Callable[..., Any]]: + """Map filter plugin names to their functions. + + Returns: + dict: The filter plugin functions. 
+ """ + return { + "json_patch": self.json_patch, + "json_patch_recipe": self.json_patch_recipe, + "json_diff": self.json_diff, + } diff --git a/plugins/filter/json_patch.yml b/plugins/filter/json_patch.yml new file mode 100644 index 0000000000..6fd411d6ff --- /dev/null +++ b/plugins/filter/json_patch.yml @@ -0,0 +1,145 @@ +--- +# Copyright (c) Stanislav Meduna (@numo68) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +DOCUMENTATION: + name: json_patch + short_description: Apply a JSON-Patch (RFC 6902) operation to an object + description: + - This filter applies a single JSON patch operation and returns a modified object. + - If the operation is a test, the filter returns an ummodified object if the test + succeeded and a V(none) value otherwise. + requirements: + - jsonpatch + version_added: 10.3.0 + author: + - Stanislav Meduna (@numo68) + positional: op, path, value + options: + _input: + description: A list or a dictionary representing a JSON object, or a string containing a JSON object. + type: raw + required: true + op: + description: Operation to perform (see L(RFC 6902, https://datatracker.ietf.org/doc/html/rfc6902)). + type: str + choices: [add, copy, move, remove, replace, test] + required: true + path: + description: JSON Pointer path to the target location (see L(RFC 6901, https://datatracker.ietf.org/doc/html/rfc6901)). + type: str + required: true + value: + description: Value to use in the operation. Ignored for O(op=copy), O(op=move), and O(op=remove). + type: raw + from: + description: The source location for the copy and move operation. Mandatory + for O(op=copy) and O(op=move), ignored otherwise. + type: str + fail_test: + description: If V(false), a failed O(op=test) will return V(none). If V(true), the filter + invocation will fail with an error. 
+ type: bool + default: false + seealso: + - name: RFC 6902 + description: JavaScript Object Notation (JSON) Patch + link: https://datatracker.ietf.org/doc/html/rfc6902 + - name: RFC 6901 + description: JavaScript Object Notation (JSON) Pointer + link: https://datatracker.ietf.org/doc/html/rfc6901 + - name: jsonpatch Python Package + description: A Python library for applying JSON patches + link: https://pypi.org/project/jsonpatch/ + +RETURN: + _value: + description: A modified object or V(none) if O(op=test), O(fail_test=false) and the test failed. + type: any + returned: always + +EXAMPLES: | + - name: Insert a new element into an array at a specified index + ansible.builtin.debug: + msg: "{{ input | community.general.json_patch('add', '/1', {'baz': 'qux'}) }}" + vars: + input: ["foo": { "one": 1 }, "bar": { "two": 2 }] + # => [{"foo": {"one": 1}}, {"baz": "qux"}, {"bar": {"two": 2}}] + + - name: Insert a new key into a dictionary + ansible.builtin.debug: + msg: "{{ input | community.general.json_patch('add', '/bar/baz', 'qux') }}" + vars: + input: { "foo": { "one": 1 }, "bar": { "two": 2 } } + # => {"foo": {"one": 1}, "bar": {"baz": "qux", "two": 2}} + + - name: Input is a string + ansible.builtin.debug: + msg: "{{ input | community.general.json_patch('add', '/baz', 3) }}" + vars: + input: '{ "foo": { "one": 1 }, "bar": { "two": 2 } }' + # => {"foo": {"one": 1}, "bar": { "two": 2 }, "baz": 3} + + - name: Existing key is replaced + ansible.builtin.debug: + msg: "{{ input | community.general.json_patch('add', '/bar', 'qux') }}" + vars: + input: { "foo": { "one": 1 }, "bar": { "two": 2 } } + # => {"foo": {"one": 1}, "bar": "qux"} + + - name: Escaping tilde as ~0 and slash as ~1 in the path + ansible.builtin.debug: + msg: "{{ input | community.general.json_patch('add', '/~0~1', 'qux') }}" + vars: + input: {} + # => {"~/": "qux"} + + - name: Add at the end of the array + ansible.builtin.debug: + msg: "{{ input | community.general.json_patch('add', '/-', 4) }}" + 
vars:
+      input: [1, 2, 3]
+    # => [1, 2, 3, 4]
+
+  - name: Remove a key
+    ansible.builtin.debug:
+      msg: "{{ input | community.general.json_patch('remove', '/bar') }}"
+    vars:
+      input: { "foo": { "one": 1 }, "bar": { "two": 2 } }
+    # => {"foo": {"one": 1} }
+
+  - name: Replace a value
+    ansible.builtin.debug:
+      msg: "{{ input | community.general.json_patch('replace', '/bar', 2) }}"
+    vars:
+      input: { "foo": { "one": 1 }, "bar": { "two": 2 } }
+    # => {"foo": {"one": 1}, "bar": 2}
+
+  - name: Copy a value
+    ansible.builtin.debug:
+      msg: "{{ input | community.general.json_patch('copy', '/baz', from='/bar') }}"
+    vars:
+      input: { "foo": { "one": 1 }, "bar": { "two": 2 } }
+    # => {"foo": {"one": 1}, "bar": { "two": 2 }, "baz": { "two": 2 }}
+
+  - name: Move a value
+    ansible.builtin.debug:
+      msg: "{{ input | community.general.json_patch('move', '/baz', from='/bar') }}"
+    vars:
+      input: { "foo": { "one": 1 }, "bar": { "two": 2 } }
+    # => {"foo": {"one": 1}, "baz": { "two": 2 }}
+
+  - name: Successful test
+    ansible.builtin.debug:
+      msg: "{{ input | community.general.json_patch('test', '/bar/two', 2) | ternary('OK', 'Failed') }}"
+    vars:
+      input: { "foo": { "one": 1 }, "bar": { "two": 2 } }
+    # => OK
+
+  - name: Unsuccessful test
+    ansible.builtin.debug:
+      msg: "{{ input | community.general.json_patch('test', '/bar/two', 9) | ternary('OK', 'Failed') }}"
+    vars:
+      input: { "foo": { "one": 1 }, "bar": { "two": 2 } }
+    # => Failed
diff --git a/plugins/filter/json_patch_recipe.yml b/plugins/filter/json_patch_recipe.yml
new file mode 100644
index 0000000000..671600b941
--- /dev/null
+++ b/plugins/filter/json_patch_recipe.yml
@@ -0,0 +1,102 @@
+---
+# Copyright (c) Stanislav Meduna (@numo68)
+# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+DOCUMENTATION:
+  name: json_patch_recipe
+  short_description: Apply JSON-Patch (RFC 6902) operations to an object
+  description:
+    
- This filter sequentially applies JSON patch operations and returns a modified object. + - If there is a test operation in the list, the filter continues if the test + succeeded and returns a V(none) value otherwise. + requirements: + - jsonpatch + version_added: 10.3.0 + author: + - Stanislav Meduna (@numo68) + positional: operations, fail_test + options: + _input: + description: A list or a dictionary representing a JSON object, or a string containing a JSON object. + type: raw + required: true + operations: + description: A list of JSON patch operations to apply. + type: list + elements: dict + required: true + suboptions: + op: + description: Operation to perform (see L(RFC 6902, https://datatracker.ietf.org/doc/html/rfc6902)). + type: str + choices: [add, copy, move, remove, replace, test] + required: true + path: + description: JSON Pointer path to the target location (see L(RFC 6901, https://datatracker.ietf.org/doc/html/rfc6901)). + type: str + required: true + value: + description: Value to use in the operation. Ignored for O(operations[].op=copy), O(operations[].op=move), and O(operations[].op=remove). + type: raw + from: + description: The source location for the copy and move operation. Mandatory + for O(operations[].op=copy) and O(operations[].op=move), ignored otherwise. + type: str + fail_test: + description: If V(false), a failed O(operations[].op=test) will return V(none). If V(true), the filter + invocation will fail with an error. 
+ type: bool + default: false + seealso: + - name: RFC 6902 + description: JavaScript Object Notation (JSON) Patch + link: https://datatracker.ietf.org/doc/html/rfc6902 + - name: RFC 6901 + description: JavaScript Object Notation (JSON) Pointer + link: https://datatracker.ietf.org/doc/html/rfc6901 + - name: jsonpatch Python Package + description: A Python library for applying JSON patches + link: https://pypi.org/project/jsonpatch/ + +RETURN: + _value: + description: A modified object or V(none) if O(operations[].op=test), O(fail_test=false) + and the test failed. + type: any + returned: always + +EXAMPLES: | + - name: Apply a series of operations + ansible.builtin.debug: + msg: "{{ input | community.general.json_patch_recipe(operations) }}" + vars: + input: {} + operations: + - op: 'add' + path: '/foo' + value: 1 + - op: 'add' + path: '/bar' + value: [] + - op: 'add' + path: '/bar/-' + value: 2 + - op: 'add' + path: '/bar/0' + value: 1 + - op: 'remove' + path: '/bar/0' + - op: 'move' + from: '/foo' + path: '/baz' + - op: 'copy' + from: '/baz' + path: '/bax' + - op: 'copy' + from: '/baz' + path: '/bay' + - op: 'replace' + path: '/baz' + value: [10, 20, 30] + # => {"bar":[2],"bax":1,"bay":1,"baz":[10,20,30]} diff --git a/plugins/filter/json_query.py b/plugins/filter/json_query.py index 61223b0702..8976694a94 100644 --- a/plugins/filter/json_query.py +++ b/plugins/filter/json_query.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: json_query diff --git a/plugins/filter/keep_keys.py b/plugins/filter/keep_keys.py index 4cff4405fc..98b34b4197 100644 --- a/plugins/filter/keep_keys.py +++ b/plugins/filter/keep_keys.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see 
LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: keep_keys diff --git a/plugins/filter/lists.py b/plugins/filter/lists.py index d16f955c22..707ec9f1fe 100644 --- a/plugins/filter/lists.py +++ b/plugins/filter/lists.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import absolute_import, division, print_function -__metaclass__ = type +from __future__ import annotations from ansible.errors import AnsibleFilterError from ansible.module_utils.common.collections import is_sequence diff --git a/plugins/filter/lists_mergeby.py b/plugins/filter/lists_mergeby.py index b34246993c..b15df2e089 100644 --- a/plugins/filter/lists_mergeby.py +++ b/plugins/filter/lists_mergeby.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: lists_mergeby diff --git a/plugins/filter/random_mac.py b/plugins/filter/random_mac.py index 49910bc6be..1ece58230c 100644 --- a/plugins/filter/random_mac.py +++ b/plugins/filter/random_mac.py @@ -4,8 +4,7 @@ # SPDX-License-Identifier: GPL-3.0-or-later # Make coding more python3-ish -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: random_mac diff --git a/plugins/filter/remove_keys.py b/plugins/filter/remove_keys.py index 7baee12695..2058803138 100644 --- a/plugins/filter/remove_keys.py +++ b/plugins/filter/remove_keys.py 
@@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: remove_keys diff --git a/plugins/filter/replace_keys.py b/plugins/filter/replace_keys.py index f317144be4..d47468bd3c 100644 --- a/plugins/filter/replace_keys.py +++ b/plugins/filter/replace_keys.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: replace_keys diff --git a/plugins/filter/reveal_ansible_type.py b/plugins/filter/reveal_ansible_type.py index 3d7e40111c..36fcba3df2 100644 --- a/plugins/filter/reveal_ansible_type.py +++ b/plugins/filter/reveal_ansible_type.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: reveal_ansible_type diff --git a/plugins/filter/time.py b/plugins/filter/time.py index 25970cd260..e8a867a1fe 100644 --- a/plugins/filter/time.py +++ b/plugins/filter/time.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations import re from ansible.errors import AnsibleFilterError diff --git a/plugins/filter/to_ini.py 
b/plugins/filter/to_ini.py index f06763ac66..4be1a684e7 100644 --- a/plugins/filter/to_ini.py +++ b/plugins/filter/to_ini.py @@ -4,7 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import absolute_import, division, print_function +from __future__ import annotations DOCUMENTATION = r""" name: to_ini @@ -50,8 +50,6 @@ _value: """ -__metaclass__ = type - from ansible.errors import AnsibleFilterError from ansible.module_utils.common._collections_compat import Mapping from ansible.module_utils.six.moves import StringIO diff --git a/plugins/filter/unicode_normalize.py b/plugins/filter/unicode_normalize.py index 9401197eba..e897bb9cee 100644 --- a/plugins/filter/unicode_normalize.py +++ b/plugins/filter/unicode_normalize.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import absolute_import, division, print_function -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: unicode_normalize diff --git a/plugins/filter/version_sort.py b/plugins/filter/version_sort.py index f5a844c542..f3fb30035a 100644 --- a/plugins/filter/version_sort.py +++ b/plugins/filter/version_sort.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r""" name: version_sort diff --git a/plugins/inventory/cobbler.py b/plugins/inventory/cobbler.py index 7d65f583d6..4546bf8d6c 100644 --- a/plugins/inventory/cobbler.py +++ b/plugins/inventory/cobbler.py @@ -3,8 +3,7 @@ # Copyright (c) 2020 Ansible Project # GNU General Public License 
v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = ''' author: Orion Poplawski (@opoplawski) diff --git a/plugins/inventory/gitlab_runners.py b/plugins/inventory/gitlab_runners.py index cd6f40169a..961f20444b 100644 --- a/plugins/inventory/gitlab_runners.py +++ b/plugins/inventory/gitlab_runners.py @@ -4,9 +4,8 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) +from __future__ import annotations -__metaclass__ = type DOCUMENTATION = ''' name: gitlab_runners diff --git a/plugins/inventory/icinga2.py b/plugins/inventory/icinga2.py index 527a329173..7ee87df065 100644 --- a/plugins/inventory/icinga2.py +++ b/plugins/inventory/icinga2.py @@ -3,9 +3,8 @@ # Copyright (c) 2021 Ansible Project # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) +from __future__ import annotations -__metaclass__ = type DOCUMENTATION = ''' name: icinga2 diff --git a/plugins/inventory/iocage.py b/plugins/inventory/iocage.py index 6ca7c2ef0a..31aad309f5 100644 --- a/plugins/inventory/iocage.py +++ b/plugins/inventory/iocage.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = ''' name: iocage @@ -46,14 +45,30 @@ DOCUMENTATION = ''' O(host) with SSH and execute the command C(iocage list). 
This option is not required if O(host) is V(localhost). type: str + sudo: + description: + - Enable execution as root. + - This requires passwordless sudo of the command C(iocage list*). + type: bool + default: false + version_added: 10.3.0 + sudo_preserve_env: + description: + - Preserve environment if O(sudo) is enabled. + - This requires C(SETENV) sudoers tag. + type: bool + default: false + version_added: 10.3.0 get_properties: description: - Get jails' properties. Creates dictionary C(iocage_properties) for each added host. - type: boolean + type: bool default: false env: - description: O(user)'s environment on O(host). + description: + - O(user)'s environment on O(host). + - Enable O(sudo_preserve_env) if O(sudo) is enabled. type: dict default: {} notes: @@ -88,6 +103,17 @@ user: admin env: CRYPTOGRAPHY_OPENSSL_NO_LEGACY: 1 +--- +# execute as root +# sudoers example 'admin ALL=(ALL) NOPASSWD:SETENV: /usr/local/bin/iocage list*' +plugin: community.general.iocage +host: 10.1.0.73 +user: admin +sudo: true +sudo_preserve_env: true +env: + CRYPTOGRAPHY_OPENSSL_NO_LEGACY: 1 + --- # enable cache plugin: community.general.iocage @@ -196,6 +222,8 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable): def get_inventory(self, path): host = self.get_option('host') + sudo = self.get_option('sudo') + sudo_preserve_env = self.get_option('sudo_preserve_env') env = self.get_option('env') get_properties = self.get_option('get_properties') @@ -208,9 +236,13 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable): cmd.append("ssh") cmd.append(f"{user}@{host}") cmd.extend([f"{k}={v}" for k, v in env.items()]) - cmd.append(self.IOCAGE) cmd_list = cmd.copy() + if sudo: + cmd_list.append('sudo') + if sudo_preserve_env: + cmd_list.append('--preserve-env') + cmd_list.append(self.IOCAGE) cmd_list.append('list') cmd_list.append('--long') try: @@ -233,6 +265,7 @@ class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable): if get_properties: for 
hostname, host_vars in results['_meta']['hostvars'].items(): cmd_get_properties = cmd.copy() + cmd_get_properties.append(self.IOCAGE) cmd_get_properties.append("get") cmd_get_properties.append("--all") cmd_get_properties.append(f"{hostname}") diff --git a/plugins/inventory/linode.py b/plugins/inventory/linode.py index 2419ef3197..594cf30eba 100644 --- a/plugins/inventory/linode.py +++ b/plugins/inventory/linode.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r''' name: linode diff --git a/plugins/inventory/lxd.py b/plugins/inventory/lxd.py index 81229186b8..480af8388c 100644 --- a/plugins/inventory/lxd.py +++ b/plugins/inventory/lxd.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r''' name: lxd diff --git a/plugins/inventory/nmap.py b/plugins/inventory/nmap.py index 5dacd28e95..a9384b7c27 100644 --- a/plugins/inventory/nmap.py +++ b/plugins/inventory/nmap.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = ''' author: Unknown (!UNKNOWN) diff --git a/plugins/inventory/online.py b/plugins/inventory/online.py index 9475049c08..9e29c91e54 100644 --- a/plugins/inventory/online.py +++ b/plugins/inventory/online.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see 
LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = r''' name: online diff --git a/plugins/inventory/opennebula.py b/plugins/inventory/opennebula.py index 7fc320f326..ed26880d07 100644 --- a/plugins/inventory/opennebula.py +++ b/plugins/inventory/opennebula.py @@ -3,9 +3,8 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) +from __future__ import annotations -__metaclass__ = type DOCUMENTATION = r''' name: opennebula diff --git a/plugins/inventory/proxmox.py b/plugins/inventory/proxmox.py index 2d65657d67..9ef9b78adf 100644 --- a/plugins/inventory/proxmox.py +++ b/plugins/inventory/proxmox.py @@ -3,9 +3,8 @@ # Copyright (c) 2018 Ansible Project # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) +from __future__ import annotations -__metaclass__ = type DOCUMENTATION = ''' name: proxmox diff --git a/plugins/inventory/scaleway.py b/plugins/inventory/scaleway.py index e396740bca..d815890df4 100644 --- a/plugins/inventory/scaleway.py +++ b/plugins/inventory/scaleway.py @@ -3,9 +3,8 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) +from __future__ import annotations -__metaclass__ = type DOCUMENTATION = r''' name: scaleway diff --git a/plugins/inventory/stackpath_compute.py b/plugins/inventory/stackpath_compute.py index 
c87d0e5277..bc55027155 100644 --- a/plugins/inventory/stackpath_compute.py +++ b/plugins/inventory/stackpath_compute.py @@ -4,8 +4,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = ''' name: stackpath_compute diff --git a/plugins/inventory/virtualbox.py b/plugins/inventory/virtualbox.py index 9112518a46..db2750f636 100644 --- a/plugins/inventory/virtualbox.py +++ b/plugins/inventory/virtualbox.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = ''' author: Unknown (!UNKNOWN) diff --git a/plugins/inventory/xen_orchestra.py b/plugins/inventory/xen_orchestra.py index 0a050d0bf9..5c21fb365a 100644 --- a/plugins/inventory/xen_orchestra.py +++ b/plugins/inventory/xen_orchestra.py @@ -3,8 +3,7 @@ # GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) # SPDX-License-Identifier: GPL-3.0-or-later -from __future__ import (absolute_import, division, print_function) -__metaclass__ = type +from __future__ import annotations DOCUMENTATION = ''' name: xen_orchestra diff --git a/plugins/lookup/onepassword_ssh_key.py b/plugins/lookup/onepassword_ssh_key.py new file mode 100644 index 0000000000..253d8c68f4 --- /dev/null +++ b/plugins/lookup/onepassword_ssh_key.py @@ -0,0 +1,119 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2025, Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from 
__future__ import annotations + +DOCUMENTATION = """ +name: onepassword_ssh_key +author: + - Mohammed Babelly (@mohammedbabelly20) +requirements: + - C(op) 1Password command line utility version 2 or later. +short_description: Fetch SSH keys stored in 1Password +version_added: "10.3.0" +description: + - P(community.general.onepassword_ssh_key#lookup) wraps C(op) command line utility to fetch SSH keys from 1Password. +notes: + - By default, it returns the private key value in PKCS#8 format, unless O(ssh_format=true) is passed. + - The plugin works only for C(SSHKEY) type items. + - This plugin requires C(op) version 2 or later. + +options: + _terms: + description: Identifier(s) (case-insensitive UUID or name) of item(s) to retrieve. + required: true + type: list + elements: string + ssh_format: + description: Output key in SSH format if V(true). Otherwise, outputs in the default format (PKCS#8). + default: false + type: bool + +extends_documentation_fragment: + - community.general.onepassword + - community.general.onepassword.lookup +""" + +EXAMPLES = """ +- name: Retrieve the private SSH key from 1Password + ansible.builtin.debug: + msg: "{{ lookup('community.general.onepassword_ssh_key', 'SSH Key', ssh_format=true) }}" +""" + +RETURN = """ +_raw: + description: Private key of SSH keypair. 
+ type: list + elements: string +""" +import json + +from ansible_collections.community.general.plugins.lookup.onepassword import ( + OnePass, + OnePassCLIv2, +) +from ansible.errors import AnsibleLookupError +from ansible.plugins.lookup import LookupBase + + +class LookupModule(LookupBase): + def get_ssh_key(self, out, item_id, ssh_format=False): + data = json.loads(out) + + if data.get("category") != "SSH_KEY": + raise AnsibleLookupError(f"Item {item_id} is not an SSH key") + + private_key_field = next( + ( + field + for field in data.get("fields", {}) + if field.get("id") == "private_key" and field.get("type") == "SSHKEY" + ), + None, + ) + if not private_key_field: + raise AnsibleLookupError(f"No private key found for item {item_id}.") + + if ssh_format: + return ( + private_key_field.get("ssh_formats", {}) + .get("openssh", {}) + .get("value", "") + ) + return private_key_field.get("value", "") + + def run(self, terms, variables=None, **kwargs): + self.set_options(var_options=variables, direct=kwargs) + + ssh_format = self.get_option("ssh_format") + vault = self.get_option("vault") + subdomain = self.get_option("subdomain") + domain = self.get_option("domain", "1password.com") + username = self.get_option("username") + secret_key = self.get_option("secret_key") + master_password = self.get_option("master_password") + service_account_token = self.get_option("service_account_token") + account_id = self.get_option("account_id") + connect_host = self.get_option("connect_host") + connect_token = self.get_option("connect_token") + + op = OnePass( + subdomain=subdomain, + domain=domain, + username=username, + secret_key=secret_key, + master_password=master_password, + service_account_token=service_account_token, + account_id=account_id, + connect_host=connect_host, + connect_token=connect_token, + cli_class=OnePassCLIv2, + ) + op.assert_logged_in() + + return [ + self.get_ssh_key(op.get_raw(term, vault), term, ssh_format=ssh_format) + for term in terms + ] diff --git 
a/plugins/module_utils/identity/keycloak/keycloak.py b/plugins/module_utils/identity/keycloak/keycloak.py index e008131913..962ac276e7 100644 --- a/plugins/module_utils/identity/keycloak/keycloak.py +++ b/plugins/module_utils/identity/keycloak/keycloak.py @@ -143,6 +143,7 @@ def keycloak_argument_spec(): validate_certs=dict(type='bool', default=True), connection_timeout=dict(type='int', default=10), token=dict(type='str', no_log=True), + refresh_token=dict(type='str', no_log=True), http_agent=dict(type='str', default='Ansible'), ) @@ -152,58 +153,113 @@ def camel(words): class KeycloakError(Exception): - pass + def __init__(self, msg, authError=None): + self.msg = msg + self.authError = authError + + def __str__(self): + return str(self.msg) + + +def _token_request(module_params, payload): + """ Obtains connection header with token for the authentication, + using the provided auth_username/auth_password + :param module_params: parameters of the module + :param payload: + type: + dict + description: + Authentication request payload. Must contain at least + 'grant_type' and 'client_id', optionally 'client_secret', + along with parameters based on 'grant_type'; e.g., + 'username'/'password' for type 'password', + 'refresh_token' for type 'refresh_token'. + :return: access token + """ + base_url = module_params.get('auth_keycloak_url') + if not base_url.lower().startswith(('http', 'https')): + raise KeycloakError("auth_url '%s' should either start with 'http' or 'https'." 
% base_url) + auth_realm = module_params.get('auth_realm') + auth_url = URL_TOKEN.format(url=base_url, realm=auth_realm) + http_agent = module_params.get('http_agent') + validate_certs = module_params.get('validate_certs') + connection_timeout = module_params.get('connection_timeout') + + try: + r = json.loads(to_native(open_url(auth_url, method='POST', + validate_certs=validate_certs, http_agent=http_agent, timeout=connection_timeout, + data=urlencode(payload)).read())) + + return r['access_token'] + except ValueError as e: + raise KeycloakError( + 'API returned invalid JSON when trying to obtain access token from %s: %s' + % (auth_url, str(e))) + except KeyError: + raise KeycloakError( + 'API did not include access_token field in response from %s' % auth_url) + except Exception as e: + raise KeycloakError('Could not obtain access token from %s: %s' + % (auth_url, str(e)), authError=e) + + +def _request_token_using_credentials(module_params): + """ Obtains an access token for the authentication, + using the provided auth_username/auth_password + :param module_params: parameters of the module. Must include 'auth_username' and 'auth_password'. + :return: access token + """ + client_id = module_params.get('auth_client_id') + auth_username = module_params.get('auth_username') + auth_password = module_params.get('auth_password') + client_secret = module_params.get('auth_client_secret') + + temp_payload = { + 'grant_type': 'password', + 'client_id': client_id, + 'client_secret': client_secret, + 'username': auth_username, + 'password': auth_password, + } + # Remove empty items, for instance missing client_secret + payload = {k: v for k, v in temp_payload.items() if v is not None} + + return _token_request(module_params, payload) + + +def _request_token_using_refresh_token(module_params): + """ Obtains an access token for the authentication, + using the provided refresh_token + :param module_params: parameters of the module. 
Must include 'refresh_token'. + :return: access token + """ + client_id = module_params.get('auth_client_id') + refresh_token = module_params.get('refresh_token') + client_secret = module_params.get('auth_client_secret') + + temp_payload = { + 'grant_type': 'refresh_token', + 'client_id': client_id, + 'client_secret': client_secret, + 'refresh_token': refresh_token, + } + # Remove empty items, for instance missing client_secret + payload = {k: v for k, v in temp_payload.items() if v is not None} + + return _token_request(module_params, payload) def get_token(module_params): """ Obtains connection header with token for the authentication, - token already given or obtained from credentials - :param module_params: parameters of the module - :return: connection header + token already given or obtained from credentials + :param module_params: parameters of the module + :return: connection header """ token = module_params.get('token') - base_url = module_params.get('auth_keycloak_url') - http_agent = module_params.get('http_agent') - - if not base_url.lower().startswith(('http', 'https')): - raise KeycloakError("auth_url '%s' should either start with 'http' or 'https'." 
% base_url) if token is None: - base_url = module_params.get('auth_keycloak_url') - validate_certs = module_params.get('validate_certs') - auth_realm = module_params.get('auth_realm') - client_id = module_params.get('auth_client_id') - auth_username = module_params.get('auth_username') - auth_password = module_params.get('auth_password') - client_secret = module_params.get('auth_client_secret') - connection_timeout = module_params.get('connection_timeout') - auth_url = URL_TOKEN.format(url=base_url, realm=auth_realm) - temp_payload = { - 'grant_type': 'password', - 'client_id': client_id, - 'client_secret': client_secret, - 'username': auth_username, - 'password': auth_password, - } - # Remove empty items, for instance missing client_secret - payload = {k: v for k, v in temp_payload.items() if v is not None} - try: - r = json.loads(to_native(open_url(auth_url, method='POST', - validate_certs=validate_certs, http_agent=http_agent, timeout=connection_timeout, - data=urlencode(payload)).read())) - except ValueError as e: - raise KeycloakError( - 'API returned invalid JSON when trying to obtain access token from %s: %s' - % (auth_url, str(e))) - except Exception as e: - raise KeycloakError('Could not obtain access token from %s: %s' - % (auth_url, str(e))) + token = _request_token_using_credentials(module_params) - try: - token = r['access_token'] - except KeyError: - raise KeycloakError( - 'Could not obtain access token from %s' % auth_url) return { 'Authorization': 'Bearer ' + token, 'Content-Type': 'application/json' @@ -273,6 +329,7 @@ class KeycloakAPI(object): """ Keycloak API access; Keycloak uses OAuth 2.0 to protect its API, an access token for which is obtained through OpenID connect """ + def __init__(self, module, connection_header): self.module = module self.baseurl = self.module.params.get('auth_keycloak_url') @@ -281,6 +338,72 @@ class KeycloakAPI(object): self.restheaders = connection_header self.http_agent = self.module.params.get('http_agent') + def 
_request(self, url, method, data=None): + """ Makes a request to Keycloak and returns the raw response. + If a 401 is returned, attempts to re-authenticate + using first the module's refresh_token (if provided) + and then the module's username/password (if provided). + On successful re-authentication, the new token is stored + in the restheaders for future requests. + + :param url: request path + :param method: request method (e.g., 'GET', 'POST', etc.) + :param data: (optional) data for request + :return: raw API response + """ + def make_request_catching_401(): + try: + return open_url(url, method=method, data=data, + http_agent=self.http_agent, headers=self.restheaders, + timeout=self.connection_timeout, + validate_certs=self.validate_certs) + except HTTPError as e: + if e.code != 401: + raise e + return e + + r = make_request_catching_401() + + if isinstance(r, Exception): + # Try to refresh token and retry, if available + refresh_token = self.module.params.get('refresh_token') + if refresh_token is not None: + try: + token = _request_token_using_refresh_token(self.module.params) + self.restheaders['Authorization'] = 'Bearer ' + token + + r = make_request_catching_401() + except KeycloakError as e: + # Token refresh returns 400 if token is expired/invalid, so continue on if we get a 400 + if e.authError is not None and e.authError.code != 400: + raise e + + if isinstance(r, Exception): + # Try to re-auth with username/password, if available + auth_username = self.module.params.get('auth_username') + auth_password = self.module.params.get('auth_password') + if auth_username is not None and auth_password is not None: + token = _request_token_using_credentials(self.module.params) + self.restheaders['Authorization'] = 'Bearer ' + token + + r = make_request_catching_401() + + if isinstance(r, Exception): + # Either no re-auth options were available, or they all failed + raise r + + return r + + def _request_and_deserialize(self, url, method, data=None): + """ Wraps 
the _request method with JSON deserialization of the response. + + :param url: request path + :param method: request method (e.g., 'GET', 'POST', etc.) + :param data: (optional) data for request + :return: deserialized API response + """ + return json.loads(to_native(self._request(url, method, data).read())) + def get_realm_info_by_id(self, realm='master'): """ Obtain realm public info by id @@ -290,16 +413,14 @@ class KeycloakAPI(object): realm_info_url = URL_REALM_INFO.format(url=self.baseurl, realm=realm) try: - return json.loads(to_native(open_url(realm_info_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(realm_info_url, method='GET') except HTTPError as e: if e.code == 404: return None else: - self.fail_open_url(e, msg='Could not obtain realm %s: %s' % (realm, str(e)), - exception=traceback.format_exc()) + self.fail_request(e, msg='Could not obtain realm %s: %s' % (realm, str(e)), + exception=traceback.format_exc()) except ValueError as e: self.module.fail_json(msg='API returned incorrect JSON when trying to obtain realm %s: %s' % (realm, str(e)), exception=traceback.format_exc()) @@ -321,16 +442,14 @@ class KeycloakAPI(object): realm_keys_metadata_url = URL_REALM_KEYS_METADATA.format(url=self.baseurl, realm=realm) try: - return json.loads(to_native(open_url(realm_keys_metadata_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(realm_keys_metadata_url, method="GET") except HTTPError as e: if e.code == 404: return None else: - self.fail_open_url(e, msg='Could not obtain realm %s: %s' % (realm, str(e)), - exception=traceback.format_exc()) + self.fail_request(e, msg='Could not obtain realm %s: %s' % (realm, str(e)), + exception=traceback.format_exc()) except ValueError as e: 
self.module.fail_json(msg='API returned incorrect JSON when trying to obtain realm %s: %s' % (realm, str(e)), exception=traceback.format_exc()) @@ -347,15 +466,14 @@ class KeycloakAPI(object): realm_url = URL_REALM.format(url=self.baseurl, realm=realm) try: - return json.loads(to_native(open_url(realm_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(realm_url, method='GET') except HTTPError as e: if e.code == 404: return None else: - self.fail_open_url(e, msg='Could not obtain realm %s: %s' % (realm, str(e)), - exception=traceback.format_exc()) + self.fail_request(e, msg='Could not obtain realm %s: %s' % (realm, str(e)), + exception=traceback.format_exc()) except ValueError as e: self.module.fail_json(msg='API returned incorrect JSON when trying to obtain realm %s: %s' % (realm, str(e)), exception=traceback.format_exc()) @@ -372,11 +490,10 @@ class KeycloakAPI(object): realm_url = URL_REALM.format(url=self.baseurl, realm=realm) try: - return open_url(realm_url, method='PUT', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(realmrep), validate_certs=self.validate_certs) + return self._request(realm_url, method='PUT', data=json.dumps(realmrep)) except Exception as e: - self.fail_open_url(e, msg='Could not update realm %s: %s' % (realm, str(e)), - exception=traceback.format_exc()) + self.fail_request(e, msg='Could not update realm %s: %s' % (realm, str(e)), + exception=traceback.format_exc()) def create_realm(self, realmrep): """ Create a realm in keycloak @@ -386,11 +503,10 @@ class KeycloakAPI(object): realm_url = URL_REALMS.format(url=self.baseurl) try: - return open_url(realm_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(realmrep), validate_certs=self.validate_certs) + return 
self._request(realm_url, method='POST', data=json.dumps(realmrep)) except Exception as e: - self.fail_open_url(e, msg='Could not create realm %s: %s' % (realmrep['id'], str(e)), - exception=traceback.format_exc()) + self.fail_request(e, msg='Could not create realm %s: %s' % (realmrep['id'], str(e)), + exception=traceback.format_exc()) def delete_realm(self, realm="master"): """ Delete a realm from Keycloak @@ -401,11 +517,10 @@ class KeycloakAPI(object): realm_url = URL_REALM.format(url=self.baseurl, realm=realm) try: - return open_url(realm_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs) + return self._request(realm_url, method='DELETE') except Exception as e: - self.fail_open_url(e, msg='Could not delete realm %s: %s' % (realm, str(e)), - exception=traceback.format_exc()) + self.fail_request(e, msg='Could not delete realm %s: %s' % (realm, str(e)), + exception=traceback.format_exc()) def get_clients(self, realm='master', filter=None): """ Obtains client representations for clients in a realm @@ -419,15 +534,13 @@ class KeycloakAPI(object): clientlist_url += '?clientId=%s' % filter try: - return json.loads(to_native(open_url(clientlist_url, http_agent=self.http_agent, method='GET', headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(clientlist_url, method='GET') except ValueError as e: self.module.fail_json(msg='API returned incorrect JSON when trying to obtain list of clients for realm %s: %s' % (realm, str(e))) except Exception as e: - self.fail_open_url(e, msg='Could not obtain list of clients for realm %s: %s' - % (realm, str(e))) + self.fail_request(e, msg='Could not obtain list of clients for realm %s: %s' + % (realm, str(e))) def get_client_by_clientid(self, client_id, realm='master'): """ Get client representation by clientId @@ -451,16 +564,14 @@ class 
KeycloakAPI(object): client_url = URL_CLIENT.format(url=self.baseurl, realm=realm, id=id) try: - return json.loads(to_native(open_url(client_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(client_url, method='GET') except HTTPError as e: if e.code == 404: return None else: - self.fail_open_url(e, msg='Could not obtain client %s for realm %s: %s' - % (id, realm, str(e))) + self.fail_request(e, msg='Could not obtain client %s for realm %s: %s' + % (id, realm, str(e))) except ValueError as e: self.module.fail_json(msg='API returned incorrect JSON when trying to obtain client %s for realm %s: %s' % (id, realm, str(e))) @@ -491,11 +602,10 @@ class KeycloakAPI(object): client_url = URL_CLIENT.format(url=self.baseurl, realm=realm, id=id) try: - return open_url(client_url, method='PUT', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(clientrep), validate_certs=self.validate_certs) + return self._request(client_url, method='PUT', data=json.dumps(clientrep)) except Exception as e: - self.fail_open_url(e, msg='Could not update client %s in realm %s: %s' - % (id, realm, str(e))) + self.fail_request(e, msg='Could not update client %s in realm %s: %s' + % (id, realm, str(e))) def create_client(self, clientrep, realm="master"): """ Create a client in keycloak @@ -506,11 +616,10 @@ class KeycloakAPI(object): client_url = URL_CLIENTS.format(url=self.baseurl, realm=realm) try: - return open_url(client_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(clientrep), validate_certs=self.validate_certs) + return self._request(client_url, method='POST', data=json.dumps(clientrep)) except Exception as e: - self.fail_open_url(e, msg='Could not create client %s in realm %s: %s' - % (clientrep['clientId'], realm, 
str(e))) + self.fail_request(e, msg='Could not create client %s in realm %s: %s' + % (clientrep['clientId'], realm, str(e))) def delete_client(self, id, realm="master"): """ Delete a client from Keycloak @@ -522,11 +631,10 @@ class KeycloakAPI(object): client_url = URL_CLIENT.format(url=self.baseurl, realm=realm, id=id) try: - return open_url(client_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs) + return self._request(client_url, method='DELETE') except Exception as e: - self.fail_open_url(e, msg='Could not delete client %s in realm %s: %s' - % (id, realm, str(e))) + self.fail_request(e, msg='Could not delete client %s in realm %s: %s' + % (id, realm, str(e))) def get_client_roles_by_id(self, cid, realm="master"): """ Fetch the roles of the a client on the Keycloak server. @@ -537,12 +645,10 @@ class KeycloakAPI(object): """ client_roles_url = URL_CLIENT_ROLES.format(url=self.baseurl, realm=realm, id=cid) try: - return json.loads(to_native(open_url(client_roles_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(client_roles_url, method="GET") except Exception as e: - self.fail_open_url(e, msg="Could not fetch rolemappings for client %s in realm %s: %s" - % (cid, realm, str(e))) + self.fail_request(e, msg="Could not fetch rolemappings for client %s in realm %s: %s" + % (cid, realm, str(e))) def get_client_role_id_by_name(self, cid, name, realm="master"): """ Get the role ID of a client. 
@@ -569,15 +675,13 @@ class KeycloakAPI(object): """ rolemappings_url = URL_CLIENT_GROUP_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=gid, client=cid) try: - rolemappings = json.loads(to_native(open_url(rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + rolemappings = self._request_and_deserialize(rolemappings_url, method="GET") for role in rolemappings: if rid == role['id']: return role except Exception as e: - self.fail_open_url(e, msg="Could not fetch rolemappings for client %s in group %s, realm %s: %s" - % (cid, gid, realm, str(e))) + self.fail_request(e, msg="Could not fetch rolemappings for client %s in group %s, realm %s: %s" + % (cid, gid, realm, str(e))) return None def get_client_group_available_rolemappings(self, gid, cid, realm="master"): @@ -590,12 +694,10 @@ class KeycloakAPI(object): """ available_rolemappings_url = URL_CLIENT_GROUP_ROLEMAPPINGS_AVAILABLE.format(url=self.baseurl, realm=realm, id=gid, client=cid) try: - return json.loads(to_native(open_url(available_rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(available_rolemappings_url, method="GET") except Exception as e: - self.fail_open_url(e, msg="Could not fetch available rolemappings for client %s in group %s, realm %s: %s" - % (cid, gid, realm, str(e))) + self.fail_request(e, msg="Could not fetch available rolemappings for client %s in group %s, realm %s: %s" + % (cid, gid, realm, str(e))) def get_client_group_composite_rolemappings(self, gid, cid, realm="master"): """ Fetch the composite role of a client in a specified group on the Keycloak server. 
@@ -607,12 +709,10 @@ class KeycloakAPI(object): """ composite_rolemappings_url = URL_CLIENT_GROUP_ROLEMAPPINGS_COMPOSITE.format(url=self.baseurl, realm=realm, id=gid, client=cid) try: - return json.loads(to_native(open_url(composite_rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(composite_rolemappings_url, method="GET") except Exception as e: - self.fail_open_url(e, msg="Could not fetch available rolemappings for client %s in group %s, realm %s: %s" - % (cid, gid, realm, str(e))) + self.fail_request(e, msg="Could not fetch available rolemappings for client %s in group %s, realm %s: %s" + % (cid, gid, realm, str(e))) def get_role_by_id(self, rid, realm="master"): """ Fetch a role by its id on the Keycloak server. @@ -623,12 +723,10 @@ class KeycloakAPI(object): """ client_roles_url = URL_ROLES_BY_ID.format(url=self.baseurl, realm=realm, id=rid) try: - return json.loads(to_native(open_url(client_roles_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(client_roles_url, method="GET") except Exception as e: - self.fail_open_url(e, msg="Could not fetch role for id %s in realm %s: %s" - % (rid, realm, str(e))) + self.fail_request(e, msg="Could not fetch role for id %s in realm %s: %s" + % (rid, realm, str(e))) def get_client_roles_by_id_composite_rolemappings(self, rid, cid, realm="master"): """ Fetch a role by its id on the Keycloak server. 
@@ -640,12 +738,10 @@ class KeycloakAPI(object): """ client_roles_url = URL_ROLES_BY_ID_COMPOSITES_CLIENTS.format(url=self.baseurl, realm=realm, id=rid, cid=cid) try: - return json.loads(to_native(open_url(client_roles_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(client_roles_url, method="GET") except Exception as e: - self.fail_open_url(e, msg="Could not fetch role for id %s and cid %s in realm %s: %s" - % (rid, cid, realm, str(e))) + self.fail_request(e, msg="Could not fetch role for id %s and cid %s in realm %s: %s" + % (rid, cid, realm, str(e))) def add_client_roles_by_id_composite_rolemapping(self, rid, roles_rep, realm="master"): """ Assign roles to composite role @@ -657,11 +753,10 @@ class KeycloakAPI(object): """ available_rolemappings_url = URL_ROLES_BY_ID_COMPOSITES.format(url=self.baseurl, realm=realm, id=rid) try: - open_url(available_rolemappings_url, method="POST", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(roles_rep), - validate_certs=self.validate_certs, timeout=self.connection_timeout) + self._request(available_rolemappings_url, method="POST", data=json.dumps(roles_rep)) except Exception as e: - self.fail_open_url(e, msg="Could not assign roles to composite role %s and realm %s: %s" - % (rid, realm, str(e))) + self.fail_request(e, msg="Could not assign roles to composite role %s and realm %s: %s" + % (rid, realm, str(e))) def add_group_realm_rolemapping(self, gid, role_rep, realm="master"): """ Add the specified realm role to specified group on the Keycloak server. 
@@ -673,11 +768,10 @@ class KeycloakAPI(object): """ url = URL_REALM_GROUP_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, group=gid) try: - open_url(url, method="POST", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep), - validate_certs=self.validate_certs, timeout=self.connection_timeout) + self._request(url, method="POST", data=json.dumps(role_rep)) except Exception as e: - self.fail_open_url(e, msg="Could add realm role mappings for group %s, realm %s: %s" - % (gid, realm, str(e))) + self.fail_request(e, msg="Could not add realm role mappings for group %s, realm %s: %s" + % (gid, realm, str(e))) def delete_group_realm_rolemapping(self, gid, role_rep, realm="master"): """ Delete the specified realm role from the specified group on the Keycloak server. @@ -689,11 +783,10 @@ class KeycloakAPI(object): """ url = URL_REALM_GROUP_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, group=gid) try: - open_url(url, method="DELETE", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep), - validate_certs=self.validate_certs, timeout=self.connection_timeout) + self._request(url, method="DELETE", data=json.dumps(role_rep)) except Exception as e: - self.fail_open_url(e, msg="Could not delete realm role mappings for group %s, realm %s: %s" - % (gid, realm, str(e))) + self.fail_request(e, msg="Could not delete realm role mappings for group %s, realm %s: %s" + % (gid, realm, str(e))) def add_group_rolemapping(self, gid, cid, role_rep, realm="master"): """ Fetch the composite role of a client in a specified group on the Keycloak server.
@@ -706,11 +799,10 @@ class KeycloakAPI(object): """ available_rolemappings_url = URL_CLIENT_GROUP_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=gid, client=cid) try: - open_url(available_rolemappings_url, method="POST", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep), - validate_certs=self.validate_certs, timeout=self.connection_timeout) + self._request(available_rolemappings_url, method="POST", data=json.dumps(role_rep)) except Exception as e: - self.fail_open_url(e, msg="Could not fetch available rolemappings for client %s in group %s, realm %s: %s" - % (cid, gid, realm, str(e))) + self.fail_request(e, msg="Could not fetch available rolemappings for client %s in group %s, realm %s: %s" + % (cid, gid, realm, str(e))) def delete_group_rolemapping(self, gid, cid, role_rep, realm="master"): """ Delete the rolemapping of a client in a specified group on the Keycloak server. @@ -723,11 +815,10 @@ class KeycloakAPI(object): """ available_rolemappings_url = URL_CLIENT_GROUP_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=gid, client=cid) try: - open_url(available_rolemappings_url, method="DELETE", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep), - validate_certs=self.validate_certs, timeout=self.connection_timeout) + self._request(available_rolemappings_url, method="DELETE", data=json.dumps(role_rep)) except Exception as e: - self.fail_open_url(e, msg="Could not delete available rolemappings for client %s in group %s, realm %s: %s" - % (cid, gid, realm, str(e))) + self.fail_request(e, msg="Could not delete available rolemappings for client %s in group %s, realm %s: %s" + % (cid, gid, realm, str(e))) def get_client_user_rolemapping_by_id(self, uid, cid, rid, realm='master'): """ Obtain client representation by id @@ -740,15 +831,13 @@ class KeycloakAPI(object): """ rolemappings_url = URL_CLIENT_USER_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=uid, client=cid) try: - rolemappings = 
json.loads(to_native(open_url(rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + rolemappings = self._request_and_deserialize(rolemappings_url, method="GET") for role in rolemappings: if rid == role['id']: return role except Exception as e: - self.fail_open_url(e, msg="Could not fetch rolemappings for client %s and user %s, realm %s: %s" - % (cid, uid, realm, str(e))) + self.fail_request(e, msg="Could not fetch rolemappings for client %s and user %s, realm %s: %s" + % (cid, uid, realm, str(e))) return None def get_client_user_available_rolemappings(self, uid, cid, realm="master"): @@ -761,12 +850,10 @@ class KeycloakAPI(object): """ available_rolemappings_url = URL_CLIENT_USER_ROLEMAPPINGS_AVAILABLE.format(url=self.baseurl, realm=realm, id=uid, client=cid) try: - return json.loads(to_native(open_url(available_rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(available_rolemappings_url, method="GET") except Exception as e: - self.fail_open_url(e, msg="Could not fetch effective rolemappings for client %s and user %s, realm %s: %s" - % (cid, uid, realm, str(e))) + self.fail_request(e, msg="Could not fetch effective rolemappings for client %s and user %s, realm %s: %s" + % (cid, uid, realm, str(e))) def get_client_user_composite_rolemappings(self, uid, cid, realm="master"): """ Fetch the composite role of a client for a specified user on the Keycloak server. 
@@ -778,12 +865,10 @@ class KeycloakAPI(object): """ composite_rolemappings_url = URL_CLIENT_USER_ROLEMAPPINGS_COMPOSITE.format(url=self.baseurl, realm=realm, id=uid, client=cid) try: - return json.loads(to_native(open_url(composite_rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(composite_rolemappings_url, method="GET") except Exception as e: - self.fail_open_url(e, msg="Could not fetch available rolemappings for user %s of realm %s: %s" - % (uid, realm, str(e))) + self.fail_request(e, msg="Could not fetch available rolemappings for user %s of realm %s: %s" + % (uid, realm, str(e))) def get_realm_user_rolemapping_by_id(self, uid, rid, realm='master'): """ Obtain role representation by id @@ -795,15 +880,13 @@ class KeycloakAPI(object): """ rolemappings_url = URL_REALM_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=uid) try: - rolemappings = json.loads(to_native(open_url(rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + rolemappings = self._request_and_deserialize(rolemappings_url, method="GET") for role in rolemappings: if rid == role['id']: return role except Exception as e: - self.fail_open_url(e, msg="Could not fetch rolemappings for user %s, realm %s: %s" - % (uid, realm, str(e))) + self.fail_request(e, msg="Could not fetch rolemappings for user %s, realm %s: %s" + % (uid, realm, str(e))) return None def get_realm_user_available_rolemappings(self, uid, realm="master"): @@ -815,12 +898,10 @@ class KeycloakAPI(object): """ available_rolemappings_url = URL_REALM_ROLEMAPPINGS_AVAILABLE.format(url=self.baseurl, realm=realm, id=uid) try: - return json.loads(to_native(open_url(available_rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - 
timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(available_rolemappings_url, method="GET") except Exception as e: - self.fail_open_url(e, msg="Could not fetch available rolemappings for user %s of realm %s: %s" - % (uid, realm, str(e))) + self.fail_request(e, msg="Could not fetch available rolemappings for user %s of realm %s: %s" + % (uid, realm, str(e))) def get_realm_user_composite_rolemappings(self, uid, realm="master"): """ Fetch the composite role of a realm for a specified user on the Keycloak server. @@ -831,12 +912,10 @@ class KeycloakAPI(object): """ composite_rolemappings_url = URL_REALM_ROLEMAPPINGS_COMPOSITE.format(url=self.baseurl, realm=realm, id=uid) try: - return json.loads(to_native(open_url(composite_rolemappings_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(composite_rolemappings_url, method="GET") except Exception as e: - self.fail_open_url(e, msg="Could not fetch effective rolemappings for user %s, realm %s: %s" - % (uid, realm, str(e))) + self.fail_request(e, msg="Could not fetch effective rolemappings for user %s, realm %s: %s" + % (uid, realm, str(e))) def get_user_by_username(self, username, realm="master"): """ Fetch a keycloak user within a realm based on its username. 
@@ -849,9 +928,7 @@ class KeycloakAPI(object): users_url += '?username=%s&exact=true' % username try: userrep = None - users = json.loads(to_native(open_url(users_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + users = self._request_and_deserialize(users_url, method='GET') for user in users: if user['username'] == username: userrep = user @@ -862,8 +939,8 @@ class KeycloakAPI(object): self.module.fail_json(msg='API returned incorrect JSON when trying to obtain the user for realm %s and username %s: %s' % (realm, username, str(e))) except Exception as e: - self.fail_open_url(e, msg='Could not obtain the user for realm %s and username %s: %s' - % (realm, username, str(e))) + self.fail_request(e, msg='Could not obtain the user for realm %s and username %s: %s' + % (realm, username, str(e))) def get_service_account_user_by_client_id(self, client_id, realm="master"): """ Fetch a keycloak service account user within a realm based on its client_id. 
@@ -876,15 +953,13 @@ class KeycloakAPI(object): service_account_user_url = URL_CLIENT_SERVICE_ACCOUNT_USER.format(url=self.baseurl, realm=realm, id=cid) try: - return json.loads(to_native(open_url(service_account_user_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(service_account_user_url, method='GET') except ValueError as e: self.module.fail_json(msg='API returned incorrect JSON when trying to obtain the service-account-user for realm %s and client_id %s: %s' % (realm, client_id, str(e))) except Exception as e: - self.fail_open_url(e, msg='Could not obtain the service-account-user for realm %s and client_id %s: %s' - % (realm, client_id, str(e))) + self.fail_request(e, msg='Could not obtain the service-account-user for realm %s and client_id %s: %s' + % (realm, client_id, str(e))) def add_user_rolemapping(self, uid, cid, role_rep, realm="master"): """ Assign a realm or client role to a specified user on the Keycloak server. 
@@ -898,19 +973,17 @@ class KeycloakAPI(object): if cid is None: user_realm_rolemappings_url = URL_REALM_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=uid) try: - open_url(user_realm_rolemappings_url, method="POST", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep), - validate_certs=self.validate_certs, timeout=self.connection_timeout) + self._request(user_realm_rolemappings_url, method="POST", data=json.dumps(role_rep)) except Exception as e: - self.fail_open_url(e, msg="Could not map roles to userId %s for realm %s and roles %s: %s" - % (uid, realm, json.dumps(role_rep), str(e))) + self.fail_request(e, msg="Could not map roles to userId %s for realm %s and roles %s: %s" + % (uid, realm, json.dumps(role_rep), str(e))) else: user_client_rolemappings_url = URL_CLIENT_USER_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=uid, client=cid) try: - open_url(user_client_rolemappings_url, method="POST", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep), - validate_certs=self.validate_certs, timeout=self.connection_timeout) + self._request(user_client_rolemappings_url, method="POST", data=json.dumps(role_rep)) except Exception as e: - self.fail_open_url(e, msg="Could not map roles to userId %s for client %s, realm %s and roles %s: %s" - % (cid, uid, realm, json.dumps(role_rep), str(e))) + self.fail_request(e, msg="Could not map roles to userId %s for client %s, realm %s and roles %s: %s" + % (cid, uid, realm, json.dumps(role_rep), str(e))) def delete_user_rolemapping(self, uid, cid, role_rep, realm="master"): """ Delete the rolemapping of a client in a specified user on the Keycloak server. 
@@ -924,19 +997,17 @@ class KeycloakAPI(object): if cid is None: user_realm_rolemappings_url = URL_REALM_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=uid) try: - open_url(user_realm_rolemappings_url, method="DELETE", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep), - validate_certs=self.validate_certs, timeout=self.connection_timeout) + self._request(user_realm_rolemappings_url, method="DELETE", data=json.dumps(role_rep)) except Exception as e: - self.fail_open_url(e, msg="Could not remove roles %s from userId %s, realm %s: %s" - % (json.dumps(role_rep), uid, realm, str(e))) + self.fail_request(e, msg="Could not remove roles %s from userId %s, realm %s: %s" + % (json.dumps(role_rep), uid, realm, str(e))) else: user_client_rolemappings_url = URL_CLIENT_USER_ROLEMAPPINGS.format(url=self.baseurl, realm=realm, id=uid, client=cid) try: - open_url(user_client_rolemappings_url, method="DELETE", http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(role_rep), - validate_certs=self.validate_certs, timeout=self.connection_timeout) + self._request(user_client_rolemappings_url, method="DELETE", data=json.dumps(role_rep)) except Exception as e: - self.fail_open_url(e, msg="Could not remove roles %s for client %s from userId %s, realm %s: %s" - % (json.dumps(role_rep), cid, uid, realm, str(e))) + self.fail_request(e, msg="Could not remove roles %s for client %s from userId %s, realm %s: %s" + % (json.dumps(role_rep), cid, uid, realm, str(e))) def get_client_templates(self, realm='master'): """ Obtains client template representations for client templates in a realm @@ -947,14 +1018,13 @@ class KeycloakAPI(object): url = URL_CLIENTTEMPLATES.format(url=self.baseurl, realm=realm) try: - return json.loads(to_native(open_url(url, method='GET', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(url, 
method='GET') except ValueError as e: self.module.fail_json(msg='API returned incorrect JSON when trying to obtain list of client templates for realm %s: %s' % (realm, str(e))) except Exception as e: - self.fail_open_url(e, msg='Could not obtain list of client templates for realm %s: %s' - % (realm, str(e))) + self.fail_request(e, msg='Could not obtain list of client templates for realm %s: %s' + % (realm, str(e))) def get_client_template_by_id(self, id, realm='master'): """ Obtain client template representation by id @@ -966,14 +1036,13 @@ class KeycloakAPI(object): url = URL_CLIENTTEMPLATE.format(url=self.baseurl, id=id, realm=realm) try: - return json.loads(to_native(open_url(url, method='GET', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(url, method='GET') except ValueError as e: self.module.fail_json(msg='API returned incorrect JSON when trying to obtain client templates %s for realm %s: %s' % (id, realm, str(e))) except Exception as e: - self.fail_open_url(e, msg='Could not obtain client template %s for realm %s: %s' - % (id, realm, str(e))) + self.fail_request(e, msg='Could not obtain client template %s for realm %s: %s' + % (id, realm, str(e))) def get_client_template_by_name(self, name, realm='master'): """ Obtain client template representation by name @@ -1012,11 +1081,10 @@ class KeycloakAPI(object): url = URL_CLIENTTEMPLATE.format(url=self.baseurl, realm=realm, id=id) try: - return open_url(url, method='PUT', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(clienttrep), validate_certs=self.validate_certs) + return self._request(url, method='PUT', data=json.dumps(clienttrep)) except Exception as e: - self.fail_open_url(e, msg='Could not update client template %s in realm %s: %s' - % (id, realm, str(e))) + self.fail_request(e, msg='Could not update client template %s in 
realm %s: %s' + % (id, realm, str(e))) def create_client_template(self, clienttrep, realm="master"): """ Create a client in keycloak @@ -1027,11 +1095,10 @@ class KeycloakAPI(object): url = URL_CLIENTTEMPLATES.format(url=self.baseurl, realm=realm) try: - return open_url(url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(clienttrep), validate_certs=self.validate_certs) + return self._request(url, method='POST', data=json.dumps(clienttrep)) except Exception as e: - self.fail_open_url(e, msg='Could not create client template %s in realm %s: %s' - % (clienttrep['clientId'], realm, str(e))) + self.fail_request(e, msg='Could not create client template %s in realm %s: %s' + % (clienttrep['clientId'], realm, str(e))) def delete_client_template(self, id, realm="master"): """ Delete a client template from Keycloak @@ -1043,11 +1110,10 @@ class KeycloakAPI(object): url = URL_CLIENTTEMPLATE.format(url=self.baseurl, realm=realm, id=id) try: - return open_url(url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs) + return self._request(url, method='DELETE') except Exception as e: - self.fail_open_url(e, msg='Could not delete client template %s in realm %s: %s' - % (id, realm, str(e))) + self.fail_request(e, msg='Could not delete client template %s in realm %s: %s' + % (id, realm, str(e))) def get_clientscopes(self, realm="master"): """ Fetch the name and ID of all clientscopes on the Keycloak server. 
@@ -1060,12 +1126,10 @@ class KeycloakAPI(object): """ clientscopes_url = URL_CLIENTSCOPES.format(url=self.baseurl, realm=realm) try: - return json.loads(to_native(open_url(clientscopes_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(clientscopes_url, method="GET") except Exception as e: - self.fail_open_url(e, msg="Could not fetch list of clientscopes in realm %s: %s" - % (realm, str(e))) + self.fail_request(e, msg="Could not fetch list of clientscopes in realm %s: %s" + % (realm, str(e))) def get_clientscope_by_clientscopeid(self, cid, realm="master"): """ Fetch a keycloak clientscope from the provided realm using the clientscope's unique ID. @@ -1078,16 +1142,14 @@ class KeycloakAPI(object): """ clientscope_url = URL_CLIENTSCOPE.format(url=self.baseurl, realm=realm, id=cid) try: - return json.loads(to_native(open_url(clientscope_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(clientscope_url, method="GET") except HTTPError as e: if e.code == 404: return None else: - self.fail_open_url(e, msg="Could not fetch clientscope %s in realm %s: %s" - % (cid, realm, str(e))) + self.fail_request(e, msg="Could not fetch clientscope %s in realm %s: %s" + % (cid, realm, str(e))) except Exception as e: self.module.fail_json(msg="Could not clientscope group %s in realm %s: %s" % (cid, realm, str(e))) @@ -1124,11 +1186,10 @@ class KeycloakAPI(object): """ clientscopes_url = URL_CLIENTSCOPES.format(url=self.baseurl, realm=realm) try: - return open_url(clientscopes_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(clientscoperep), validate_certs=self.validate_certs) + return self._request(clientscopes_url, method='POST', 
data=json.dumps(clientscoperep)) except Exception as e: - self.fail_open_url(e, msg="Could not create clientscope %s in realm %s: %s" - % (clientscoperep['name'], realm, str(e))) + self.fail_request(e, msg="Could not create clientscope %s in realm %s: %s" + % (clientscoperep['name'], realm, str(e))) def update_clientscope(self, clientscoperep, realm="master"): """ Update an existing clientscope. @@ -1139,12 +1200,11 @@ class KeycloakAPI(object): clientscope_url = URL_CLIENTSCOPE.format(url=self.baseurl, realm=realm, id=clientscoperep['id']) try: - return open_url(clientscope_url, method='PUT', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(clientscoperep), validate_certs=self.validate_certs) + return self._request(clientscope_url, method='PUT', data=json.dumps(clientscoperep)) except Exception as e: - self.fail_open_url(e, msg='Could not update clientscope %s in realm %s: %s' - % (clientscoperep['name'], realm, str(e))) + self.fail_request(e, msg='Could not update clientscope %s in realm %s: %s' + % (clientscoperep['name'], realm, str(e))) def delete_clientscope(self, name=None, cid=None, realm="master"): """ Delete a clientscope. One of name or cid must be provided. @@ -1177,11 +1237,10 @@ class KeycloakAPI(object): # should have a good cid by here. clientscope_url = URL_CLIENTSCOPE.format(realm=realm, id=cid, url=self.baseurl) try: - return open_url(clientscope_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs) + return self._request(clientscope_url, method='DELETE') except Exception as e: - self.fail_open_url(e, msg="Unable to delete clientscope %s: %s" % (cid, str(e))) + self.fail_request(e, msg="Unable to delete clientscope %s: %s" % (cid, str(e))) def get_clientscope_protocolmappers(self, cid, realm="master"): """ Fetch the name and ID of all clientscopes on the Keycloak server. 
@@ -1195,12 +1254,10 @@ class KeycloakAPI(object): """ protocolmappers_url = URL_CLIENTSCOPE_PROTOCOLMAPPERS.format(id=cid, url=self.baseurl, realm=realm) try: - return json.loads(to_native(open_url(protocolmappers_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(protocolmappers_url, method="GET") except Exception as e: - self.fail_open_url(e, msg="Could not fetch list of protocolmappers in realm %s: %s" - % (realm, str(e))) + self.fail_request(e, msg="Could not fetch list of protocolmappers in realm %s: %s" + % (realm, str(e))) def get_clientscope_protocolmapper_by_protocolmapperid(self, pid, cid, realm="master"): """ Fetch a keycloak clientscope from the provided realm using the clientscope's unique ID. @@ -1215,16 +1272,14 @@ class KeycloakAPI(object): """ protocolmapper_url = URL_CLIENTSCOPE_PROTOCOLMAPPER.format(url=self.baseurl, realm=realm, id=cid, mapper_id=pid) try: - return json.loads(to_native(open_url(protocolmapper_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(protocolmapper_url, method="GET") except HTTPError as e: if e.code == 404: return None else: - self.fail_open_url(e, msg="Could not fetch protocolmapper %s in realm %s: %s" - % (pid, realm, str(e))) + self.fail_request(e, msg="Could not fetch protocolmapper %s in realm %s: %s" + % (pid, realm, str(e))) except Exception as e: self.module.fail_json(msg="Could not fetch protocolmapper %s in realm %s: %s" % (cid, realm, str(e))) @@ -1263,11 +1318,10 @@ class KeycloakAPI(object): """ protocolmappers_url = URL_CLIENTSCOPE_PROTOCOLMAPPERS.format(url=self.baseurl, id=cid, realm=realm) try: - return open_url(protocolmappers_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, 
timeout=self.connection_timeout, - data=json.dumps(mapper_rep), validate_certs=self.validate_certs) + return self._request(protocolmappers_url, method='POST', data=json.dumps(mapper_rep)) except Exception as e: - self.fail_open_url(e, msg="Could not create protocolmapper %s in realm %s: %s" - % (mapper_rep['name'], realm, str(e))) + self.fail_request(e, msg="Could not create protocolmapper %s in realm %s: %s" + % (mapper_rep['name'], realm, str(e))) def update_clientscope_protocolmappers(self, cid, mapper_rep, realm="master"): """ Update an existing clientscope. @@ -1279,12 +1333,11 @@ class KeycloakAPI(object): protocolmapper_url = URL_CLIENTSCOPE_PROTOCOLMAPPER.format(url=self.baseurl, realm=realm, id=cid, mapper_id=mapper_rep['id']) try: - return open_url(protocolmapper_url, method='PUT', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(mapper_rep), validate_certs=self.validate_certs) + return self._request(protocolmapper_url, method='PUT', data=json.dumps(mapper_rep)) except Exception as e: - self.fail_open_url(e, msg='Could not update protocolmappers for clientscope %s in realm %s: %s' - % (mapper_rep, realm, str(e))) + self.fail_request(e, msg='Could not update protocolmappers for clientscope %s in realm %s: %s' + % (mapper_rep, realm, str(e))) def get_default_clientscopes(self, realm, client_id=None): """Fetch the name and ID of all clientscopes on the Keycloak server. 
@@ -1327,18 +1380,16 @@ class KeycloakAPI(object): if client_id is None: clientscopes_url = url_template.format(url=self.baseurl, realm=realm) try: - return json.loads(to_native(open_url(clientscopes_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(clientscopes_url, method="GET") except Exception as e: - self.fail_open_url(e, msg="Could not fetch list of %s clientscopes in realm %s: %s" % (scope_type, realm, str(e))) + self.fail_request(e, msg="Could not fetch list of %s clientscopes in realm %s: %s" % (scope_type, realm, str(e))) else: cid = self.get_client_id(client_id=client_id, realm=realm) clientscopes_url = url_template.format(url=self.baseurl, realm=realm, cid=cid) try: - return json.loads(to_native(open_url(clientscopes_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(clientscopes_url, method="GET") except Exception as e: - self.fail_open_url(e, msg="Could not fetch list of %s clientscopes in client %s: %s" % (scope_type, client_id, clientscopes_url)) + self.fail_request(e, msg="Could not fetch list of %s clientscopes in client %s: %s" % (scope_type, client_id, clientscopes_url)) def _decide_url_type_clientscope(self, client_id=None, scope_type="default"): """Decides which url to use. 
@@ -1404,12 +1455,11 @@ class KeycloakAPI(object): clientscope_type_url = self._decide_url_type_clientscope(client_id, scope_type).format(realm=realm, id=id, cid=cid, url=self.baseurl) try: method = 'PUT' if action == "add" else 'DELETE' - return open_url(clientscope_type_url, method=method, http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs) + return self._request(clientscope_type_url, method=method) except Exception as e: place = 'realm' if client_id is None else 'client ' + client_id - self.fail_open_url(e, msg="Unable to %s %s clientscope %s @ %s : %s" % (action, scope_type, id, place, str(e))) + self.fail_request(e, msg="Unable to %s %s clientscope %s @ %s : %s" % (action, scope_type, id, place, str(e))) def create_clientsecret(self, id, realm="master"): """ Generate a new client secret by id @@ -1421,16 +1471,14 @@ class KeycloakAPI(object): clientsecret_url = URL_CLIENTSECRET.format(url=self.baseurl, realm=realm, id=id) try: - return json.loads(to_native(open_url(clientsecret_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(clientsecret_url, method='POST') except HTTPError as e: if e.code == 404: return None else: - self.fail_open_url(e, msg='Could not obtain clientsecret of client %s for realm %s: %s' - % (id, realm, str(e))) + self.fail_request(e, msg='Could not obtain clientsecret of client %s for realm %s: %s' + % (id, realm, str(e))) except Exception as e: self.module.fail_json(msg='Could not obtain clientsecret of client %s for realm %s: %s' % (id, realm, str(e))) @@ -1445,16 +1493,14 @@ class KeycloakAPI(object): clientsecret_url = URL_CLIENTSECRET.format(url=self.baseurl, realm=realm, id=id) try: - return json.loads(to_native(open_url(clientsecret_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, - 
timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(clientsecret_url, method='GET') except HTTPError as e: if e.code == 404: return None else: - self.fail_open_url(e, msg='Could not obtain clientsecret of client %s for realm %s: %s' - % (id, realm, str(e))) + self.fail_request(e, msg='Could not obtain clientsecret of client %s for realm %s: %s' + % (id, realm, str(e))) except Exception as e: self.module.fail_json(msg='Could not obtain clientsecret of client %s for realm %s: %s' % (id, realm, str(e))) @@ -1469,12 +1515,10 @@ class KeycloakAPI(object): """ groups_url = URL_GROUPS.format(url=self.baseurl, realm=realm) try: - return json.loads(to_native(open_url(groups_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(groups_url, method="GET") except Exception as e: - self.fail_open_url(e, msg="Could not fetch list of groups in realm %s: %s" - % (realm, str(e))) + self.fail_request(e, msg="Could not fetch list of groups in realm %s: %s" + % (realm, str(e))) def get_group_by_groupid(self, gid, realm="master"): """ Fetch a keycloak group from the provided realm using the group's unique ID. 
@@ -1487,15 +1531,13 @@ class KeycloakAPI(object): """ groups_url = URL_GROUP.format(url=self.baseurl, realm=realm, groupid=gid) try: - return json.loads(to_native(open_url(groups_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(groups_url, method="GET") except HTTPError as e: if e.code == 404: return None else: - self.fail_open_url(e, msg="Could not fetch group %s in realm %s: %s" - % (gid, realm, str(e))) + self.fail_request(e, msg="Could not fetch group %s in realm %s: %s" + % (gid, realm, str(e))) except Exception as e: self.module.fail_json(msg="Could not fetch group %s in realm %s: %s" % (gid, realm, str(e))) @@ -1509,9 +1551,7 @@ class KeycloakAPI(object): group_children = [] else: group_children_url = URL_GROUP_CHILDREN.format(url=self.baseurl, realm=realm, groupid=parent['id']) - group_children = json.loads(to_native(open_url(group_children_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + group_children = self._request_and_deserialize(group_children_url, method="GET") subgroups = group_children else: subgroups = parent['subGroups'] @@ -1529,7 +1569,6 @@ class KeycloakAPI(object): :param realm: Realm in which the group resides; default 'master' :param parents: Optional list of parents when group to look for is a subgroup """ - groups_url = URL_GROUPS.format(url=self.baseurl, realm=realm) try: if parents: parent = self.get_subgroup_direct_parent(parents, realm) @@ -1655,11 +1694,10 @@ class KeycloakAPI(object): """ groups_url = URL_GROUPS.format(url=self.baseurl, realm=realm) try: - return open_url(groups_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(grouprep), validate_certs=self.validate_certs) + return self._request(groups_url, 
method='POST', data=json.dumps(grouprep)) except Exception as e: - self.fail_open_url(e, msg="Could not create group %s in realm %s: %s" - % (grouprep['name'], realm, str(e))) + self.fail_request(e, msg="Could not create group %s in realm %s: %s" + % (grouprep['name'], realm, str(e))) def create_subgroup(self, parents, grouprep, realm="master"): """ Create a Keycloak subgroup. @@ -1683,11 +1721,10 @@ class KeycloakAPI(object): parent_id = parent_id["id"] url = URL_GROUP_CHILDREN.format(url=self.baseurl, realm=realm, groupid=parent_id) - return open_url(url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(grouprep), validate_certs=self.validate_certs) + return self._request(url, method='POST', data=json.dumps(grouprep)) except Exception as e: - self.fail_open_url(e, msg="Could not create subgroup %s for parent group %s in realm %s: %s" - % (grouprep['name'], parent_id, realm, str(e))) + self.fail_request(e, msg="Could not create subgroup %s for parent group %s in realm %s: %s" + % (grouprep['name'], parent_id, realm, str(e))) def update_group(self, grouprep, realm="master"): """ Update an existing group. @@ -1698,11 +1735,10 @@ class KeycloakAPI(object): group_url = URL_GROUP.format(url=self.baseurl, realm=realm, groupid=grouprep['id']) try: - return open_url(group_url, method='PUT', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(grouprep), validate_certs=self.validate_certs) + return self._request(group_url, method='PUT', data=json.dumps(grouprep)) except Exception as e: - self.fail_open_url(e, msg='Could not update group %s in realm %s: %s' - % (grouprep['name'], realm, str(e))) + self.fail_request(e, msg='Could not update group %s in realm %s: %s' + % (grouprep['name'], realm, str(e))) def delete_group(self, name=None, groupid=None, realm="master"): """ Delete a group. One of name or groupid must be provided. 
@@ -1735,10 +1771,9 @@ class KeycloakAPI(object): # should have a good groupid by here. group_url = URL_GROUP.format(realm=realm, groupid=groupid, url=self.baseurl) try: - return open_url(group_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs) + return self._request(group_url, method='DELETE') except Exception as e: - self.fail_open_url(e, msg="Unable to delete group %s: %s" % (groupid, str(e))) + self.fail_request(e, msg="Unable to delete group %s: %s" % (groupid, str(e))) def get_realm_roles(self, realm='master'): """ Obtains role representations for roles in a realm @@ -1748,15 +1783,13 @@ class KeycloakAPI(object): """ rolelist_url = URL_REALM_ROLES.format(url=self.baseurl, realm=realm) try: - return json.loads(to_native(open_url(rolelist_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(rolelist_url, method='GET') except ValueError as e: self.module.fail_json(msg='API returned incorrect JSON when trying to obtain list of roles for realm %s: %s' % (realm, str(e))) except Exception as e: - self.fail_open_url(e, msg='Could not obtain list of roles for realm %s: %s' - % (realm, str(e))) + self.fail_request(e, msg='Could not obtain list of roles for realm %s: %s' + % (realm, str(e))) def get_realm_role(self, name, realm='master'): """ Fetch a keycloak role from the provided realm using the role's name. 
@@ -1767,14 +1800,13 @@ class KeycloakAPI(object): """ role_url = URL_REALM_ROLE.format(url=self.baseurl, realm=realm, name=quote(name, safe='')) try: - return json.loads(to_native(open_url(role_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(role_url, method="GET") except HTTPError as e: if e.code == 404: return None else: - self.fail_open_url(e, msg='Could not fetch role %s in realm %s: %s' - % (name, realm, str(e))) + self.fail_request(e, msg='Could not fetch role %s in realm %s: %s' + % (name, realm, str(e))) except Exception as e: self.module.fail_json(msg='Could not fetch role %s in realm %s: %s' % (name, realm, str(e))) @@ -1790,11 +1822,10 @@ class KeycloakAPI(object): if "composites" in rolerep: keycloak_compatible_composites = self.convert_role_composites(rolerep["composites"]) rolerep["composites"] = keycloak_compatible_composites - return open_url(roles_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(rolerep), validate_certs=self.validate_certs) + return self._request(roles_url, method='POST', data=json.dumps(rolerep)) except Exception as e: - self.fail_open_url(e, msg='Could not create role %s in realm %s: %s' - % (rolerep['name'], realm, str(e))) + self.fail_request(e, msg='Could not create role %s in realm %s: %s' + % (rolerep['name'], realm, str(e))) def update_realm_role(self, rolerep, realm='master'): """ Update an existing realm role. 
@@ -1808,14 +1839,13 @@ class KeycloakAPI(object): """ if "composites" in rolerep: composites = copy.deepcopy(rolerep["composites"]) del rolerep["composites"] - role_response = open_url(role_url, method='PUT', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(rolerep), validate_certs=self.validate_certs) + role_response = self._request(role_url, method='PUT', data=json.dumps(rolerep)) if composites is not None: self.update_role_composites(rolerep=rolerep, composites=composites, realm=realm) return role_response except Exception as e: - self.fail_open_url(e, msg='Could not update role %s in realm %s: %s' - % (rolerep['name'], realm, str(e))) + self.fail_request(e, msg='Could not update role %s in realm %s: %s' + % (rolerep['name'], realm, str(e))) def get_role_composites(self, rolerep, clientid=None, realm='master'): composite_url = '' @@ -1827,16 +1857,10 @@ class KeycloakAPI(object): else: composite_url = URL_REALM_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, name=quote(rolerep["name"], safe='')) # Get existing composites - return json.loads(to_native(open_url( - composite_url, - method='GET', - http_agent=self.http_agent, - headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(composite_url, method='GET') except Exception as e: - self.fail_open_url(e, msg='Could not get role %s composites in realm %s: %s' - % (rolerep['name'], realm, str(e))) + self.fail_request(e, msg='Could not get role %s composites in realm %s: %s' + % (rolerep['name'], realm, str(e))) def create_role_composites(self, rolerep, composites, clientid=None, realm='master'): composite_url = '' @@ -1849,11 +1873,10 @@ class KeycloakAPI(object): composite_url = URL_REALM_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, name=quote(rolerep["name"], safe='')) # Get existing composites # create new composites - return open_url(composite_url, 
method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(composites), validate_certs=self.validate_certs) + return self._request(composite_url, method='POST', data=json.dumps(composites)) except Exception as e: - self.fail_open_url(e, msg='Could not create role %s composites in realm %s: %s' - % (rolerep['name'], realm, str(e))) + self.fail_request(e, msg='Could not create role %s composites in realm %s: %s' + % (rolerep['name'], realm, str(e))) def delete_role_composites(self, rolerep, composites, clientid=None, realm='master'): composite_url = '' @@ -1866,11 +1889,10 @@ class KeycloakAPI(object): composite_url = URL_REALM_ROLE_COMPOSITES.format(url=self.baseurl, realm=realm, name=quote(rolerep["name"], safe='')) # Get existing composites # create new composites - return open_url(composite_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(composites), validate_certs=self.validate_certs) + return self._request(composite_url, method='DELETE', data=json.dumps(composites)) except Exception as e: - self.fail_open_url(e, msg='Could not create role %s composites in realm %s: %s' - % (rolerep['name'], realm, str(e))) + self.fail_request(e, msg='Could not delete role %s composites in realm %s: %s' + % (rolerep['name'], realm, str(e))) def update_role_composites(self, rolerep, composites, clientid=None, realm='master'): # Get existing composites @@ -1930,11 +1952,10 @@ class KeycloakAPI(object): """ role_url = URL_REALM_ROLE.format(url=self.baseurl, realm=realm, name=quote(name, safe='')) try: - return open_url(role_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs) + return self._request(role_url, method='DELETE') except Exception as e: - self.fail_open_url(e, msg='Unable to delete role %s in realm %s: %s' - % (name, realm, str(e))) + 
self.fail_request(e, msg='Unable to delete role %s in realm %s: %s' + % (name, realm, str(e))) def get_client_roles(self, clientid, realm='master'): """ Obtains role representations for client roles in a specific client @@ -1949,15 +1970,13 @@ class KeycloakAPI(object): % (clientid, realm)) rolelist_url = URL_CLIENT_ROLES.format(url=self.baseurl, realm=realm, id=cid) try: - return json.loads(to_native(open_url(rolelist_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(rolelist_url, method='GET') except ValueError as e: self.module.fail_json(msg='API returned incorrect JSON when trying to obtain list of roles for client %s in realm %s: %s' % (clientid, realm, str(e))) except Exception as e: - self.fail_open_url(e, msg='Could not obtain list of roles for client %s in realm %s: %s' - % (clientid, realm, str(e))) + self.fail_request(e, msg='Could not obtain list of roles for client %s in realm %s: %s' + % (clientid, realm, str(e))) def get_client_role(self, name, clientid, realm='master'): """ Fetch a keycloak client role from the provided realm using the role's name. 
@@ -1974,14 +1993,13 @@ class KeycloakAPI(object): % (clientid, realm)) role_url = URL_CLIENT_ROLE.format(url=self.baseurl, realm=realm, id=cid, name=quote(name, safe='')) try: - return json.loads(to_native(open_url(role_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(role_url, method="GET") except HTTPError as e: if e.code == 404: return None else: - self.fail_open_url(e, msg='Could not fetch role %s in client %s of realm %s: %s' - % (name, clientid, realm, str(e))) + self.fail_request(e, msg='Could not fetch role %s in client %s of realm %s: %s' + % (name, clientid, realm, str(e))) except Exception as e: self.module.fail_json(msg='Could not fetch role %s for client %s in realm %s: %s' % (name, clientid, realm, str(e))) @@ -2003,11 +2021,10 @@ class KeycloakAPI(object): if "composites" in rolerep: keycloak_compatible_composites = self.convert_role_composites(rolerep["composites"]) rolerep["composites"] = keycloak_compatible_composites - return open_url(roles_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(rolerep), validate_certs=self.validate_certs) + return self._request(roles_url, method='POST', data=json.dumps(rolerep)) except Exception as e: - self.fail_open_url(e, msg='Could not create role %s for client %s in realm %s: %s' - % (rolerep['name'], clientid, realm, str(e))) + self.fail_request(e, msg='Could not create role %s for client %s in realm %s: %s' + % (rolerep['name'], clientid, realm, str(e))) def convert_role_composites(self, composites): keycloak_compatible_composites = { @@ -2042,14 +2059,13 @@ class KeycloakAPI(object): if "composites" in rolerep: composites = copy.deepcopy(rolerep["composites"]) del rolerep['composites'] - update_role_response = open_url(role_url, method='PUT', http_agent=self.http_agent, 
headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(rolerep), validate_certs=self.validate_certs) + update_role_response = self._request(role_url, method='PUT', data=json.dumps(rolerep)) if composites is not None: self.update_role_composites(rolerep=rolerep, clientid=clientid, composites=composites, realm=realm) return update_role_response except Exception as e: - self.fail_open_url(e, msg='Could not update role %s for client %s in realm %s: %s' - % (rolerep['name'], clientid, realm, str(e))) + self.fail_request(e, msg='Could not update role %s for client %s in realm %s: %s' + % (rolerep['name'], clientid, realm, str(e))) def delete_client_role(self, name, clientid, realm="master"): """ Delete a role. One of name or roleid must be provided. @@ -2064,11 +2080,10 @@ class KeycloakAPI(object): % (clientid, realm)) role_url = URL_CLIENT_ROLE.format(url=self.baseurl, realm=realm, id=cid, name=quote(name, safe='')) try: - return open_url(role_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs) + return self._request(role_url, method='DELETE') except Exception as e: - self.fail_open_url(e, msg='Unable to delete role %s for client %s in realm %s: %s' - % (name, clientid, realm, str(e))) + self.fail_request(e, msg='Unable to delete role %s for client %s in realm %s: %s' + % (name, clientid, realm, str(e))) def get_authentication_flow_by_alias(self, alias, realm='master'): """ @@ -2080,16 +2095,14 @@ class KeycloakAPI(object): try: authentication_flow = {} # Check if the authentication flow exists on the Keycloak serveraders - authentications = json.load(open_url(URL_AUTHENTICATION_FLOWS.format(url=self.baseurl, realm=realm), method='GET', - http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, validate_certs=self.validate_certs)) + authentications = json.load(self._request(URL_AUTHENTICATION_FLOWS.format(url=self.baseurl, 
realm=realm), method='GET')) for authentication in authentications: if authentication["alias"] == alias: authentication_flow = authentication break return authentication_flow except Exception as e: - self.fail_open_url(e, msg="Unable get authentication flow %s: %s" % (alias, str(e))) + self.fail_request(e, msg="Unable get authentication flow %s: %s" % (alias, str(e))) def delete_authentication_flow_by_id(self, id, realm='master'): """ @@ -2101,11 +2114,10 @@ class KeycloakAPI(object): flow_url = URL_AUTHENTICATION_FLOW.format(url=self.baseurl, realm=realm, id=id) try: - return open_url(flow_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs) + return self._request(flow_url, method='DELETE') except Exception as e: - self.fail_open_url(e, msg='Could not delete authentication flow %s in realm %s: %s' - % (id, realm, str(e))) + self.fail_request(e, msg='Could not delete authentication flow %s in realm %s: %s' + % (id, realm, str(e))) def copy_auth_flow(self, config, realm='master'): """ @@ -2118,31 +2130,25 @@ class KeycloakAPI(object): new_name = dict( newName=config["alias"] ) - open_url( + self._request( URL_AUTHENTICATION_FLOW_COPY.format( url=self.baseurl, realm=realm, copyfrom=quote(config["copyFrom"], safe='')), method='POST', - http_agent=self.http_agent, headers=self.restheaders, - data=json.dumps(new_name), - timeout=self.connection_timeout, - validate_certs=self.validate_certs) + data=json.dumps(new_name)) flow_list = json.load( - open_url( + self._request( URL_AUTHENTICATION_FLOWS.format(url=self.baseurl, realm=realm), - method='GET', - http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs)) + method='GET')) for flow in flow_list: if flow["alias"] == config["alias"]: return flow return None except Exception as e: - self.fail_open_url(e, msg='Could not copy authentication flow %s in realm %s: 
%s' - % (config["alias"], realm, str(e))) + self.fail_request(e, msg='Could not copy authentication flow %s in realm %s: %s' + % (config["alias"], realm, str(e))) def create_empty_auth_flow(self, config, realm='master'): """ @@ -2158,31 +2164,25 @@ class KeycloakAPI(object): description=config["description"], topLevel=True ) - open_url( + self._request( URL_AUTHENTICATION_FLOWS.format( url=self.baseurl, realm=realm), method='POST', - http_agent=self.http_agent, headers=self.restheaders, - data=json.dumps(new_flow), - timeout=self.connection_timeout, - validate_certs=self.validate_certs) + data=json.dumps(new_flow)) flow_list = json.load( - open_url( + self._request( URL_AUTHENTICATION_FLOWS.format( url=self.baseurl, realm=realm), - method='GET', - http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs)) + method='GET')) for flow in flow_list: if flow["alias"] == config["alias"]: return flow return None except Exception as e: - self.fail_open_url(e, msg='Could not create empty authentication flow %s in realm %s: %s' - % (config["alias"], realm, str(e))) + self.fail_request(e, msg='Could not create empty authentication flow %s in realm %s: %s' + % (config["alias"], realm, str(e))) def update_authentication_executions(self, flowAlias, updatedExec, realm='master'): """ Update authentication executions @@ -2192,19 +2192,16 @@ class KeycloakAPI(object): :return: HTTPResponse object on success """ try: - open_url( + self._request( URL_AUTHENTICATION_FLOW_EXECUTIONS.format( url=self.baseurl, realm=realm, flowalias=quote(flowAlias, safe='')), method='PUT', - http_agent=self.http_agent, headers=self.restheaders, - data=json.dumps(updatedExec), - timeout=self.connection_timeout, - validate_certs=self.validate_certs) + data=json.dumps(updatedExec)) except HTTPError as e: - self.fail_open_url(e, msg="Unable to update execution '%s': %s: %s %s" - % (flowAlias, repr(e), ";".join([e.url, e.msg, str(e.code), 
str(e.hdrs)]), str(updatedExec))) + self.fail_request(e, msg="Unable to update execution '%s': %s: %s %s" + % (flowAlias, repr(e), ";".join([e.url, e.msg, str(e.code), str(e.hdrs)]), str(updatedExec))) except Exception as e: self.module.fail_json(msg="Unable to update executions %s: %s" % (updatedExec, str(e))) @@ -2216,18 +2213,15 @@ class KeycloakAPI(object): :return: HTTPResponse object on success """ try: - open_url( + self._request( URL_AUTHENTICATION_EXECUTION_CONFIG.format( url=self.baseurl, realm=realm, id=executionId), method='POST', - http_agent=self.http_agent, headers=self.restheaders, - data=json.dumps(authenticationConfig), - timeout=self.connection_timeout, - validate_certs=self.validate_certs) + data=json.dumps(authenticationConfig)) except Exception as e: - self.fail_open_url(e, msg="Unable to add authenticationConfig %s: %s" % (executionId, str(e))) + self.fail_request(e, msg="Unable to add authenticationConfig %s: %s" % (executionId, str(e))) def create_subflow(self, subflowName, flowAlias, realm='master', flowType='basic-flow'): """ Create new sublow on the flow @@ -2241,18 +2235,15 @@ class KeycloakAPI(object): newSubFlow["alias"] = subflowName newSubFlow["provider"] = "registration-page-form" newSubFlow["type"] = flowType - open_url( + self._request( URL_AUTHENTICATION_FLOW_EXECUTIONS_FLOW.format( url=self.baseurl, realm=realm, flowalias=quote(flowAlias, safe='')), method='POST', - http_agent=self.http_agent, headers=self.restheaders, - data=json.dumps(newSubFlow), - timeout=self.connection_timeout, - validate_certs=self.validate_certs) + data=json.dumps(newSubFlow)) except Exception as e: - self.fail_open_url(e, msg="Unable to create new subflow %s: %s" % (subflowName, str(e))) + self.fail_request(e, msg="Unable to create new subflow %s: %s" % (subflowName, str(e))) def create_execution(self, execution, flowAlias, realm='master'): """ Create new execution on the flow @@ -2265,19 +2256,16 @@ class KeycloakAPI(object): newExec = {} 
newExec["provider"] = execution["providerId"] newExec["requirement"] = execution["requirement"] - open_url( + self._request( URL_AUTHENTICATION_FLOW_EXECUTIONS_EXECUTION.format( url=self.baseurl, realm=realm, flowalias=quote(flowAlias, safe='')), method='POST', - http_agent=self.http_agent, headers=self.restheaders, - data=json.dumps(newExec), - timeout=self.connection_timeout, - validate_certs=self.validate_certs) + data=json.dumps(newExec)) except HTTPError as e: - self.fail_open_url(e, msg="Unable to create new execution '%s' %s: %s: %s %s" - % (flowAlias, execution["providerId"], repr(e), ";".join([e.url, e.msg, str(e.code), str(e.hdrs)]), str(newExec))) + self.fail_request(e, msg="Unable to create new execution '%s' %s: %s: %s %s" + % (flowAlias, execution["providerId"], repr(e), ";".join([e.url, e.msg, str(e.code), str(e.hdrs)]), str(newExec))) except Exception as e: self.module.fail_json(msg="Unable to create new execution '%s' %s: %s" % (flowAlias, execution["providerId"], repr(e))) @@ -2292,28 +2280,22 @@ class KeycloakAPI(object): try: if diff > 0: for i in range(diff): - open_url( + self._request( URL_AUTHENTICATION_EXECUTION_RAISE_PRIORITY.format( url=self.baseurl, realm=realm, id=executionId), - method='POST', - http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs) + method='POST') elif diff < 0: for i in range(-diff): - open_url( + self._request( URL_AUTHENTICATION_EXECUTION_LOWER_PRIORITY.format( url=self.baseurl, realm=realm, id=executionId), - method='POST', - http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs) + method='POST') except Exception as e: - self.fail_open_url(e, msg="Unable to change execution priority %s: %s" % (executionId, str(e))) + self.fail_request(e, msg="Unable to change execution priority %s: %s" % (executionId, str(e))) def get_executions_representation(self, config, 
realm='master'): """ @@ -2325,33 +2307,27 @@ class KeycloakAPI(object): try: # Get executions created executions = json.load( - open_url( + self._request( URL_AUTHENTICATION_FLOW_EXECUTIONS.format( url=self.baseurl, realm=realm, flowalias=quote(config["alias"], safe='')), - method='GET', - http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs)) + method='GET')) for execution in executions: if "authenticationConfig" in execution: execConfigId = execution["authenticationConfig"] execConfig = json.load( - open_url( + self._request( URL_AUTHENTICATION_CONFIG.format( url=self.baseurl, realm=realm, id=execConfigId), - method='GET', - http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs)) + method='GET')) execution["authenticationConfig"] = execConfig return executions except Exception as e: - self.fail_open_url(e, msg='Could not get executions for authentication flow %s in realm %s: %s' - % (config["alias"], realm, str(e))) + self.fail_request(e, msg='Could not get executions for authentication flow %s in realm %s: %s' + % (config["alias"], realm, str(e))) def get_required_actions(self, realm='master'): """ @@ -2362,15 +2338,12 @@ class KeycloakAPI(object): try: required_actions = json.load( - open_url( + self._request( URL_AUTHENTICATION_REQUIRED_ACTIONS.format( url=self.baseurl, realm=realm ), - method='GET', - http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs + method='GET' ) ) @@ -2392,19 +2365,16 @@ class KeycloakAPI(object): } try: - return open_url( + return self._request( URL_AUTHENTICATION_REGISTER_REQUIRED_ACTION.format( url=self.baseurl, realm=realm ), method='POST', - http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(data), - timeout=self.connection_timeout, - validate_certs=self.validate_certs ) except Exception as e: - 
self.fail_open_url( + self.fail_request( e, msg='Unable to register required action %s in realm %s: %s' % (rep["name"], realm, str(e)) @@ -2420,20 +2390,17 @@ class KeycloakAPI(object): """ try: - return open_url( + return self._request( URL_AUTHENTICATION_REQUIRED_ACTIONS_ALIAS.format( url=self.baseurl, alias=quote(alias, safe=''), realm=realm ), method='PUT', - http_agent=self.http_agent, headers=self.restheaders, data=json.dumps(rep), - timeout=self.connection_timeout, - validate_certs=self.validate_certs ) except Exception as e: - self.fail_open_url( + self.fail_request( e, msg='Unable to update required action %s in realm %s: %s' % (alias, realm, str(e)) @@ -2448,19 +2415,16 @@ class KeycloakAPI(object): """ try: - return open_url( + return self._request( URL_AUTHENTICATION_REQUIRED_ACTIONS_ALIAS.format( url=self.baseurl, alias=quote(alias, safe=''), realm=realm ), method='DELETE', - http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs ) except Exception as e: - self.fail_open_url( + self.fail_request( e, msg='Unable to delete required action %s in realm %s: %s' % (alias, realm, str(e)) @@ -2473,14 +2437,13 @@ class KeycloakAPI(object): """ idps_url = URL_IDENTITY_PROVIDERS.format(url=self.baseurl, realm=realm) try: - return json.loads(to_native(open_url(idps_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(idps_url, method='GET') except ValueError as e: self.module.fail_json(msg='API returned incorrect JSON when trying to obtain list of identity providers for realm %s: %s' % (realm, str(e))) except Exception as e: - self.fail_open_url(e, msg='Could not obtain list of identity providers for realm %s: %s' - % (realm, str(e))) + self.fail_request(e, msg='Could not obtain list of identity providers for realm %s: %s' + % (realm, str(e))) def 
get_identity_provider(self, alias, realm='master'): """ Fetch identity provider representation from a realm using the idp's alias. @@ -2490,14 +2453,13 @@ class KeycloakAPI(object): """ idp_url = URL_IDENTITY_PROVIDER.format(url=self.baseurl, realm=realm, alias=alias) try: - return json.loads(to_native(open_url(idp_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(idp_url, method="GET") except HTTPError as e: if e.code == 404: return None else: - self.fail_open_url(e, msg='Could not fetch identity provider %s in realm %s: %s' - % (alias, realm, str(e))) + self.fail_request(e, msg='Could not fetch identity provider %s in realm %s: %s' + % (alias, realm, str(e))) except Exception as e: self.module.fail_json(msg='Could not fetch identity provider %s in realm %s: %s' % (alias, realm, str(e))) @@ -2510,11 +2472,10 @@ class KeycloakAPI(object): """ idps_url = URL_IDENTITY_PROVIDERS.format(url=self.baseurl, realm=realm) try: - return open_url(idps_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(idprep), validate_certs=self.validate_certs) + return self._request(idps_url, method='POST', data=json.dumps(idprep)) except Exception as e: - self.fail_open_url(e, msg='Could not create identity provider %s in realm %s: %s' - % (idprep['alias'], realm, str(e))) + self.fail_request(e, msg='Could not create identity provider %s in realm %s: %s' + % (idprep['alias'], realm, str(e))) def update_identity_provider(self, idprep, realm='master'): """ Update an existing identity provider. 
@@ -2524,11 +2485,10 @@ class KeycloakAPI(object): """ idp_url = URL_IDENTITY_PROVIDER.format(url=self.baseurl, realm=realm, alias=idprep['alias']) try: - return open_url(idp_url, method='PUT', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(idprep), validate_certs=self.validate_certs) + return self._request(idp_url, method='PUT', data=json.dumps(idprep)) except Exception as e: - self.fail_open_url(e, msg='Could not update identity provider %s in realm %s: %s' - % (idprep['alias'], realm, str(e))) + self.fail_request(e, msg='Could not update identity provider %s in realm %s: %s' + % (idprep['alias'], realm, str(e))) def delete_identity_provider(self, alias, realm='master'): """ Delete an identity provider. @@ -2537,11 +2497,10 @@ class KeycloakAPI(object): """ idp_url = URL_IDENTITY_PROVIDER.format(url=self.baseurl, realm=realm, alias=alias) try: - return open_url(idp_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs) + return self._request(idp_url, method='DELETE') except Exception as e: - self.fail_open_url(e, msg='Unable to delete identity provider %s in realm %s: %s' - % (alias, realm, str(e))) + self.fail_request(e, msg='Unable to delete identity provider %s in realm %s: %s' + % (alias, realm, str(e))) def get_identity_provider_mappers(self, alias, realm='master'): """ Fetch representations for identity provider mappers @@ -2551,15 +2510,13 @@ class KeycloakAPI(object): """ mappers_url = URL_IDENTITY_PROVIDER_MAPPERS.format(url=self.baseurl, realm=realm, alias=alias) try: - return json.loads(to_native(open_url(mappers_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(mappers_url, method='GET') except ValueError as e: self.module.fail_json(msg='API returned incorrect 
JSON when trying to obtain list of identity provider mappers for idp %s in realm %s: %s' % (alias, realm, str(e))) except Exception as e: - self.fail_open_url(e, msg='Could not obtain list of identity provider mappers for idp %s in realm %s: %s' - % (alias, realm, str(e))) + self.fail_request(e, msg='Could not obtain list of identity provider mappers for idp %s in realm %s: %s' + % (alias, realm, str(e))) def get_identity_provider_mapper(self, mid, alias, realm='master'): """ Fetch identity provider representation from a realm using the idp's alias. @@ -2570,15 +2527,13 @@ class KeycloakAPI(object): """ mapper_url = URL_IDENTITY_PROVIDER_MAPPER.format(url=self.baseurl, realm=realm, alias=alias, id=mid) try: - return json.loads(to_native(open_url(mapper_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(mapper_url, method="GET") except HTTPError as e: if e.code == 404: return None else: - self.fail_open_url(e, msg='Could not fetch mapper %s for identity provider %s in realm %s: %s' - % (mid, alias, realm, str(e))) + self.fail_request(e, msg='Could not fetch mapper %s for identity provider %s in realm %s: %s' + % (mid, alias, realm, str(e))) except Exception as e: self.module.fail_json(msg='Could not fetch mapper %s for identity provider %s in realm %s: %s' % (mid, alias, realm, str(e))) @@ -2592,11 +2547,10 @@ class KeycloakAPI(object): """ mappers_url = URL_IDENTITY_PROVIDER_MAPPERS.format(url=self.baseurl, realm=realm, alias=alias) try: - return open_url(mappers_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(mapper), validate_certs=self.validate_certs) + return self._request(mappers_url, method='POST', data=json.dumps(mapper)) except Exception as e: - self.fail_open_url(e, msg='Could not create identity provider mapper %s for idp %s in realm %s: 
%s' - % (mapper['name'], alias, realm, str(e))) + self.fail_request(e, msg='Could not create identity provider mapper %s for idp %s in realm %s: %s' + % (mapper['name'], alias, realm, str(e))) def update_identity_provider_mapper(self, mapper, alias, realm='master'): """ Update an existing identity provider. @@ -2607,11 +2561,10 @@ class KeycloakAPI(object): """ mapper_url = URL_IDENTITY_PROVIDER_MAPPER.format(url=self.baseurl, realm=realm, alias=alias, id=mapper['id']) try: - return open_url(mapper_url, method='PUT', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(mapper), validate_certs=self.validate_certs) + return self._request(mapper_url, method='PUT', data=json.dumps(mapper)) except Exception as e: - self.fail_open_url(e, msg='Could not update mapper %s for identity provider %s in realm %s: %s' - % (mapper['id'], alias, realm, str(e))) + self.fail_request(e, msg='Could not update mapper %s for identity provider %s in realm %s: %s' + % (mapper['id'], alias, realm, str(e))) def delete_identity_provider_mapper(self, mid, alias, realm='master'): """ Delete an identity provider. 
@@ -2621,11 +2574,10 @@ class KeycloakAPI(object): """ mapper_url = URL_IDENTITY_PROVIDER_MAPPER.format(url=self.baseurl, realm=realm, alias=alias, id=mid) try: - return open_url(mapper_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs) + return self._request(mapper_url, method='DELETE') except Exception as e: - self.fail_open_url(e, msg='Unable to delete mapper %s for identity provider %s in realm %s: %s' - % (mid, alias, realm, str(e))) + self.fail_request(e, msg='Unable to delete mapper %s for identity provider %s in realm %s: %s' + % (mid, alias, realm, str(e))) def get_components(self, filter=None, realm='master'): """ Fetch representations for components in a realm @@ -2638,14 +2590,13 @@ class KeycloakAPI(object): comps_url += '?%s' % filter try: - return json.loads(to_native(open_url(comps_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(comps_url, method='GET') except ValueError as e: self.module.fail_json(msg='API returned incorrect JSON when trying to obtain list of components for realm %s: %s' % (realm, str(e))) except Exception as e: - self.fail_open_url(e, msg='Could not obtain list of components for realm %s: %s' - % (realm, str(e))) + self.fail_request(e, msg='Could not obtain list of components for realm %s: %s' + % (realm, str(e))) def get_component(self, cid, realm='master'): """ Fetch component representation from a realm using its cid. 
@@ -2655,14 +2606,13 @@ class KeycloakAPI(object): """ comp_url = URL_COMPONENT.format(url=self.baseurl, realm=realm, id=cid) try: - return json.loads(to_native(open_url(comp_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(comp_url, method="GET") except HTTPError as e: if e.code == 404: return None else: - self.fail_open_url(e, msg='Could not fetch component %s in realm %s: %s' - % (cid, realm, str(e))) + self.fail_request(e, msg='Could not fetch component %s in realm %s: %s' + % (cid, realm, str(e))) except Exception as e: self.module.fail_json(msg='Could not fetch component %s in realm %s: %s' % (cid, realm, str(e))) @@ -2675,17 +2625,15 @@ class KeycloakAPI(object): """ comps_url = URL_COMPONENTS.format(url=self.baseurl, realm=realm) try: - resp = open_url(comps_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(comprep), validate_certs=self.validate_certs) + resp = self._request(comps_url, method='POST', data=json.dumps(comprep)) comp_url = resp.getheader('Location') if comp_url is None: self.module.fail_json(msg='Could not create component in realm %s: %s' % (realm, 'unexpected response')) - return json.loads(to_native(open_url(comp_url, method="GET", http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(comp_url, method="GET") except Exception as e: - self.fail_open_url(e, msg='Could not create component in realm %s: %s' - % (realm, str(e))) + self.fail_request(e, msg='Could not create component in realm %s: %s' + % (realm, str(e))) def update_component(self, comprep, realm='master'): """ Update an existing component. 
@@ -2698,11 +2646,10 @@ class KeycloakAPI(object): self.module.fail_json(msg='Cannot update component without id') comp_url = URL_COMPONENT.format(url=self.baseurl, realm=realm, id=cid) try: - return open_url(comp_url, method='PUT', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(comprep), validate_certs=self.validate_certs) + return self._request(comp_url, method='PUT', data=json.dumps(comprep)) except Exception as e: - self.fail_open_url(e, msg='Could not update component %s in realm %s: %s' - % (cid, realm, str(e))) + self.fail_request(e, msg='Could not update component %s in realm %s: %s' + % (cid, realm, str(e))) def delete_component(self, cid, realm='master'): """ Delete an component. @@ -2711,20 +2658,17 @@ class KeycloakAPI(object): """ comp_url = URL_COMPONENT.format(url=self.baseurl, realm=realm, id=cid) try: - return open_url(comp_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs) + return self._request(comp_url, method='DELETE') except Exception as e: - self.fail_open_url(e, msg='Unable to delete component %s in realm %s: %s' - % (cid, realm, str(e))) + self.fail_request(e, msg='Unable to delete component %s in realm %s: %s' + % (cid, realm, str(e))) def get_authz_authorization_scope_by_name(self, name, client_id, realm): url = URL_AUTHZ_AUTHORIZATION_SCOPES.format(url=self.baseurl, client_id=client_id, realm=realm) search_url = "%s/search?name=%s" % (url, quote(name, safe='')) try: - return json.loads(to_native(open_url(search_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(search_url, method='GET') except Exception: return False @@ -2733,30 +2677,27 @@ class KeycloakAPI(object): url = URL_AUTHZ_AUTHORIZATION_SCOPES.format(url=self.baseurl, 
client_id=client_id, realm=realm) try: - return open_url(url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(payload), validate_certs=self.validate_certs) + return self._request(url, method='POST', data=json.dumps(payload)) except Exception as e: - self.fail_open_url(e, msg='Could not create authorization scope %s for client %s in realm %s: %s' % (payload['name'], client_id, realm, str(e))) + self.fail_request(e, msg='Could not create authorization scope %s for client %s in realm %s: %s' % (payload['name'], client_id, realm, str(e))) def update_authz_authorization_scope(self, payload, id, client_id, realm): """Update an authorization scope for a Keycloak client""" url = URL_AUTHZ_AUTHORIZATION_SCOPE.format(url=self.baseurl, id=id, client_id=client_id, realm=realm) try: - return open_url(url, method='PUT', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(payload), validate_certs=self.validate_certs) + return self._request(url, method='PUT', data=json.dumps(payload)) except Exception as e: - self.fail_open_url(e, msg='Could not create update scope %s for client %s in realm %s: %s' % (payload['name'], client_id, realm, str(e))) + self.fail_request(e, msg='Could not create update scope %s for client %s in realm %s: %s' % (payload['name'], client_id, realm, str(e))) def remove_authz_authorization_scope(self, id, client_id, realm): """Remove an authorization scope from a Keycloak client""" url = URL_AUTHZ_AUTHORIZATION_SCOPE.format(url=self.baseurl, id=id, client_id=client_id, realm=realm) try: - return open_url(url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - validate_certs=self.validate_certs) + return self._request(url, method='DELETE') except Exception as e: - self.fail_open_url(e, msg='Could not delete scope %s for client %s in realm %s: %s' % (id, client_id, realm, str(e))) + 
self.fail_request(e, msg='Could not delete scope %s for client %s in realm %s: %s' % (id, client_id, realm, str(e))) def get_user_by_id(self, user_id, realm='master'): """ @@ -2771,16 +2712,13 @@ class KeycloakAPI(object): realm=realm, id=user_id) userrep = json.load( - open_url( + self._request( user_url, - method='GET', - http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs)) + method='GET')) return userrep except Exception as e: - self.fail_open_url(e, msg='Could not get user %s in realm %s: %s' - % (user_id, realm, str(e))) + self.fail_request(e, msg='Could not get user %s in realm %s: %s' + % (user_id, realm, str(e))) def create_user(self, userrep, realm='master'): """ @@ -2796,19 +2734,16 @@ class KeycloakAPI(object): users_url = URL_USERS.format( url=self.baseurl, realm=realm) - open_url(users_url, - method='POST', - http_agent=self.http_agent, headers=self.restheaders, - data=json.dumps(userrep), - timeout=self.connection_timeout, - validate_certs=self.validate_certs) + self._request(users_url, + method='POST', + data=json.dumps(userrep)) created_user = self.get_user_by_username( username=userrep['username'], realm=realm) return created_user except Exception as e: - self.fail_open_url(e, msg='Could not create user %s in realm %s: %s' - % (userrep['username'], realm, str(e))) + self.fail_request(e, msg='Could not create user %s in realm %s: %s' + % (userrep['username'], realm, str(e))) def convert_user_attributes_to_keycloak_dict(self, attributes): keycloak_user_attributes_dict = {} @@ -2841,20 +2776,17 @@ class KeycloakAPI(object): url=self.baseurl, realm=realm, id=userrep["id"]) - open_url( + self._request( user_url, method='PUT', - http_agent=self.http_agent, headers=self.restheaders, - data=json.dumps(userrep), - timeout=self.connection_timeout, - validate_certs=self.validate_certs) + data=json.dumps(userrep)) updated_user = self.get_user_by_id( user_id=userrep['id'], realm=realm) 
return updated_user except Exception as e: - self.fail_open_url(e, msg='Could not update user %s in realm %s: %s' - % (userrep['username'], realm, str(e))) + self.fail_request(e, msg='Could not update user %s in realm %s: %s' + % (userrep['username'], realm, str(e))) def delete_user(self, user_id, realm='master'): """ @@ -2868,15 +2800,12 @@ class KeycloakAPI(object): url=self.baseurl, realm=realm, id=user_id) - return open_url( + return self._request( user_url, - method='DELETE', - http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs) + method='DELETE') except Exception as e: - self.fail_open_url(e, msg='Could not delete user %s in realm %s: %s' - % (user_id, realm, str(e))) + self.fail_request(e, msg='Could not delete user %s in realm %s: %s' + % (user_id, realm, str(e))) def get_user_groups(self, user_id, realm='master'): """ @@ -2892,18 +2821,15 @@ class KeycloakAPI(object): realm=realm, id=user_id) user_groups = json.load( - open_url( + self._request( user_groups_url, - method='GET', - http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs)) + method='GET')) for user_group in user_groups: groups.append(user_group["name"]) return groups except Exception as e: - self.fail_open_url(e, msg='Could not get groups for user %s in realm %s: %s' - % (user_id, realm, str(e))) + self.fail_request(e, msg='Could not get groups for user %s in realm %s: %s' + % (user_id, realm, str(e))) def add_user_in_group(self, user_id, group_id, realm='master'): """ @@ -2919,15 +2845,12 @@ class KeycloakAPI(object): realm=realm, id=user_id, group_id=group_id) - return open_url( + return self._request( user_group_url, - method='PUT', - http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs) + method='PUT') except Exception as e: - self.fail_open_url(e, msg='Could not add user %s 
in group %s in realm %s: %s' - % (user_id, group_id, realm, str(e))) + self.fail_request(e, msg='Could not add user %s in group %s in realm %s: %s' + % (user_id, group_id, realm, str(e))) def remove_user_from_group(self, user_id, group_id, realm='master'): """ @@ -2943,15 +2866,12 @@ class KeycloakAPI(object): realm=realm, id=user_id, group_id=group_id) - return open_url( + return self._request( user_group_url, - method='DELETE', - http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs) + method='DELETE') except Exception as e: - self.fail_open_url(e, msg='Could not remove user %s from group %s in realm %s: %s' - % (user_id, group_id, realm, str(e))) + self.fail_request(e, msg='Could not remove user %s from group %s in realm %s: %s' + % (user_id, group_id, realm, str(e))) def update_user_groups_membership(self, userrep, groups, realm='master'): """ @@ -3019,10 +2939,9 @@ class KeycloakAPI(object): url = URL_AUTHZ_CUSTOM_POLICY.format(url=self.baseurl, policy_type=policy_type, client_id=client_id, realm=realm) try: - return open_url(url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(payload), validate_certs=self.validate_certs) + return self._request(url, method='POST', data=json.dumps(payload)) except Exception as e: - self.fail_open_url(e, msg='Could not create permission %s for client %s in realm %s: %s' % (payload['name'], client_id, realm, str(e))) + self.fail_request(e, msg='Could not create permission %s for client %s in realm %s: %s' % (payload['name'], client_id, realm, str(e))) def remove_authz_custom_policy(self, policy_id, client_id, realm): """Remove a custom policy from a Keycloak client""" @@ -3030,10 +2949,9 @@ class KeycloakAPI(object): delete_url = "%s/%s" % (url, policy_id) try: - return open_url(delete_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, 
timeout=self.connection_timeout, - validate_certs=self.validate_certs) + return self._request(delete_url, method='DELETE') except Exception as e: - self.fail_open_url(e, msg='Could not delete custom policy %s for client %s in realm %s: %s' % (id, client_id, realm, str(e))) + self.fail_request(e, msg='Could not delete custom policy %s for client %s in realm %s: %s' % (id, client_id, realm, str(e))) def get_authz_permission_by_name(self, name, client_id, realm): """Get authorization permission by name""" @@ -3041,9 +2959,7 @@ class KeycloakAPI(object): search_url = "%s/search?name=%s" % (url, name.replace(' ', '%20')) try: - return json.loads(to_native(open_url(search_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(search_url, method='GET') except Exception: return False @@ -3052,30 +2968,27 @@ class KeycloakAPI(object): url = URL_AUTHZ_PERMISSIONS.format(url=self.baseurl, permission_type=permission_type, client_id=client_id, realm=realm) try: - return open_url(url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(payload), validate_certs=self.validate_certs) + return self._request(url, method='POST', data=json.dumps(payload)) except Exception as e: - self.fail_open_url(e, msg='Could not create permission %s for client %s in realm %s: %s' % (payload['name'], client_id, realm, str(e))) + self.fail_request(e, msg='Could not create permission %s for client %s in realm %s: %s' % (payload['name'], client_id, realm, str(e))) def remove_authz_permission(self, id, client_id, realm): """Create an authorization permission for a Keycloak client""" url = URL_AUTHZ_POLICY.format(url=self.baseurl, id=id, client_id=client_id, realm=realm) try: - return open_url(url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, 
- validate_certs=self.validate_certs) + return self._request(url, method='DELETE') except Exception as e: - self.fail_open_url(e, msg='Could not delete permission %s for client %s in realm %s: %s' % (id, client_id, realm, str(e))) + self.fail_request(e, msg='Could not delete permission %s for client %s in realm %s: %s' % (id, client_id, realm, str(e))) def update_authz_permission(self, payload, permission_type, id, client_id, realm): """Update a permission for a Keycloak client""" url = URL_AUTHZ_PERMISSION.format(url=self.baseurl, permission_type=permission_type, id=id, client_id=client_id, realm=realm) try: - return open_url(url, method='PUT', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(payload), validate_certs=self.validate_certs) + return self._request(url, method='PUT', data=json.dumps(payload)) except Exception as e: - self.fail_open_url(e, msg='Could not create update permission %s for client %s in realm %s: %s' % (payload['name'], client_id, realm, str(e))) + self.fail_request(e, msg='Could not create update permission %s for client %s in realm %s: %s' % (payload['name'], client_id, realm, str(e))) def create_authz_resource(self, payload, client_id, realm): """Create an authorization resource for a Keycloak client""" @@ -3093,9 +3006,7 @@ class KeycloakAPI(object): search_url = "%s/search?name=%s" % (url, name.replace(' ', '%20')) try: - return json.loads(to_native(open_url(search_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(search_url, method='GET') except Exception: return False @@ -3125,9 +3036,7 @@ class KeycloakAPI(object): search_url = "%s/search?name=%s&permission=false" % (url, name.replace(' ', '%20')) try: - return json.loads(to_native(open_url(search_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, - 
timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(search_url, method='GET') except Exception: return False @@ -3140,11 +3049,9 @@ class KeycloakAPI(object): """ client_role_scope_url = URL_CLIENT_ROLE_SCOPE_CLIENTS.format(url=self.baseurl, realm=realm, id=clientid, scopeid=clientscopeid) try: - return json.loads(to_native(open_url(client_role_scope_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return self._request_and_deserialize(client_role_scope_url, method='GET') except Exception as e: - self.fail_open_url(e, msg='Could not fetch roles scope for client %s in realm %s: %s' % (clientid, realm, str(e))) + self.fail_request(e, msg='Could not fetch roles scope for client %s in realm %s: %s' % (clientid, realm, str(e))) def update_client_role_scope_from_client(self, payload, clientid, clientscopeid, realm="master"): """ Update and fetch the roles associated with the client's scope on the Keycloak server. 
@@ -3156,11 +3063,10 @@ class KeycloakAPI(object): """ client_role_scope_url = URL_CLIENT_ROLE_SCOPE_CLIENTS.format(url=self.baseurl, realm=realm, id=clientid, scopeid=clientscopeid) try: - open_url(client_role_scope_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(payload), validate_certs=self.validate_certs) + self._request(client_role_scope_url, method='POST', data=json.dumps(payload)) except Exception as e: - self.fail_open_url(e, msg='Could not update roles scope for client %s in realm %s: %s' % (clientid, realm, str(e))) + self.fail_request(e, msg='Could not update roles scope for client %s in realm %s: %s' % (clientid, realm, str(e))) return self.get_client_role_scope_from_client(clientid, clientscopeid, realm) @@ -3174,11 +3080,10 @@ class KeycloakAPI(object): """ client_role_scope_url = URL_CLIENT_ROLE_SCOPE_CLIENTS.format(url=self.baseurl, realm=realm, id=clientid, scopeid=clientscopeid) try: - open_url(client_role_scope_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(payload), validate_certs=self.validate_certs) + self._request(client_role_scope_url, method='DELETE', data=json.dumps(payload)) except Exception as e: - self.fail_open_url(e, msg='Could not delete roles scope for client %s in realm %s: %s' % (clientid, realm, str(e))) + self.fail_request(e, msg='Could not delete roles scope for client %s in realm %s: %s' % (clientid, realm, str(e))) return self.get_client_role_scope_from_client(clientid, clientscopeid, realm) @@ -3190,11 +3095,9 @@ class KeycloakAPI(object): """ client_role_scope_url = URL_CLIENT_ROLE_SCOPE_REALM.format(url=self.baseurl, realm=realm, id=clientid) try: - return json.loads(to_native(open_url(client_role_scope_url, method='GET', http_agent=self.http_agent, headers=self.restheaders, - timeout=self.connection_timeout, - validate_certs=self.validate_certs).read())) + return 
self._request_and_deserialize(client_role_scope_url, method='GET') except Exception as e: - self.fail_open_url(e, msg='Could not fetch roles scope for client %s in realm %s: %s' % (clientid, realm, str(e))) + self.fail_request(e, msg='Could not fetch roles scope for client %s in realm %s: %s' % (clientid, realm, str(e))) def update_client_role_scope_from_realm(self, payload, clientid, realm="master"): """ Update and fetch the realm roles from the client's scope on the Keycloak server. @@ -3205,11 +3108,10 @@ class KeycloakAPI(object): """ client_role_scope_url = URL_CLIENT_ROLE_SCOPE_REALM.format(url=self.baseurl, realm=realm, id=clientid) try: - open_url(client_role_scope_url, method='POST', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(payload), validate_certs=self.validate_certs) + self._request(client_role_scope_url, method='POST', data=json.dumps(payload)) except Exception as e: - self.fail_open_url(e, msg='Could not update roles scope for client %s in realm %s: %s' % (clientid, realm, str(e))) + self.fail_request(e, msg='Could not update roles scope for client %s in realm %s: %s' % (clientid, realm, str(e))) return self.get_client_role_scope_from_realm(clientid, realm) @@ -3222,18 +3124,42 @@ class KeycloakAPI(object): """ client_role_scope_url = URL_CLIENT_ROLE_SCOPE_REALM.format(url=self.baseurl, realm=realm, id=clientid) try: - open_url(client_role_scope_url, method='DELETE', http_agent=self.http_agent, headers=self.restheaders, timeout=self.connection_timeout, - data=json.dumps(payload), validate_certs=self.validate_certs) + self._request(client_role_scope_url, method='DELETE', data=json.dumps(payload)) except Exception as e: - self.fail_open_url(e, msg='Could not delete roles scope for client %s in realm %s: %s' % (clientid, realm, str(e))) + self.fail_request(e, msg='Could not delete roles scope for client %s in realm %s: %s' % (clientid, realm, str(e))) return 
self.get_client_role_scope_from_realm(clientid, realm) - def fail_open_url(self, e, msg, **kwargs): + def fail_request(self, e, msg, **kwargs): + """ Triggers a module failure. This should be called + when an exception occurs during/after a request. + Attempts to parse the exception e as an HTTP error + and append it to msg. + + :param e: exception which triggered the failure + :param msg: error message to display to the user + :param kwargs: additional arguments to pass to module.fail_json + :return: None + """ try: if isinstance(e, HTTPError): msg = "%s: %s" % (msg, to_native(e.read())) - except Exception as ingore: + except Exception: pass self.module.fail_json(msg, **kwargs) + + def fail_open_url(self, e, msg, **kwargs): + """ DEPRECATED: Use fail_request instead. + + Triggers a module failure. This should be called + when an exception occurs during/after a request. + Attempts to parse the exception e as an HTTP error + and append it to msg. + + :param e: exception which triggered the failure + :param msg: error message to display to the user + :param kwargs: additional arguments to pass to module.fail_json + :return: None + """ + return self.fail_request(e, msg, **kwargs) diff --git a/plugins/module_utils/known_hosts.py b/plugins/module_utils/known_hosts.py index 25dd3e174e..9a17355b4e 100644 --- a/plugins/module_utils/known_hosts.py +++ b/plugins/module_utils/known_hosts.py @@ -103,13 +103,11 @@ def not_in_host_file(self, host): continue try: - host_fh = open(hf) + with open(hf) as host_fh: + data = host_fh.read() except IOError: hfiles_not_found += 1 continue - else: - data = host_fh.read() - host_fh.close() for line in data.split("\n"): if line is None or " " not in line: diff --git a/plugins/module_utils/mh/base.py b/plugins/module_utils/mh/base.py index b10762eaba..cf054f59fd 100644 --- a/plugins/module_utils/mh/base.py +++ b/plugins/module_utils/mh/base.py @@ -15,6 +15,7 @@ from ansible_collections.community.general.plugins.module_utils.mh.deco import m 
class ModuleHelperBase(object): module = None ModuleHelperException = _MHE + # in 12.0.0 add 'debug' to the tuple _delegated_to_module = ( 'check_mode', 'get_bin_path', 'warn', 'deprecate', ) @@ -28,6 +29,18 @@ class ModuleHelperBase(object): if not isinstance(self.module, AnsibleModule): self.module = AnsibleModule(**self.module) + # in 12.0.0 remove this if statement entirely + if hasattr(self, 'debug'): + msg = ( + "This class ({cls}) has an attribute 'debug' defined and that is deprecated. " + "Method 'debug' will be an integral part of ModuleHelper in community.general " + "12.0.0, delegated to the underlying AnsibleModule object. " + "Please rename the existing attribute to prevent this message from showing.".format(cls=self.__class__.__name__) + ) + self.deprecate(msg, version="12.0.0", collection_name="community.general") + else: + self._delegated_to_module = self._delegated_to_module + ('debug',) + @property def diff_mode(self): return self.module._diff diff --git a/plugins/module_utils/proxmox.py b/plugins/module_utils/proxmox.py index b0037dacb3..bdd575ac26 100644 --- a/plugins/module_utils/proxmox.py +++ b/plugins/module_utils/proxmox.py @@ -144,7 +144,7 @@ class ProxmoxAnsible(object): return None self.module.fail_json(msg='No VM with name %s found' % name) - elif len(vms) > 1: + elif len(vms) > 1 and not choose_first_if_multiple: self.module.fail_json(msg='Multiple VMs with name %s found, provide vmid instead' % name) return vms[0] @@ -170,6 +170,15 @@ class ProxmoxAnsible(object): except Exception as e: self.module.fail_json(msg='Unable to retrieve API task ID from node %s: %s' % (node, e)) + def api_task_failed(self, node, taskid): + """ Explicitly check if the task stops but exits with a failed status + """ + try: + status = self.proxmox_api.nodes(node).tasks(taskid).status.get() + return status['status'] == 'stopped' and status['exitstatus'] != 'OK' + except Exception as e: + self.module.fail_json(msg='Unable to retrieve API task ID from node %s: 
%s' % (node, e)) + def api_task_complete(self, node_name, task_id, timeout): """Wait until the task stops or times out. diff --git a/plugins/module_utils/snap.py b/plugins/module_utils/snap.py index 253269b9a9..e55a3a13a5 100644 --- a/plugins/module_utils/snap.py +++ b/plugins/module_utils/snap.py @@ -41,8 +41,15 @@ def snap_runner(module, **kwargs): options=cmd_runner_fmt.as_list(), info=cmd_runner_fmt.as_fixed("info"), dangerous=cmd_runner_fmt.as_bool("--dangerous"), + version=cmd_runner_fmt.as_fixed("version"), ), check_rc=False, **kwargs ) return runner + + +def get_version(runner): + with runner("version") as ctx: + rc, out, err = ctx.run() + return dict(x.split() for x in out.splitlines() if len(x.split()) == 2) diff --git a/plugins/modules/apache2_mod_proxy.py b/plugins/modules/apache2_mod_proxy.py index f70294bad1..a5a33d1add 100644 --- a/plugins/modules/apache2_mod_proxy.py +++ b/plugins/modules/apache2_mod_proxy.py @@ -40,14 +40,18 @@ options: type: str description: - (IPv4|IPv6|FQDN) of the balancer member to get or to set attributes to. Port number is autodetected and should not - be specified here. If undefined, apache2_mod_proxy module will return a members list of dictionaries of all the current + be specified here. + - If undefined, the M(community.general.apache2_mod_proxy) module will return a members list of dictionaries of all the current balancer pool members' attributes. state: - type: str + type: list + elements: str + choices: [present, absent, enabled, disabled, drained, hot_standby, ignore_errors] description: - - Desired state of the member host. (absent|disabled),drained,hot_standby,ignore_errors can be simultaneously invoked - by separating them with a comma (for example V(state=drained,ignore_errors)). - - 'Accepted state values: [V(present), V(absent), V(enabled), V(disabled), V(drained), V(hot_standby), V(ignore_errors)].' + - Desired state of the member host. 
+ - States can be simultaneously invoked by separating them with a comma (for example V(state=drained,ignore_errors)), + but it is recommended to specify them as a proper YAML list. + - States V(present) and V(absent) must be used without any other state. tls: description: - Use https to access balancer management page. @@ -109,7 +113,7 @@ EXAMPLES = r""" RETURN = r""" member: - description: specific balancer member information dictionary, returned when apache2_mod_proxy module is invoked with C(member_host) parameter. + description: Specific balancer member information dictionary, returned when the module is invoked with O(member_host) parameter. type: dict returned: success sample: @@ -140,7 +144,8 @@ member: } } members: - description: list of member (defined above) dictionaries, returned when apache2_mod_proxy is invoked with no C(member_host) and state args. + description: List of member (defined above) dictionaries, returned when the module is invoked with no O(member_host) and + O(state) args. 
returned: success type: list sample: @@ -199,33 +204,27 @@ members: """ import re -import traceback -from ansible.module_utils.basic import AnsibleModule, missing_required_lib +from ansible_collections.community.general.plugins.module_utils import deps +from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.urls import fetch_url from ansible.module_utils.six import iteritems -BEAUTIFUL_SOUP_IMP_ERR = None -try: +with deps.declare("BeautifulSoup"): from BeautifulSoup import BeautifulSoup -except ImportError: - BEAUTIFUL_SOUP_IMP_ERR = traceback.format_exc() - HAS_BEAUTIFULSOUP = False -else: - HAS_BEAUTIFULSOUP = True # balancer member attributes extraction regexp: -EXPRESSION = r"(b=([\w\.\-]+)&w=(https?|ajp|wss?|ftp|[sf]cgi)://([\w\.\-]+):?(\d*)([/\w\.\-]*)&?[\w\-\=]*)" +EXPRESSION = re.compile(r"(b=([\w\.\-]+)&w=(https?|ajp|wss?|ftp|[sf]cgi)://([\w\.\-]+):?(\d*)([/\w\.\-]*)&?[\w\-\=]*)") # Apache2 server version extraction regexp: -APACHE_VERSION_EXPRESSION = r"SERVER VERSION: APACHE/([\d.]+)" +APACHE_VERSION_EXPRESSION = re.compile(r"SERVER VERSION: APACHE/([\d.]+)") def regexp_extraction(string, _regexp, groups=1): """ Returns the capture group (default=1) specified in the regexp, applied to the string """ - regexp_search = re.search(string=str(string), pattern=str(_regexp)) + regexp_search = _regexp.search(string) if regexp_search: if regexp_search.group(groups) != '': - return str(regexp_search.group(groups)) + return regexp_search.group(groups) return None @@ -246,26 +245,26 @@ class BalancerMember(object): """ def __init__(self, management_url, balancer_url, module): - self.host = regexp_extraction(management_url, str(EXPRESSION), 4) - self.management_url = str(management_url) + self.host = regexp_extraction(management_url, EXPRESSION, 4) + self.management_url = management_url self.protocol = regexp_extraction(management_url, EXPRESSION, 3) self.port = regexp_extraction(management_url, EXPRESSION, 5) self.path = 
regexp_extraction(management_url, EXPRESSION, 6) - self.balancer_url = str(balancer_url) + self.balancer_url = balancer_url self.module = module def get_member_attributes(self): """ Returns a dictionary of a balancer member's attributes.""" - balancer_member_page = fetch_url(self.module, self.management_url) + resp, info = fetch_url(self.module, self.management_url) - if balancer_member_page[1]['status'] != 200: - self.module.fail_json(msg="Could not get balancer_member_page, check for connectivity! " + balancer_member_page[1]) + if info['status'] != 200: + self.module.fail_json(msg="Could not get balancer_member_page, check for connectivity! {0}".format(info)) else: try: - soup = BeautifulSoup(balancer_member_page[0]) + soup = BeautifulSoup(resp) except TypeError as exc: - self.module.fail_json(msg="Cannot parse balancer_member_page HTML! " + str(exc)) + self.module.fail_json(msg="Cannot parse balancer_member_page HTML! {0}".format(exc)) else: subsoup = soup.findAll('table')[1].findAll('tr') keys = subsoup[0].findAll('th') @@ -280,7 +279,7 @@ class BalancerMember(object): 'drained': 'Drn', 'hot_standby': 'Stby', 'ignore_errors': 'Ign'} - actual_status = str(self.attributes['Status']) + actual_status = self.attributes['Status'] status = {mode: patt in actual_status for mode, patt in iteritems(status_mapping)} return status @@ -292,60 +291,70 @@ class BalancerMember(object): 'ignore_errors': '&w_status_I'} request_body = regexp_extraction(self.management_url, EXPRESSION, 1) - values_url = "".join("{0}={1}".format(url_param, 1 if values[mode] else 0) for mode, url_param in iteritems(values_mapping)) + values_url = "".join("{0}={1}".format(url_param, 1 if values[mode] else 0) for mode, url_param in values_mapping.items()) request_body = "{0}{1}".format(request_body, values_url) - response = fetch_url(self.module, self.management_url, data=request_body) - if response[1]['status'] != 200: - self.module.fail_json(msg="Could not set the member status! 
" + self.host + " " + response[1]['status']) + response, info = fetch_url(self.module, self.management_url, data=request_body) + if info['status'] != 200: + self.module.fail_json(msg="Could not set the member status! " + self.host + " " + info['status']) attributes = property(get_member_attributes) status = property(get_member_status, set_member_status) + def as_dict(self): + return { + "host": self.host, + "status": self.status, + "protocol": self.protocol, + "port": self.port, + "path": self.path, + "attributes": self.attributes, + "management_url": self.management_url, + "balancer_url": self.balancer_url + } + class Balancer(object): """ Apache httpd 2.4 mod_proxy balancer object""" - def __init__(self, host, suffix, module, members=None, tls=False): + def __init__(self, host, suffix, module, tls=False): if tls: - self.base_url = 'https://' + str(host) - self.url = 'https://' + str(host) + str(suffix) + self.base_url = 'https://{0}'.format(host) + self.url = 'https://{0}{1}'.format(host, suffix) else: - self.base_url = 'http://' + str(host) - self.url = 'http://' + str(host) + str(suffix) + self.base_url = 'http://{0}'.format(host) + self.url = 'http://{0}{1}'.format(host, suffix) self.module = module self.page = self.fetch_balancer_page() - if members is None: - self._members = [] def fetch_balancer_page(self): """ Returns the balancer management html page as a string for later parsing.""" - page = fetch_url(self.module, str(self.url)) - if page[1]['status'] != 200: - self.module.fail_json(msg="Could not get balancer page! HTTP status response: " + str(page[1]['status'])) + resp, info = fetch_url(self.module, self.url) + if info['status'] != 200: + self.module.fail_json(msg="Could not get balancer page! 
HTTP status response: {0}".format(info['status'])) else: - content = page[0].read() + content = resp.read() apache_version = regexp_extraction(content.upper(), APACHE_VERSION_EXPRESSION, 1) if apache_version: if not re.search(pattern=r"2\.4\.[\d]*", string=apache_version): self.module.fail_json(msg="This module only acts on an Apache2 2.4+ instance, current Apache2 version: " + str(apache_version)) return content - else: - self.module.fail_json(msg="Could not get the Apache server version from the balancer-manager") + + self.module.fail_json(msg="Could not get the Apache server version from the balancer-manager") def get_balancer_members(self): """ Returns members of the balancer as a generator object for later iteration.""" try: soup = BeautifulSoup(self.page) except TypeError: - self.module.fail_json(msg="Cannot parse balancer page HTML! " + str(self.page)) + self.module.fail_json(msg="Cannot parse balancer page HTML! {0}".format(self.page)) else: for element in soup.findAll('a')[1::1]: balancer_member_suffix = str(element.get('href')) if not balancer_member_suffix: self.module.fail_json(msg="Argument 'balancer_member_suffix' is empty!") else: - yield BalancerMember(str(self.base_url + balancer_member_suffix), str(self.url), self.module) + yield BalancerMember(self.base_url + balancer_member_suffix, self.url, self.module) members = property(get_balancer_members) @@ -357,26 +366,19 @@ def main(): balancer_vhost=dict(required=True, type='str'), balancer_url_suffix=dict(default="/balancer-manager/", type='str'), member_host=dict(type='str'), - state=dict(type='str'), + state=dict(type='list', elements='str', choices=['present', 'absent', 'enabled', 'disabled', 'drained', 'hot_standby', 'ignore_errors']), tls=dict(default=False, type='bool'), validate_certs=dict(default=True, type='bool') ), supports_check_mode=True ) - if HAS_BEAUTIFULSOUP is False: - module.fail_json(msg=missing_required_lib('BeautifulSoup'), exception=BEAUTIFUL_SOUP_IMP_ERR) + 
deps.validate(module) if module.params['state'] is not None: - states = module.params['state'].split(',') + states = module.params['state'] if (len(states) > 1) and (("present" in states) or ("enabled" in states)): module.fail_json(msg="state present/enabled is mutually exclusive with other states!") - else: - for _state in states: - if _state not in ['present', 'absent', 'enabled', 'disabled', 'drained', 'hot_standby', 'ignore_errors']: - module.fail_json( - msg="State can only take values amongst 'present', 'absent', 'enabled', 'disabled', 'drained', 'hot_standby', 'ignore_errors'." - ) else: states = ['None'] @@ -388,16 +390,7 @@ def main(): if module.params['member_host'] is None: json_output_list = [] for member in mybalancer.members: - json_output_list.append({ - "host": member.host, - "status": member.status, - "protocol": member.protocol, - "port": member.port, - "path": member.path, - "attributes": member.attributes, - "management_url": member.management_url, - "balancer_url": member.balancer_url - }) + json_output_list.append(member.as_dict()) module.exit_json( changed=False, members=json_output_list @@ -406,7 +399,7 @@ def main(): changed = False member_exists = False member_status = {'disabled': False, 'drained': False, 'hot_standby': False, 'ignore_errors': False} - for mode in member_status.keys(): + for mode in member_status: for state in states: if mode == state: member_status[mode] = True @@ -414,7 +407,7 @@ def main(): member_status[mode] = True for member in mybalancer.members: - if str(member.host) == str(module.params['member_host']): + if str(member.host) == module.params['member_host']: member_exists = True if module.params['state'] is not None: member_status_before = member.status @@ -424,23 +417,19 @@ def main(): member_status_after = member_status if member_status_before != member_status_after: changed = True - json_output = { - "host": member.host, - "status": member.status, - "protocol": member.protocol, - "port": member.port, - "path": 
member.path, - "attributes": member.attributes, - "management_url": member.management_url, - "balancer_url": member.balancer_url - } + json_output = member.as_dict() if member_exists: module.exit_json( changed=changed, member=json_output ) else: - module.fail_json(msg=str(module.params['member_host']) + ' is not a member of the balancer ' + str(module.params['balancer_vhost']) + '!') + module.fail_json( + msg='{member_host} is not a member of the balancer {balancer_vhost}!'.format( + member_host=module.params['member_host'], + balancer_vhost=module.params['balancer_vhost'], + ) + ) if __name__ == '__main__': diff --git a/plugins/modules/cloud_init_data_facts.py b/plugins/modules/cloud_init_data_facts.py index 360b4119ef..dd9825858e 100644 --- a/plugins/modules/cloud_init_data_facts.py +++ b/plugins/modules/cloud_init_data_facts.py @@ -105,9 +105,8 @@ def gather_cloud_init_data_facts(module): json_file = os.path.join(CLOUD_INIT_PATH, i + '.json') if os.path.exists(json_file): - f = open(json_file, 'rb') - contents = to_text(f.read(), errors='surrogate_or_strict') - f.close() + with open(json_file, 'rb') as f: + contents = to_text(f.read(), errors='surrogate_or_strict') if contents: res['cloud_init_data_facts'][i] = module.from_json(contents) diff --git a/plugins/modules/cloudflare_dns.py b/plugins/modules/cloudflare_dns.py index 6ce2ff8bb4..a51337e328 100644 --- a/plugins/modules/cloudflare_dns.py +++ b/plugins/modules/cloudflare_dns.py @@ -685,6 +685,7 @@ class CloudflareAPI(object): else: search_value = content + zone_id = self._get_zone_id(params['zone']) records = self.get_dns_records(params['zone'], params['type'], search_record, search_value) for rr in records: @@ -692,11 +693,11 @@ class CloudflareAPI(object): if not ((rr['type'] == params['type']) and (rr['name'] == search_record) and (rr['content'] == content)): self.changed = True if not self.module.check_mode: - result, info = self._cf_api_call('/zones/{0}/dns_records/{1}'.format(rr['zone_id'], rr['id']), 
'DELETE') + result, info = self._cf_api_call('/zones/{0}/dns_records/{1}'.format(zone_id, rr['id']), 'DELETE') else: self.changed = True if not self.module.check_mode: - result, info = self._cf_api_call('/zones/{0}/dns_records/{1}'.format(rr['zone_id'], rr['id']), 'DELETE') + result, info = self._cf_api_call('/zones/{0}/dns_records/{1}'.format(zone_id, rr['id']), 'DELETE') return self.changed def ensure_dns_record(self, **kwargs): diff --git a/plugins/modules/cpanm.py b/plugins/modules/cpanm.py index 3f708581ac..356cbbb215 100644 --- a/plugins/modules/cpanm.py +++ b/plugins/modules/cpanm.py @@ -56,6 +56,22 @@ options: - Only install dependencies. type: bool default: false + install_recommendations: + description: + - If V(true), installs dependencies declared as recommends per META spec. + - If V(false), it ensures the dependencies declared as recommends are not installed, overriding any decision made earlier in E(PERL_CPANM_OPT). + - If parameter is not set, C(cpanm) will use its existing defaults. + - When these dependencies fail to install, cpanm continues the installation, since they are just recommendation. + type: bool + version_added: 10.3.0 + install_suggestions: + description: + - If V(true), installs dependencies declared as suggests per META spec. + - If V(false), it ensures the dependencies declared as suggests are not installed, overriding any decision made earlier in E(PERL_CPANM_OPT). + - If parameter is not set, C(cpanm) will use its existing defaults. + - When these dependencies fail to install, cpanm continues the installation, since they are just suggestion. + type: bool + version_added: 10.3.0 version: description: - Version specification for the perl module. When O(mode) is V(new), C(cpanm) version operators are accepted. 
@@ -167,6 +183,8 @@ class CPANMinus(ModuleHelper): mirror=dict(type='str'), mirror_only=dict(type='bool', default=False), installdeps=dict(type='bool', default=False), + install_recommendations=dict(type='bool'), + install_suggestions=dict(type='bool'), executable=dict(type='path'), mode=dict(type='str', default='new', choices=['compatibility', 'new']), name_check=dict(type='str') @@ -181,6 +199,8 @@ class CPANMinus(ModuleHelper): mirror=cmd_runner_fmt.as_opt_val('--mirror'), mirror_only=cmd_runner_fmt.as_bool("--mirror-only"), installdeps=cmd_runner_fmt.as_bool("--installdeps"), + install_recommendations=cmd_runner_fmt.as_bool("--with-recommends", "--without-recommends", ignore_none=True), + install_suggestions=cmd_runner_fmt.as_bool("--with-suggests", "--without-suggests", ignore_none=True), pkg_spec=cmd_runner_fmt.as_list(), cpanm_version=cmd_runner_fmt.as_fixed("--version"), ) @@ -254,7 +274,16 @@ class CPANMinus(ModuleHelper): return pkg_spec = self.sanitize_pkg_spec_version(v[pkg_param], v.version) - with self.runner(['notest', 'locallib', 'mirror', 'mirror_only', 'installdeps', 'pkg_spec'], output_process=process) as ctx: + with self.runner([ + 'notest', + 'locallib', + 'mirror', + 'mirror_only', + 'installdeps', + 'install_recommendations', + 'install_suggestions', + 'pkg_spec' + ], output_process=process) as ctx: self.changed = ctx.run(pkg_spec=pkg_spec) diff --git a/plugins/modules/cronvar.py b/plugins/modules/cronvar.py index 488e739704..4f00aef07c 100644 --- a/plugins/modules/cronvar.py +++ b/plugins/modules/cronvar.py @@ -146,9 +146,8 @@ class CronVar(object): if self.cron_file: # read the cronfile try: - f = open(self.cron_file, 'r') - self.lines = f.read().splitlines() - f.close() + with open(self.cron_file, 'r') as f: + self.lines = f.read().splitlines() except IOError: # cron file does not exist return diff --git a/plugins/modules/crypttab.py b/plugins/modules/crypttab.py index b6a0e52cc3..f728e39ade 100644 --- a/plugins/modules/crypttab.py +++ 
b/plugins/modules/crypttab.py @@ -154,11 +154,8 @@ def main(): changed, reason = existing_line.opts.remove(opts) if changed and not module.check_mode: - try: - f = open(path, 'wb') + with open(path, 'wb') as f: f.write(to_bytes(crypttab, errors='surrogate_or_strict')) - finally: - f.close() module.exit_json(changed=changed, msg=reason, **module.params) @@ -173,12 +170,9 @@ class Crypttab(object): os.makedirs(os.path.dirname(path)) open(path, 'a').close() - try: - f = open(path, 'r') + with open(path, 'r') as f: for line in f.readlines(): self._lines.append(Line(line)) - finally: - f.close() def add(self, line): self._lines.append(line) diff --git a/plugins/modules/jira.py b/plugins/modules/jira.py index cc3136c3bf..dcedc47928 100644 --- a/plugins/modules/jira.py +++ b/plugins/modules/jira.py @@ -122,6 +122,14 @@ options: required: false description: - Only used when O(operation) is V(transition), and a bit of a misnomer, it actually refers to the transition name. + - This is mutually exclusive with O(status_id). + status_id: + type: str + required: false + description: + - Only used when O(operation) is V(transition), and refers to the transition ID. + - This is mutually exclusive with O(status). 
+ version_added: 10.3.0 assignee: type: str required: false @@ -483,6 +491,7 @@ class JIRA(StateModuleHelper): value=dict(type='str', required=True) )), status=dict(type='str', ), + status_id=dict(type='str', ), assignee=dict(type='str', ), fields=dict(default={}, type='dict'), linktype=dict(type='str', ), @@ -498,6 +507,7 @@ class JIRA(StateModuleHelper): ['username', 'token'], ['password', 'token'], ['assignee', 'account_id'], + ['status', 'status_id'] ], required_together=[ ['username', 'password'], @@ -511,7 +521,8 @@ class JIRA(StateModuleHelper): ('operation', 'comment', ['issue', 'comment']), ('operation', 'workflow', ['issue', 'comment']), ('operation', 'fetch', ['issue']), - ('operation', 'transition', ['issue', 'status']), + ('operation', 'transition', ['issue']), + ('operation', 'transition', ['status', 'status_id'], True), ('operation', 'link', ['linktype', 'inwardissue', 'outwardissue']), ('operation', 'search', ['jql']), ), @@ -616,14 +627,27 @@ class JIRA(StateModuleHelper): turl = self.vars.restbase + '/issue/' + self.vars.issue + "/transitions" tmeta = self.get(turl) - target = self.vars.status tid = None + target = None + + if self.vars.status is not None: + target = self.vars.status.strip() + elif self.vars.status_id is not None: + tid = self.vars.status_id.strip() + for t in tmeta['transitions']: - if t['name'] == target: - tid = t['id'] - break + if target is not None: + if t['name'] == target: + tid = t['id'] + break + else: + if tid == t['id']: + break else: - raise ValueError("Failed find valid transition for '%s'" % target) + if target is not None: + raise ValueError("Failed find valid transition for '%s'" % target) + else: + raise ValueError("Failed find valid transition for ID '%s'" % tid) fields = dict(self.vars.fields) if self.vars.summary is not None: diff --git a/plugins/modules/keycloak_authentication.py b/plugins/modules/keycloak_authentication.py index 58878c069d..a117c730e6 100644 --- a/plugins/modules/keycloak_authentication.py 
+++ b/plugins/modules/keycloak_authentication.py @@ -359,7 +359,8 @@ def main(): module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True, required_one_of=([['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']]) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, ) result = dict(changed=False, msg='', flow={}) diff --git a/plugins/modules/keycloak_authentication_required_actions.py b/plugins/modules/keycloak_authentication_required_actions.py index 60b47d7a6a..147acf9a1e 100644 --- a/plugins/modules/keycloak_authentication_required_actions.py +++ b/plugins/modules/keycloak_authentication_required_actions.py @@ -238,7 +238,8 @@ def main(): argument_spec=argument_spec, supports_check_mode=True, required_one_of=([['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']]) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, ) result = dict(changed=False, msg='', end_state={}, diff=dict(before={}, after={})) diff --git a/plugins/modules/keycloak_authz_authorization_scope.py b/plugins/modules/keycloak_authz_authorization_scope.py index 16f4149d68..6b2e3c30f6 100644 --- a/plugins/modules/keycloak_authz_authorization_scope.py +++ b/plugins/modules/keycloak_authz_authorization_scope.py @@ -154,7 +154,9 @@ def main(): supports_check_mode=True, required_one_of=( [['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) result = dict(changed=False, msg='', end_state={}, diff=dict(before={}, after={})) diff --git a/plugins/modules/keycloak_authz_custom_policy.py 
b/plugins/modules/keycloak_authz_custom_policy.py index c20adbc03f..5e1a2a6a2d 100644 --- a/plugins/modules/keycloak_authz_custom_policy.py +++ b/plugins/modules/keycloak_authz_custom_policy.py @@ -140,7 +140,9 @@ def main(): supports_check_mode=True, required_one_of=( [['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) result = dict(changed=False, msg='', end_state={}) diff --git a/plugins/modules/keycloak_authz_permission.py b/plugins/modules/keycloak_authz_permission.py index aee1b1a50f..683b5f8c18 100644 --- a/plugins/modules/keycloak_authz_permission.py +++ b/plugins/modules/keycloak_authz_permission.py @@ -254,7 +254,9 @@ def main(): supports_check_mode=True, required_one_of=( [['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) # Convenience variables state = module.params.get('state') diff --git a/plugins/modules/keycloak_authz_permission_info.py b/plugins/modules/keycloak_authz_permission_info.py index b57b7675a0..0271dfd4c4 100644 --- a/plugins/modules/keycloak_authz_permission_info.py +++ b/plugins/modules/keycloak_authz_permission_info.py @@ -135,7 +135,9 @@ def main(): supports_check_mode=True, required_one_of=( [['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) # Convenience variables name = module.params.get('name') diff --git a/plugins/modules/keycloak_client.py b/plugins/modules/keycloak_client.py index 68696fd404..bb51a6a9b3 100644 
--- a/plugins/modules/keycloak_client.py +++ b/plugins/modules/keycloak_client.py @@ -775,8 +775,11 @@ def sanitize_cr(clientrep): result['secret'] = 'no_log' if 'attributes' in result: attributes = result['attributes'] - if isinstance(attributes, dict) and 'saml.signing.private.key' in attributes: - attributes['saml.signing.private.key'] = 'no_log' + if isinstance(attributes, dict): + if 'saml.signing.private.key' in attributes: + attributes['saml.signing.private.key'] = 'no_log' + if 'saml.encryption.private.key' in attributes: + attributes['saml.encryption.private.key'] = 'no_log' return normalise_cr(result) @@ -921,7 +924,9 @@ def main(): supports_check_mode=True, required_one_of=([['client_id', 'id'], ['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) result = dict(changed=False, msg='', diff={}, proposed={}, existing={}, end_state={}) diff --git a/plugins/modules/keycloak_client_rolemapping.py b/plugins/modules/keycloak_client_rolemapping.py index dff8c633b6..cb1cad8291 100644 --- a/plugins/modules/keycloak_client_rolemapping.py +++ b/plugins/modules/keycloak_client_rolemapping.py @@ -269,7 +269,9 @@ def main(): module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True, required_one_of=([['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) result = dict(changed=False, msg='', diff={}, proposed={}, existing={}, end_state={}) diff --git a/plugins/modules/keycloak_clientscope.py b/plugins/modules/keycloak_clientscope.py index b36c390ae1..4c452d4f2e 100644 --- a/plugins/modules/keycloak_clientscope.py +++ 
b/plugins/modules/keycloak_clientscope.py @@ -355,7 +355,9 @@ def main(): supports_check_mode=True, required_one_of=([['id', 'name'], ['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) result = dict(changed=False, msg='', diff={}, proposed={}, existing={}, end_state={}) diff --git a/plugins/modules/keycloak_clientscope_type.py b/plugins/modules/keycloak_clientscope_type.py index 3923d5fb43..0e742f676c 100644 --- a/plugins/modules/keycloak_clientscope_type.py +++ b/plugins/modules/keycloak_clientscope_type.py @@ -149,11 +149,13 @@ def keycloak_clientscope_type_module(): ['default_clientscopes', 'optional_clientscopes'] ]), required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, mutually_exclusive=[ ['token', 'auth_realm'], ['token', 'auth_username'], ['token', 'auth_password'] - ]) + ], + ) return module diff --git a/plugins/modules/keycloak_clienttemplate.py b/plugins/modules/keycloak_clienttemplate.py index 66e96f5a50..ae6e61380e 100644 --- a/plugins/modules/keycloak_clienttemplate.py +++ b/plugins/modules/keycloak_clienttemplate.py @@ -297,7 +297,9 @@ def main(): supports_check_mode=True, required_one_of=([['id', 'name'], ['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) result = dict(changed=False, msg='', diff={}, proposed={}, existing={}, end_state={}) diff --git a/plugins/modules/keycloak_component.py b/plugins/modules/keycloak_component.py index 5c7e3cd56b..d5a3be2a8e 100644 --- a/plugins/modules/keycloak_component.py +++ b/plugins/modules/keycloak_component.py @@ -156,7 +156,9 @@ def 
main(): module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True, required_one_of=([['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) result = dict(changed=False, msg='', end_state={}, diff=dict(before={}, after={})) diff --git a/plugins/modules/keycloak_group.py b/plugins/modules/keycloak_group.py index b6b267e906..08d2555745 100644 --- a/plugins/modules/keycloak_group.py +++ b/plugins/modules/keycloak_group.py @@ -335,7 +335,9 @@ def main(): supports_check_mode=True, required_one_of=([['id', 'name'], ['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) result = dict(changed=False, msg='', diff={}, group='') diff --git a/plugins/modules/keycloak_identity_provider.py b/plugins/modules/keycloak_identity_provider.py index e2c61a4a7a..68a31a227b 100644 --- a/plugins/modules/keycloak_identity_provider.py +++ b/plugins/modules/keycloak_identity_provider.py @@ -498,7 +498,9 @@ def main(): module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True, required_one_of=([['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) result = dict(changed=False, msg='', diff={}, proposed={}, existing={}, end_state={}) diff --git a/plugins/modules/keycloak_realm.py b/plugins/modules/keycloak_realm.py index 7c505d8d37..adca01eb27 100644 --- a/plugins/modules/keycloak_realm.py +++ b/plugins/modules/keycloak_realm.py @@ -707,7 +707,9 @@ def 
main(): supports_check_mode=True, required_one_of=([['id', 'realm', 'enabled'], ['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) result = dict(changed=False, msg='', diff={}, proposed={}, existing={}, end_state={}) diff --git a/plugins/modules/keycloak_realm_key.py b/plugins/modules/keycloak_realm_key.py index 425206bf98..97e0af6da5 100644 --- a/plugins/modules/keycloak_realm_key.py +++ b/plugins/modules/keycloak_realm_key.py @@ -264,7 +264,9 @@ def main(): module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True, required_one_of=([['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) # Initialize the result object. Only "changed" seems to have special # meaning for Ansible. 
diff --git a/plugins/modules/keycloak_realm_keys_metadata_info.py b/plugins/modules/keycloak_realm_keys_metadata_info.py index f76cabfd36..9946bd88ba 100644 --- a/plugins/modules/keycloak_realm_keys_metadata_info.py +++ b/plugins/modules/keycloak_realm_keys_metadata_info.py @@ -105,7 +105,8 @@ def main(): argument_spec=argument_spec, supports_check_mode=True, required_one_of=([["token", "auth_realm", "auth_username", "auth_password"]]), - required_together=([["auth_realm", "auth_username", "auth_password"]]), + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, ) result = dict(changed=False, msg="", keys_metadata="") diff --git a/plugins/modules/keycloak_realm_rolemapping.py b/plugins/modules/keycloak_realm_rolemapping.py index 4217e7e581..2937ed0ec0 100644 --- a/plugins/modules/keycloak_realm_rolemapping.py +++ b/plugins/modules/keycloak_realm_rolemapping.py @@ -253,7 +253,9 @@ def main(): module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True, required_one_of=([['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) result = dict(changed=False, msg='', diff={}, proposed={}, existing={}, end_state={}) diff --git a/plugins/modules/keycloak_role.py b/plugins/modules/keycloak_role.py index 267682d31c..93705e2b4e 100644 --- a/plugins/modules/keycloak_role.py +++ b/plugins/modules/keycloak_role.py @@ -248,7 +248,9 @@ def main(): module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True, required_one_of=([['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) 
result = dict(changed=False, msg='', diff={}, proposed={}, existing={}, end_state={}) diff --git a/plugins/modules/keycloak_user.py b/plugins/modules/keycloak_user.py index 65880548ab..9c2c110903 100644 --- a/plugins/modules/keycloak_user.py +++ b/plugins/modules/keycloak_user.py @@ -408,7 +408,9 @@ def main(): module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True, required_one_of=([['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) result = dict(changed=False, msg='', diff={}, proposed={}, existing={}, end_state={}) diff --git a/plugins/modules/keycloak_user_federation.py b/plugins/modules/keycloak_user_federation.py index a631145600..78b8b0fdeb 100644 --- a/plugins/modules/keycloak_user_federation.py +++ b/plugins/modules/keycloak_user_federation.py @@ -839,7 +839,9 @@ def main(): supports_check_mode=True, required_one_of=([['id', 'name'], ['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) result = dict(changed=False, msg='', diff={}, proposed={}, existing={}, end_state={}) diff --git a/plugins/modules/keycloak_user_rolemapping.py b/plugins/modules/keycloak_user_rolemapping.py index f8690d70c9..c7af801706 100644 --- a/plugins/modules/keycloak_user_rolemapping.py +++ b/plugins/modules/keycloak_user_rolemapping.py @@ -244,7 +244,9 @@ def main(): supports_check_mode=True, required_one_of=([['token', 'auth_realm', 'auth_username', 'auth_password'], ['uid', 'target_username', 'service_account_user_client_id']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 
'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) result = dict(changed=False, msg='', diff={}, proposed={}, existing={}, end_state={}) diff --git a/plugins/modules/keycloak_userprofile.py b/plugins/modules/keycloak_userprofile.py index f54cd7183a..f637271497 100644 --- a/plugins/modules/keycloak_userprofile.py +++ b/plugins/modules/keycloak_userprofile.py @@ -534,7 +534,9 @@ def main(): module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True, required_one_of=([['token', 'auth_realm', 'auth_username', 'auth_password']]), - required_together=([['auth_realm', 'auth_username', 'auth_password']])) + required_together=([['auth_realm', 'auth_username', 'auth_password']]), + required_by={'refresh_token': 'auth_realm'}, + ) # Initialize the result object. Only "changed" seems to have special # meaning for Ansible. diff --git a/plugins/modules/lvol.py b/plugins/modules/lvol.py index c66098c354..6166e437f2 100644 --- a/plugins/modules/lvol.py +++ b/plugins/modules/lvol.py @@ -60,7 +60,7 @@ options: default: true force: description: - - Shrink or remove operations of volumes requires this switch. Ensures that that filesystems get never corrupted/destroyed + - Shrink or remove operations of volumes requires this switch. Ensures that filesystems never get corrupted/destroyed by mistake. type: bool default: false diff --git a/plugins/modules/nmcli.py b/plugins/modules/nmcli.py index 0daf667160..3aff17ea6e 100644 --- a/plugins/modules/nmcli.py +++ b/plugins/modules/nmcli.py @@ -379,6 +379,12 @@ options: - This is only used with bond - xmit_hash_policy type. type: str version_added: 5.6.0 + fail_over_mac: + description: + - This is only used with bond - fail_over_mac. + type: str + choices: [none, active, follow] + version_added: 10.3.0 arp_interval: description: - This is only used with bond - ARP interval. 
@@ -1691,6 +1697,7 @@ class Nmcli(object): self.downdelay = module.params['downdelay'] self.updelay = module.params['updelay'] self.xmit_hash_policy = module.params['xmit_hash_policy'] + self.fail_over_mac = module.params['fail_over_mac'] self.arp_interval = module.params['arp_interval'] self.arp_ip_target = module.params['arp_ip_target'] self.slavepriority = module.params['slavepriority'] @@ -1839,6 +1846,7 @@ class Nmcli(object): 'primary': self.primary, 'updelay': self.updelay, 'xmit_hash_policy': self.xmit_hash_policy, + 'fail_over_mac': self.fail_over_mac, }) elif self.type == 'bond-slave': if self.slave_type and self.slave_type != 'bond': @@ -2286,6 +2294,9 @@ class Nmcli(object): if key == 'xmit_hash_policy': cmd.extend(['+bond.options', 'xmit_hash_policy=%s' % value]) continue + if key == 'fail_over_mac': + cmd.extend(['+bond.options', 'fail_over_mac=%s' % value]) + continue cmd.extend([key, value]) return self.execute_command(cmd) @@ -2602,6 +2613,7 @@ def main(): downdelay=dict(type='int'), updelay=dict(type='int'), xmit_hash_policy=dict(type='str'), + fail_over_mac=dict(type='str', choices=['none', 'active', 'follow']), arp_interval=dict(type='int'), arp_ip_target=dict(type='str'), primary=dict(type='str'), diff --git a/plugins/modules/one_template.py b/plugins/modules/one_template.py index fd2e18dc5e..71db2c1d2c 100644 --- a/plugins/modules/one_template.py +++ b/plugins/modules/one_template.py @@ -51,6 +51,16 @@ options: choices: ["present", "absent"] default: present type: str + filter: + description: + - V(user_primary_group) - Resources belonging to the user's primary group. + - V(user) - Resources belonging to the user. + - V(all) - All resources. + - V(user_groups) - Resources belonging to the user and any of his groups. 
+ choices: [user_primary_group, user, all, user_groups] + default: user + type: str + version_added: 10.3.0 extends_documentation_fragment: - community.general.opennebula @@ -157,6 +167,7 @@ class TemplateModule(OpenNebulaModule): name=dict(type='str', required=False), state=dict(type='str', choices=['present', 'absent'], default='present'), template=dict(type='str', required=False), + filter=dict(type='str', required=False, choices=['user_primary_group', 'user', 'all', 'user_groups'], default='user'), ) mutually_exclusive = [ @@ -182,10 +193,11 @@ class TemplateModule(OpenNebulaModule): name = params.get('name') desired_state = params.get('state') template_data = params.get('template') + filter = params.get('filter') self.result = {} - template = self.get_template_instance(id, name) + template = self.get_template_instance(id, name, filter) needs_creation = False if not template and desired_state != 'absent': if id: @@ -197,16 +209,19 @@ class TemplateModule(OpenNebulaModule): self.result = self.delete_template(template) else: if needs_creation: - self.result = self.create_template(name, template_data) + self.result = self.create_template(name, template_data, filter) else: - self.result = self.update_template(template, template_data) + self.result = self.update_template(template, template_data, filter) self.exit() - def get_template(self, predicate): - # -3 means "Resources belonging to the user" + def get_template(self, predicate, filter): + # filter was included, for discussions see: + # Issue: https://github.com/ansible-collections/community.general/issues/9278 + # PR: https://github.com/ansible-collections/community.general/pull/9547 # the other two parameters are used for pagination, -1 for both essentially means "return all" - pool = self.one.templatepool.info(-3, -1, -1) + filter_values = {'user_primary_group': -4, 'user': -3, 'all': -2, 'user_groups': -1} + pool = self.one.templatepool.info(filter_values[filter], -1, -1) for template in pool.VMTEMPLATE: if 
predicate(template): @@ -214,17 +229,17 @@ class TemplateModule(OpenNebulaModule): return None - def get_template_by_id(self, template_id): - return self.get_template(lambda template: (template.ID == template_id)) + def get_template_by_id(self, template_id, filter): + return self.get_template(lambda template: (template.ID == template_id), filter) - def get_template_by_name(self, name): - return self.get_template(lambda template: (template.NAME == name)) + def get_template_by_name(self, name, filter): + return self.get_template(lambda template: (template.NAME == name), filter) - def get_template_instance(self, requested_id, requested_name): + def get_template_instance(self, requested_id, requested_name, filter): if requested_id: - return self.get_template_by_id(requested_id) + return self.get_template_by_id(requested_id, filter) else: - return self.get_template_by_name(requested_name) + return self.get_template_by_name(requested_name, filter) def get_template_info(self, template): info = { @@ -239,21 +254,21 @@ class TemplateModule(OpenNebulaModule): return info - def create_template(self, name, template_data): + def create_template(self, name, template_data, filter): if not self.module.check_mode: self.one.template.allocate("NAME = \"" + name + "\"\n" + template_data) - result = self.get_template_info(self.get_template_by_name(name)) + result = self.get_template_info(self.get_template_by_name(name, filter)) result['changed'] = True return result - def update_template(self, template, template_data): + def update_template(self, template, template_data, filter): if not self.module.check_mode: # 0 = replace the whole template self.one.template.update(template.ID, template_data, 0) - result = self.get_template_info(self.get_template_by_id(template.ID)) + result = self.get_template_info(self.get_template_by_id(template.ID, filter)) if self.module.check_mode: # Unfortunately it is not easy to detect if the template would have changed, therefore always report a change 
here. result['changed'] = True diff --git a/plugins/modules/pagerduty_alert.py b/plugins/modules/pagerduty_alert.py index bd0a4266db..347e849822 100644 --- a/plugins/modules/pagerduty_alert.py +++ b/plugins/modules/pagerduty_alert.py @@ -73,24 +73,20 @@ options: type: str description: - The name of the monitoring client that is triggering this event. - required: false client_url: type: str description: - The URL of the monitoring client that is triggering this event. - required: false component: type: str description: - Component of the source machine that is responsible for the event, for example C(mysql) or C(eth0). - required: false version_added: 7.4.0 custom_details: type: dict description: - Additional details about the event and affected system. - A dictionary with custom keys and values. - required: false version_added: 7.4.0 desc: type: str @@ -100,13 +96,11 @@ options: tables in the PagerDuty UI. The maximum length is 1024 characters. - For O(state=acknowledged) or O(state=resolved) - Text that will appear in the incident's log associated with this event. - required: false default: Created via Ansible incident_class: type: str description: - The class/type of the event, for example C(ping failure) or C(cpu load). - required: false version_added: 7.4.0 incident_key: type: str @@ -118,25 +112,21 @@ options: be generated by PagerDuty. - For O(state=acknowledged) or O(state=resolved) - This should be the incident_key you received back when the incident was first opened by a trigger event. Acknowledge events referencing resolved or nonexistent incidents will be discarded. - required: false link_url: type: str description: - Relevant link URL to the alert. For example, the website or the job link. - required: false version_added: 7.4.0 link_text: type: str description: - A short description of the O(link_url). - required: false version_added: 7.4.0 source: type: str description: - The unique location of the affected system, preferably a hostname or FQDN. 
- Required in case of O(state=trigger) and O(api_version=v2). - required: false version_added: 7.4.0 severity: type: str @@ -332,25 +322,25 @@ def send_event_v2(module, service_key, event_type, payload, link, def main(): module = AnsibleModule( argument_spec=dict( - name=dict(required=False), - api_key=dict(required=False, no_log=True), - integration_key=dict(required=False, no_log=True), - service_id=dict(required=False), - service_key=dict(required=False, no_log=True), + name=dict(), + api_key=dict(no_log=True), + integration_key=dict(no_log=True), + service_id=dict(), + service_key=dict(no_log=True), state=dict( required=True, choices=['triggered', 'acknowledged', 'resolved'] ), api_version=dict(type='str', default='v1', choices=['v1', 'v2']), - client=dict(required=False), - client_url=dict(required=False), - component=dict(required=False), - custom_details=dict(required=False, type='dict'), - desc=dict(required=False, default='Created via Ansible'), - incident_class=dict(required=False), - incident_key=dict(required=False, no_log=False), - link_url=dict(required=False), - link_text=dict(required=False), - source=dict(required=False), + client=dict(), + client_url=dict(), + component=dict(), + custom_details=dict(type='dict'), + desc=dict(default='Created via Ansible'), + incident_class=dict(), + incident_key=dict(no_log=False), + link_url=dict(), + link_text=dict(), + source=dict(), severity=dict( default='critical', choices=['critical', 'warning', 'error', 'info'] ), diff --git a/plugins/modules/parted.py b/plugins/modules/parted.py index a9e83eb2b0..98f8f4d647 100644 --- a/plugins/modules/parted.py +++ b/plugins/modules/parted.py @@ -588,11 +588,8 @@ def read_record(file_path, default=None): Reads the first line of a file and returns it. 
""" try: - f = open(file_path, 'r') - try: + with open(file_path, 'r') as f: return f.readline().strip() - finally: - f.close() except IOError: return default diff --git a/plugins/modules/pipx.py b/plugins/modules/pipx.py index 85c88086a8..e7806d4e75 100644 --- a/plugins/modules/pipx.py +++ b/plugins/modules/pipx.py @@ -382,12 +382,12 @@ class PipX(StateModuleHelper): def state_latest(self): if not self.vars.application or self.vars.force: self.changed = True - args_order = 'state index_url install_deps force python system_site_packages editable pip_args suffix name_source' + args_order = 'state global index_url install_deps force python system_site_packages editable pip_args suffix name_source' with self.runner(args_order, check_mode_skip=True) as ctx: ctx.run(state='install', name_source=[self.vars.name, self.vars.source]) self._capture_results(ctx) - with self.runner('state include_injected index_url force editable pip_args name', check_mode_skip=True) as ctx: + with self.runner('state global include_injected index_url force editable pip_args name', check_mode_skip=True) as ctx: ctx.run(state='upgrade') self._capture_results(ctx) diff --git a/plugins/modules/proxmox.py b/plugins/modules/proxmox.py index bd33245ead..4f25b90ad0 100644 --- a/plugins/modules/proxmox.py +++ b/plugins/modules/proxmox.py @@ -6,6 +6,7 @@ # SPDX-License-Identifier: GPL-3.0-or-later from __future__ import absolute_import, division, print_function + __metaclass__ = type DOCUMENTATION = r""" @@ -45,10 +46,11 @@ options: - Older versions of Proxmox will accept a numeric value for size using the O(storage) parameter to automatically choose which storage to allocate from, however new versions enforce the C(:) syntax. 
- Additional options are available by using some combination of the following key-value pairs as a comma-delimited list - C([volume=] [,acl=<1|0>] [,mountoptions=] [,quota=<1|0>] [,replicate=<1|0>] [,ro=<1|0>] [,shared=<1|0>] + C([volume=] + [,acl=<1|0>] [,mountoptions=] [,replicate=<1|0>] [,ro=<1|0>] [,shared=<1|0>] [,size=]). - See U(https://pve.proxmox.com/wiki/Linux_Container) for a full description. - - This option is mutually exclusive with O(storage) and O(disk_volume). + - This option is mutually exclusive with O(disk_volume). type: str disk_volume: description: @@ -68,13 +70,13 @@ options: - O(disk_volume.volume) is the name of an existing volume. - If not defined, the module will check if one exists. If not, a new volume will be created. - If defined, the volume must exist under that name. - - Required only if O(disk_volume.storage) is defined and mutually exclusive with O(disk_volume.host_path). + - Required only if O(disk_volume.storage) is defined, and mutually exclusive with O(disk_volume.host_path). type: str size: description: - O(disk_volume.size) is the size of the storage to use. - - The size is given in GB. - - Required only if O(disk_volume.storage) is defined and mutually exclusive with O(disk_volume.host_path). + - The size is given in GiB. + - Required only if O(disk_volume.storage) is defined, and mutually exclusive with O(disk_volume.host_path). type: int host_path: description: @@ -157,7 +159,7 @@ options: size: description: - O(mount_volumes[].size) is the size of the storage to use. - - The size is given in GB. + - The size is given in GiB. - Required only if O(mount_volumes[].storage) is defined and mutually exclusive with O(mount_volumes[].host_path). type: int host_path: @@ -186,7 +188,7 @@ options: storage: description: - Target storage. - - This Option is mutually exclusive with O(disk) and O(disk_volume). + - This option is mutually exclusive with O(disk_volume) and O(mount_volumes). 
type: str default: 'local' ostype: @@ -226,8 +228,9 @@ options: update: description: - If V(true), the container will be updated with new values. + - The current default value of V(false) is deprecated and will change to V(true) in community.general 11.0.0. + Please set O(update) explicitly to V(false) or V(true) to avoid surprises and get rid of the deprecation warning. type: bool - default: false version_added: 8.1.0 force: description: @@ -605,397 +608,25 @@ EXAMPLES = r""" import re import time -from ansible_collections.community.general.plugins.module_utils.version import LooseVersion - from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.common.text.converters import to_native - - from ansible_collections.community.general.plugins.module_utils.proxmox import ( - ansible_to_proxmox_bool, proxmox_auth_argument_spec, ProxmoxAnsible) - -VZ_TYPE = None + ProxmoxAnsible, + ansible_to_proxmox_bool, + proxmox_auth_argument_spec, +) +from ansible_collections.community.general.plugins.module_utils.version import LooseVersion -class ProxmoxLxcAnsible(ProxmoxAnsible): - def content_check(self, node, ostemplate, template_store): - return [True for cnt in self.proxmox_api.nodes(node).storage(template_store).content.get() if cnt['volid'] == ostemplate] - - def is_template_container(self, node, vmid): - """Check if the specified container is a template.""" - proxmox_node = self.proxmox_api.nodes(node) - config = getattr(proxmox_node, VZ_TYPE)(vmid).config.get() - return config.get('template', False) - - def update_config(self, vmid, node, disk, cpus, memory, swap, **kwargs): - if VZ_TYPE != "lxc": - self.module.fail_json( - changed=False, - msg="Updating configuration is only supported for LXC enabled proxmox clusters.", - ) - - def parse_disk_string(disk_string): - # Example strings: - # "acl=0,thin1:base-100-disk-1,size=8G" - # "thin1:10,backup=0" - # "local:20" - # "volume=local-lvm:base-100-disk-1,size=20G" - # 
"/mnt/bindmounts/shared,mp=/shared" - # "volume=/dev/USB01,mp=/mnt/usb01" - args = disk_string.split(",") - # If the volume is not explicitly defined but implicit by only passing a key, - # add the "volume=" key prefix for ease of parsing. - args = ["volume=" + arg if "=" not in arg else arg for arg in args] - # Then create a dictionary from the arguments - disk_kwargs = dict(map(lambda item: item.split("="), args)) - - VOLUME_PATTERN = r"""(?x) - (?:(?P[\w\-.]+): - (?:(?P\d+)| - (?P[^,\s]+)) - )| - (?P[^,\s]+) - """ - # DISCLAIMER: - # There are two things called a "volume": - # 1. The "volume" key which describes the storage volume, device or directory to mount into the container. - # 2. The storage volume of a storage-backed mount point in the PVE storage sub system. - # In this section, we parse the "volume" key and check which type of mount point we are dealing with. - pattern = re.compile(VOLUME_PATTERN) - match_dict = pattern.match(disk_kwargs.pop("volume")).groupdict() - match_dict = {k: v for k, v in match_dict.items() if v is not None} - - if "storage" in match_dict and "volume" in match_dict: - disk_kwargs["storage"] = match_dict["storage"] - disk_kwargs["volume"] = match_dict["volume"] - elif "storage" in match_dict and "size" in match_dict: - disk_kwargs["storage"] = match_dict["storage"] - disk_kwargs["size"] = match_dict["size"] - elif "host_path" in match_dict: - disk_kwargs["host_path"] = match_dict["host_path"] - - # Pattern matching only available in Python 3.10+ - # match match_dict: - # case {"storage": storage, "volume": volume}: - # disk_kwargs["storage"] = storage - # disk_kwargs["volume"] = volume - - # case {"storage": storage, "size": size}: - # disk_kwargs["storage"] = storage - # disk_kwargs["size"] = size - - # case {"host_path": host_path}: - # disk_kwargs["host_path"] = host_path - - return disk_kwargs - - def convert_mounts(mount_dict): - return_list = [] - for mount_key, mount_value in mount_dict.items(): - mount_config = 
parse_disk_string(mount_value) - return_list.append(dict(id=mount_key, **mount_config)) - - return return_list - - def build_volume( - key, - storage=None, - volume=None, - host_path=None, - size=None, - mountpoint=None, - options=None, - **kwargs - ): - if size is not None and isinstance(size, str): - size = size.strip("G") - # 1. Handle volume checks/creation - # 1.1 Check if defined volume exists - if volume is not None: - storage_content = self.get_storage_content(node, storage, vmid=vmid) - vol_ids = [vol["volid"] for vol in storage_content] - volid = "{storage}:{volume}".format(storage=storage, volume=volume) - if volid not in vol_ids: - self.module.fail_json( - changed=False, - msg="Storage {storage} does not contain volume {volume}".format( - storage=storage, - volume=volume, - ), - ) - vol_string = "{storage}:{volume},size={size}G".format( - storage=storage, volume=volume, size=size - ) - # 1.2 If volume not defined (but storage is), check if it exists - elif storage is not None: - api_node = self.proxmox_api.nodes( - node - ) # The node must exist, but not the LXC - try: - vol = api_node.lxc(vmid).get("config").get(key) - volume = parse_disk_string(vol).get("volume") - vol_string = "{storage}:{volume},size={size}G".format( - storage=storage, volume=volume, size=size - ) - - # If not, we have proxmox create one using the special syntax - except Exception: - vol_string = "{storage}:{size}".format(storage=storage, size=size) - else: - raise AssertionError('Internal error') - - # 1.3 If we have a host_path, we don't have storage, a volume, or a size - vol_string = ",".join( - [vol_string] + - ([] if host_path is None else [host_path]) + - ([] if mountpoint is None else ["mp={0}".format(mountpoint)]) + - ([] if options is None else ["{0}={1}".format(k, v) for k, v in options.items()]) + - ([] if not kwargs else ["{0}={1}".format(k, v) for k, v in kwargs.items()]) - ) - - return {key: vol_string} - - # Version limited features - minimum_version = {"tags": 
"6.1", "timezone": "6.3"} - proxmox_node = self.proxmox_api.nodes(node) - - pve_version = self.version() - - # Fail on unsupported features - for option, version in minimum_version.items(): - if pve_version < LooseVersion(version) and option in kwargs: - self.module.fail_json( - changed=False, - msg="Feature {option} is only supported in PVE {version}+, and you're using PVE {pve_version}".format( - option=option, version=version, pve_version=pve_version - ), - ) - - # Remove all empty kwarg entries - kwargs = {key: val for key, val in kwargs.items() if val is not None} - - if cpus is not None: - kwargs["cpulimit"] = cpus - if disk is not None: - kwargs["disk_volume"] = parse_disk_string(disk) - if "disk_volume" in kwargs: - disk_dict = build_volume(key="rootfs", **kwargs.pop("disk_volume")) - kwargs.update(disk_dict) - if memory is not None: - kwargs["memory"] = memory - if swap is not None: - kwargs["swap"] = swap - if "netif" in kwargs: - kwargs.update(kwargs.pop("netif")) - if "mounts" in kwargs: - kwargs["mount_volumes"] = convert_mounts(kwargs.pop("mounts")) - if "mount_volumes" in kwargs: - mounts_list = kwargs.pop("mount_volumes") - for mount_config in mounts_list: - key = mount_config.pop("id") - mount_dict = build_volume(key=key, **mount_config) - kwargs.update(mount_dict) - # LXC tags are expected to be valid and presented as a comma/semi-colon delimited string - if "tags" in kwargs: - re_tag = re.compile(r"^[a-z0-9_][a-z0-9_\-\+\.]*$") - for tag in kwargs["tags"]: - if not re_tag.match(tag): - self.module.fail_json(msg="%s is not a valid tag" % tag) - kwargs["tags"] = ",".join(kwargs["tags"]) - - # fetch the current config - current_config = getattr(proxmox_node, VZ_TYPE)(vmid).config.get() - - # compare the requested config against the current - update_config = False - for (arg, value) in kwargs.items(): - # if the arg isn't in the current config, it needs to be updated - if arg not in current_config: - update_config = True - break - # some values are 
lists, the order isn't always the same, so split them and compare by key - if isinstance(value, str): - current_values = current_config[arg].split(",") - requested_values = value.split(",") - for new_value in requested_values: - if new_value not in current_values: - update_config = True - break - # if it is not a list (or string) just compare the current value - else: - # some types don't match with the API, so forcing to string for comparison - if str(value) != str(current_config[arg]): - update_config = True - break - - if update_config: - getattr(proxmox_node, VZ_TYPE)(vmid).config.put(vmid=vmid, node=node, **kwargs) - else: - self.module.exit_json(changed=False, msg="Container config is already up to date") - - def create_instance(self, vmid, node, disk, storage, cpus, memory, swap, timeout, clone, **kwargs): - - # Version limited features - minimum_version = { - 'tags': '6.1', - 'timezone': '6.3' - } - proxmox_node = self.proxmox_api.nodes(node) - - # Remove all empty kwarg entries - kwargs = {k: v for k, v in kwargs.items() if v is not None} - - pve_version = self.version() - - # Fail on unsupported features - for option, version in minimum_version.items(): - if pve_version < LooseVersion(version) and option in kwargs: - self.module.fail_json(changed=False, msg="Feature {option} is only supported in PVE {version}+, and you're using PVE {pve_version}". 
- format(option=option, version=version, pve_version=pve_version)) - - if VZ_TYPE == 'lxc': - kwargs['cpulimit'] = cpus - kwargs['rootfs'] = disk - if 'netif' in kwargs: - kwargs.update(kwargs['netif']) - del kwargs['netif'] - if 'mounts' in kwargs: - kwargs.update(kwargs['mounts']) - del kwargs['mounts'] - if 'pubkey' in kwargs: - if self.version() >= LooseVersion('4.2'): - kwargs['ssh-public-keys'] = kwargs['pubkey'] - del kwargs['pubkey'] - else: - kwargs['cpus'] = cpus - kwargs['disk'] = disk - - # LXC tags are expected to be valid and presented as a comma/semi-colon delimited string - if 'tags' in kwargs: - re_tag = re.compile(r'^[a-z0-9_][a-z0-9_\-\+\.]*$') - for tag in kwargs['tags']: - if not re_tag.match(tag): - self.module.fail_json(msg='%s is not a valid tag' % tag) - kwargs['tags'] = ",".join(kwargs['tags']) - - if kwargs.get('ostype') == 'auto': - kwargs.pop('ostype') - - if clone is not None: - if VZ_TYPE != 'lxc': - self.module.fail_json(changed=False, msg="Clone operator is only supported for LXC enabled proxmox clusters.") - - clone_is_template = self.is_template_container(node, clone) - - # By default, create a full copy only when the cloned container is not a template. - create_full_copy = not clone_is_template - - # Only accept parameters that are compatible with the clone endpoint. - valid_clone_parameters = ['hostname', 'pool', 'description'] - if self.module.params['storage'] is not None and clone_is_template: - # Cloning a template, so create a full copy instead of a linked copy - create_full_copy = True - elif self.module.params['storage'] is None and not clone_is_template: - # Not cloning a template, but also no defined storage. This isn't possible. 
- self.module.fail_json(changed=False, msg="Cloned container is not a template, storage needs to be specified.") - - if self.module.params['clone_type'] == 'linked': - if not clone_is_template: - self.module.fail_json(changed=False, msg="'linked' clone type is specified, but cloned container is not a template container.") - # Don't need to do more, by default create_full_copy is set to false already - elif self.module.params['clone_type'] == 'opportunistic': - if not clone_is_template: - # Cloned container is not a template, so we need our 'storage' parameter - valid_clone_parameters.append('storage') - elif self.module.params['clone_type'] == 'full': - create_full_copy = True - valid_clone_parameters.append('storage') - - clone_parameters = {} - - if create_full_copy: - clone_parameters['full'] = '1' - else: - clone_parameters['full'] = '0' - for param in valid_clone_parameters: - if self.module.params[param] is not None: - clone_parameters[param] = self.module.params[param] - - taskid = getattr(proxmox_node, VZ_TYPE)(clone).clone.post(newid=vmid, **clone_parameters) - else: - taskid = getattr(proxmox_node, VZ_TYPE).create(vmid=vmid, storage=storage, memory=memory, swap=swap, **kwargs) - - while timeout: - if self.api_task_ok(node, taskid): - return True - timeout -= 1 - if timeout == 0: - self.module.fail_json(vmid=vmid, node=node, msg='Reached timeout while waiting for creating VM. Last line in task before timeout: %s' % - proxmox_node.tasks(taskid).log.get()[:1]) - - time.sleep(1) - return False - - def start_instance(self, vm, vmid, timeout): - taskid = getattr(self.proxmox_api.nodes(vm['node']), VZ_TYPE)(vmid).status.start.post() - while timeout: - if self.api_task_ok(vm['node'], taskid): - return True - timeout -= 1 - if timeout == 0: - self.module.fail_json(vmid=vmid, taskid=taskid, msg='Reached timeout while waiting for starting VM. 
Last line in task before timeout: %s' % - self.proxmox_api.nodes(vm['node']).tasks(taskid).log.get()[:1]) - - time.sleep(1) - return False - - def stop_instance(self, vm, vmid, timeout, force): - if force: - taskid = getattr(self.proxmox_api.nodes(vm['node']), VZ_TYPE)(vmid).status.shutdown.post(forceStop=1) - else: - taskid = getattr(self.proxmox_api.nodes(vm['node']), VZ_TYPE)(vmid).status.shutdown.post() - while timeout: - if self.api_task_ok(vm['node'], taskid): - return True - timeout -= 1 - if timeout == 0: - self.module.fail_json(vmid=vmid, taskid=taskid, msg='Reached timeout while waiting for stopping VM. Last line in task before timeout: %s' % - self.proxmox_api.nodes(vm['node']).tasks(taskid).log.get()[:1]) - - time.sleep(1) - return False - - def convert_to_template(self, vm, vmid, timeout, force): - if getattr(self.proxmox_api.nodes(vm['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'running' and force: - self.stop_instance(vm, vmid, timeout, force) - # not sure why, but templating a container doesn't return a taskid - getattr(self.proxmox_api.nodes(vm['node']), VZ_TYPE)(vmid).template.post() - return True - - def umount_instance(self, vm, vmid, timeout): - taskid = getattr(self.proxmox_api.nodes(vm['node']), VZ_TYPE)(vmid).status.umount.post() - while timeout: - if self.api_task_ok(vm['node'], taskid): - return True - timeout -= 1 - if timeout == 0: - self.module.fail_json(vmid=vmid, taskid=taskid, msg='Reached timeout while waiting for unmounting VM. 
Last line in task before timeout: %s' % - self.proxmox_api.nodes(vm['node']).tasks(taskid).log.get()[:1]) - - time.sleep(1) - return False - - -def main(): - module_args = proxmox_auth_argument_spec() - proxmox_args = dict( - vmid=dict(type='int', required=False), +def get_proxmox_args(): + return dict( + vmid=dict(type="int", required=False), node=dict(), pool=dict(), password=dict(no_log=True), hostname=dict(), ostemplate=dict(), - disk=dict(type='str'), + disk=dict(type="str"), disk_volume=dict( type="dict", options=dict( @@ -1015,12 +646,12 @@ def main(): ("host_path", "size"), ], ), - cores=dict(type='int'), - cpus=dict(type='int'), - memory=dict(type='int'), - swap=dict(type='int'), - netif=dict(type='dict'), - mounts=dict(type='dict'), + cores=dict(type="int"), + cpus=dict(type="int"), + memory=dict(type="int"), + swap=dict(type="int"), + netif=dict(type="dict"), + mounts=dict(type="dict"), mount_volumes=dict( type="list", elements="dict", @@ -1044,282 +675,1064 @@ def main(): ], ), ip_address=dict(), - ostype=dict(default='auto', choices=[ - 'auto', 'debian', 'devuan', 'ubuntu', 'centos', 'fedora', 'opensuse', 'archlinux', 'alpine', 'gentoo', 'nixos', 'unmanaged' - ]), - onboot=dict(type='bool'), - features=dict(type='list', elements='str'), - startup=dict(type='list', elements='str'), - storage=dict(default='local'), - cpuunits=dict(type='int'), + ostype=dict( + default="auto", + choices=[ + "auto", + "debian", + "devuan", + "ubuntu", + "centos", + "fedora", + "opensuse", + "archlinux", + "alpine", + "gentoo", + "nixos", + "unmanaged", + ], + ), + onboot=dict(type="bool"), + features=dict(type="list", elements="str"), + startup=dict(type="list", elements="str"), + storage=dict(default="local"), + cpuunits=dict(type="int"), nameserver=dict(), searchdomain=dict(), - timeout=dict(type='int', default=30), - update=dict(type='bool', default=False), - force=dict(type='bool', default=False), - purge=dict(type='bool', default=False), - 
def get_ansible_module():
    """Build the AnsibleModule from the merged auth + proxmox argument specs."""
    spec = proxmox_auth_argument_spec()
    spec.update(get_proxmox_args())

    return AnsibleModule(
        argument_spec=spec,
        required_if=[
            ("state", "present", ["node", "hostname"]),
            # Require one of clone, ostemplate, or update.
            # Together with mutually_exclusive this ensures that we either
            # clone a container or create a new one from a template file.
            ("state", "present", ("clone", "ostemplate", "update"), True),
        ],
        required_together=[("api_token_id", "api_token_secret")],
        required_one_of=[
            ("api_password", "api_token_id"),
            ("vmid", "hostname"),
        ],
        mutually_exclusive=[
            # Creating a new container is done either by cloning an existing one, or based on a template.
            ("clone", "ostemplate", "update"),
            ("disk", "disk_volume"),
            ("storage", "disk_volume"),
            ("mounts", "mount_volumes"),
        ],
    )
+ ("state", "present", ("clone", "ostemplate", "update"), True), ], required_together=[("api_token_id", "api_token_secret")], - required_one_of=[("api_password", "api_token_id")], + required_one_of=[ + ("api_password", "api_token_id"), + ("vmid", "hostname"), + ], mutually_exclusive=[ - ( - "clone", - "ostemplate", - "update", - ), # Creating a new container is done either by cloning an existing one, or based on a template. - ("disk", "disk_volume", "storage"), + # Creating a new container is done either by cloning an existing one, or based on a template. + ("clone", "ostemplate", "update"), + ("disk", "disk_volume"), + ("storage", "disk_volume"), ("mounts", "mount_volumes"), ], ) + +class ProxmoxLxcAnsible(ProxmoxAnsible): + MINIMUM_VERSIONS = { + "disk_volume": "5.0", + "mount_volumes": "5.0", + "tags": "6.1", + "timezone": "6.3", + } + + def __init__(self, module): + super(ProxmoxLxcAnsible, self).__init__(module) + + self.VZ_TYPE = "openvz" if self.version() < LooseVersion("4.0") else "lxc" + self.params = self.module.params + + def run(self): + self.check_supported_features() + + state = self.params.get("state") + + vmid = self.params.get("vmid") + hostname = self.params.get("hostname") + + if not vmid and not hostname: + self.module.fail_json(msg="Either VMID or hostname must be provided.") + + if state == "present": + self.lxc_present( + vmid, + hostname, + node=self.params.get("node"), + update=self.params.get("update"), + force=self.params.get("force"), + ) + elif state == "absent": + self.lxc_absent( + vmid, + hostname, + node=self.params.get("node"), + timeout=self.params.get("timeout"), + purge=self.params.get("purge"), + ) + elif state == "started": + self.lxc_started( + vmid, + hostname, + node=self.params.get("node"), + timeout=self.params.get("timeout"), + ) + elif state == "stopped": + self.lxc_stopped( + vmid, + hostname, + node=self.params.get("node"), + timeout=self.params.get("timeout"), + force=self.params.get("force"), + ) + elif state == 
"restarted": + self.lxc_restarted( + vmid, + hostname, + node=self.params.get("node"), + timeout=self.params.get("timeout"), + force=self.params.get("force"), + ) + elif state == "template": + self.lxc_to_template( + vmid, + hostname, + node=self.params.get("node"), + timeout=self.params.get("timeout"), + force=self.params.get("force"), + ) + + def lxc_present(self, vmid, hostname, node, update, force): + try: + lxc = self.get_lxc_resource(vmid, hostname) + vmid = vmid or lxc["id"].split("/")[-1] + node = node or lxc["node"] + except LookupError: + lxc = None + vmid = vmid or self.get_nextvmid() + + if node is None: + raise ValueError( + "Argument 'node' is None, but should be found from VMID/hostname or provided." + ) + + # check if the container exists already + if lxc is not None: + if update is None: + # TODO: Remove deprecation warning in version 11.0.0 + self.module.deprecate( + msg="The default value of false for 'update' has been deprecated and will be changed to true in version 11.0.0.", + version="11.0.0", + collection_name="community.general", + ) + update = False + + if update: + # Update it if we should + identifier = self.format_vm_identifier(vmid, hostname) + self.update_lxc_instance( + vmid, + node, + cores=self.params.get("cores"), + cpus=self.params.get("cpus"), + cpuunits=self.params.get("cpuunits"), + description=self.params.get("description"), + disk=self.params.get("disk"), + disk_volume=self.params.get("disk_volume"), + features=self.params.get("features"), + hookscript=self.params.get("hookscript"), + hostname=self.params.get("hostname"), + ip_address=self.params.get("ip_address"), + memory=self.params.get("memory"), + mounts=self.params.get("mounts"), + mount_volumes=self.params.get("mount_volumes"), + nameserver=self.params.get("nameserver"), + netif=self.params.get("netif"), + onboot=ansible_to_proxmox_bool(self.params.get("onboot")), + searchdomain=self.params.get("searchdomain"), + startup=self.params.get("startup"), + 
swap=self.params.get("swap"), + tags=self.params.get("tags"), + timezone=self.params.get("timezone"), + ) + self.module.exit_json( + changed=True, vmid=vmid, msg="VM %s has been updated." % identifier + ) + elif not force: + # We're done if it shouldn't be forcefully created + identifier = self.format_vm_identifier(vmid, lxc["name"]) + self.module.exit_json( + changed=False, vmid=vmid, msg="VM %s already exists." % identifier + ) + self.module.debug( + "VM %s already exists, but we don't update and instead forcefully recreate it." + % identifier + ) + + self.new_lxc_instance( + vmid, + hostname, + node=self.params.get("node"), + clone_from=self.params.get("clone"), + ostemplate=self.params.get("ostemplate"), + force=force, + ) + + def lxc_absent(self, vmid, hostname, node, timeout, purge): + try: + lxc = self.get_lxc_resource(vmid, hostname) + except LookupError: + identifier = self.format_vm_identifier(vmid, hostname) + self.module.exit_json( + changed=False, vmid=vmid, msg="VM %s is already absent." % (identifier) + ) + + vmid = vmid or lxc["id"].split("/")[-1] + node = node or lxc["node"] + + lxc_status = self.get_lxc_status(vmid, node) + identifier = self.format_vm_identifier(vmid, hostname) + + if lxc_status == "running": + self.module.exit_json( + changed=False, + vmid=vmid, + msg="VM %s is running. Stop it before deletion." % identifier, + ) + if lxc_status == "mounted": + self.module.exit_json( + changed=False, + vmid=vmid, + msg="VM %s is mounted. Stop it with force option before deletion." + % identifier, + ) + + self.remove_lxc_instance(vmid, node, timeout, purge) + self.module.exit_json( + changed=True, vmid=vmid, msg="VM %s removed." 
% identifier + ) + + def lxc_started(self, vmid, hostname, node, timeout): + lxc = self.get_lxc_resource(vmid, hostname) + vmid = vmid or lxc["id"].split("/")[-1] + hostname = hostname or lxc["name"] + identifier = self.format_vm_identifier(vmid, hostname) + node = node or lxc["node"] + lxc_status = self.get_lxc_status(vmid, lxc["node"]) + + if lxc_status == "running": + self.module.exit_json( + changed=False, vmid=vmid, msg="VM %s is already running." % identifier + ) + + self.start_lxc_instance(vmid, node, timeout) + self.module.exit_json( + changed=True, vmid=vmid, msg="VM %s started." % identifier + ) + + def lxc_stopped(self, vmid, hostname, node, timeout, force): + lxc = self.get_lxc_resource(vmid, hostname) + vmid = vmid or lxc["id"].split("/")[-1] + hostname = hostname or lxc["name"] + identifier = self.format_vm_identifier(vmid, hostname) + node = node or lxc["node"] + lxc_status = self.get_lxc_status(vmid, node) + + if lxc_status == "mounted": + if force: + self.umount_lxc_instance(vmid, hostname, timeout) + else: + self.module.exit_json( + changed=False, + vmid=vmid, + msg="VM %s is already stopped, but mounted. Use force option to umount it." + % identifier, + ) + + if lxc_status == "stopped": + self.module.exit_json( + changed=False, vmid=vmid, msg="VM %s is already stopped." % identifier + ) + + self.stop_lxc_instance(vmid, node, timeout, force) + self.module.exit_json( + changed=True, vmid=vmid, msg="VM %s stopped." % identifier + ) + + def lxc_restarted(self, vmid, hostname, node, timeout, force): + lxc = self.get_lxc_resource(vmid, hostname) + + vmid = vmid or lxc["id"].split("/")[-1] + hostname = hostname or lxc["name"] + node = node or lxc["node"] + + identifier = self.format_vm_identifier(vmid, hostname) + lxc_status = self.get_lxc_status(vmid, node) + + if lxc_status in ["stopped", "mounted"]: + self.module.exit_json( + changed=False, vmid=vmid, msg="VM %s is not running." 
% identifier + ) + + self.stop_lxc_instance(vmid, node, timeout, force) + self.start_lxc_instance(vmid, node, timeout) + self.module.exit_json( + changed=True, vmid=vmid, msg="VM %s is restarted." % identifier + ) + + def lxc_to_template(self, vmid, hostname, node, timeout, force): + lxc = self.get_lxc_resource(vmid, hostname) + vmid = vmid or lxc["id"].split("/")[-1] + hostname = hostname or lxc["name"] + node = node or lxc["node"] + identifier = self.format_vm_identifier(vmid, hostname) + + if self.is_template_container(node, vmid): + self.module.exit_json( + changed=False, + vmid=vmid, + msg="VM %s is already a template." % identifier, + ) + + lxc_status = self.get_lxc_status(vmid, node) + if lxc_status == "running" and force: + self.stop_instance(vmid, hostname, node, timeout, force) + + proxmox_node = self.proxmox_api.nodes(node) + getattr(proxmox_node, self.VZ_TYPE)(vmid).template.post() + self.module.exit_json( + changed=True, vmid=vmid, msg="VM %s converted to template." % identifier + ) + + def update_lxc_instance(self, vmid, node, **kwargs): + if self.VZ_TYPE != "lxc": + self.module.fail_json( + msg="Updating LXC containers is only supported for LXC-enabled clusters in PVE 4.0 and above." 
+ ) + + kwargs = {k: v for k, v in kwargs.items() if v is not None} + + self.validate_tags(kwargs.get("tags", [])) + + if "features" in kwargs: + kwargs["features"] = ",".join(kwargs.pop("features")) + if "startup" in kwargs: + kwargs["startup"] = ",".join(kwargs.pop("startup")) + + disk_updates = self.process_disk_keys( + vmid, + node, + kwargs.pop("disk", None), + kwargs.pop("disk_volume", None), + ) + mounts_updates = self.process_mount_keys( + vmid, + node, + kwargs.pop("mounts", None), + kwargs.pop("mount_volumes", None), + ) + kwargs.update(disk_updates) + kwargs.update(mounts_updates) + + if "cpus" in kwargs: + kwargs["cpulimit"] = kwargs.pop("cpus") + if "netif" in kwargs: + kwargs.update(kwargs.pop("netif")) + + if "pubkey" in kwargs: + pubkey = kwargs.pop("pubkey") + if self.version() >= LooseVersion("4.2"): + kwargs["ssh-public-keys"] = pubkey + else: + self.module.warn( + "'pubkey' is not supported for PVE 4.1 and below. Ignoring keyword." + ) + + # fetch current config + proxmox_node = self.proxmox_api.nodes(node) + current_config = getattr(proxmox_node, self.VZ_TYPE)(vmid).config.get() + + # create diff between the current and requested config + diff = {} + for arg, value in kwargs.items(): + # if the arg isn't in the current config, it needs to be added + if arg not in current_config: + diff[arg] = value + elif isinstance(value, str): + # compare all string values as lists as some of them may be lists separated by commas. order doesn't matter + current_values = current_config[arg].split(",") + requested_values = value.split(",") + for new_value in requested_values: + if new_value not in current_values: + diff[arg] = value + break + # if it's not a list (or string) just compare the values + # some types don't match with the API, so force a string comparison + elif str(value) != str(current_config[arg]): + diff[arg] = value + + if not diff: + self.module.exit_json( + changed=False, vmid=vmid, msg="Container config is already up to date." 
+ ) + + # update the config + getattr(proxmox_node, self.VZ_TYPE)(vmid).config.put( + vmid=vmid, node=node, **kwargs + ) + + def new_lxc_instance(self, vmid, hostname, node, clone_from, ostemplate, force): + identifier = self.format_vm_identifier(vmid, hostname) + + if clone_from is not None: + self.clone_lxc_instance( + vmid, + node, + clone_from, + clone_type=self.params.get("clone_type"), + timeout=self.params.get("timeout"), + description=self.params.get("description"), + hostname=hostname, + pool=self.params.get("pool"), + storage=self.params.get("storage"), + ) + self.module.exit_json( + changed=True, + vmid=vmid, + msg="Cloned VM %s from %d" % (identifier, clone_from), + ) + + if ostemplate is not None: + self.create_lxc_instance( + vmid, + node, + ostemplate, + timeout=self.params.get("timeout"), + cores=self.params.get("cores"), + cpus=self.params.get("cpus"), + cpuunits=self.params.get("cpuunits"), + description=self.params.get("description"), + disk=self.params.get("disk"), + disk_volume=self.params.get("disk_volume"), + features=self.params.get("features"), + force=ansible_to_proxmox_bool(force), + hookscript=self.params.get("hookscript"), + hostname=hostname, + ip_address=self.params.get("ip_address"), + memory=self.params.get("memory"), + mounts=self.params.get("mounts"), + mount_volumes=self.params.get("mount_volumes"), + nameserver=self.params.get("nameserver"), + netif=self.params.get("netif"), + onboot=ansible_to_proxmox_bool(self.params.get("onboot")), + ostype=self.params.get("ostype"), + password=self.params.get("password"), + pool=self.params.get("pool"), + pubkey=self.params.get("pubkey"), + searchdomain=self.params.get("searchdomain"), + startup=self.params.get("startup"), + storage=self.params.get("storage"), + swap=self.params.get("swap"), + tags=self.params.get("tags"), + timezone=self.params.get("timezone"), + unprivileged=ansible_to_proxmox_bool(self.params.get("unprivileged")), + ) + self.module.exit_json( + changed=True, + vmid=vmid, 
+ msg="Created VM %s from template %s" % (identifier, ostemplate), + ) + + self.module.fail_json( + vmid=vmid, + msg="VM %s does not exist but neither clone nor ostemplate were specified!" + % identifier, + ) + + def create_lxc_instance(self, vmid, node, ostemplate, timeout, **kwargs): + template_store = ostemplate.split(":")[0] + if not self.content_check(node, ostemplate, template_store): + self.module.fail_json( + vmid=vmid, + msg="ostemplate %s does not exist on node %s and storage %s." + % (ostemplate, node, template_store), + ) + + disk_updates = self.process_disk_keys( + vmid, + node, + kwargs.pop("disk"), + kwargs.pop("disk_volume"), + ) + mounts_updates = self.process_mount_keys( + vmid, + node, + kwargs.pop("mounts"), + kwargs.pop("mount_volumes"), + ) + kwargs.update(disk_updates) + kwargs.update(mounts_updates) + + # Remove empty values from kwargs + kwargs = {k: v for k, v in kwargs.items() if v is not None} + + if "features" in kwargs: + kwargs["features"] = ",".join(kwargs.pop("features")) + + if "startup" in kwargs: + kwargs["startup"] = ",".join(kwargs.pop("startup")) + + self.validate_tags(kwargs.get("tags", [])) + + if self.VZ_TYPE == "lxc": + if "cpus" in kwargs: + kwargs["cpuunits"] = kwargs.pop("cpus") + kwargs.update(kwargs.pop("netif", {})) + else: + if "mount_volumes" in kwargs: + kwargs.pop("mount_volumes") + self.module.warn( + "'mount_volumes' is not supported for non-LXC clusters. Ignoring keyword." + ) + + if "pubkey" in kwargs: + pubkey = kwargs.pop("pubkey") + if self.version() >= LooseVersion("4.2"): + kwargs["ssh-public-keys"] = pubkey + else: + self.module.warn( + "'pubkey' is not supported for PVE 4.1 and below. Ignoring keyword." 
+ ) + + if kwargs.get("ostype") == "auto": + kwargs.pop("ostype") + + proxmox_node = self.proxmox_api.nodes(node) + taskid = getattr(proxmox_node, self.VZ_TYPE).create( + vmid=vmid, ostemplate=ostemplate, **kwargs + ) + self.handle_api_timeout( + vmid, + node, + taskid, + timeout, + "Reached timeout while waiting for creation of VM %s from template %s" + % (vmid, ostemplate), + ) + + def clone_lxc_instance(self, vmid, node, clone_from, clone_type, timeout, **kwargs): + if self.VZ_TYPE != "lxc": + self.module.fail_json( + msg="Cloning is only supported for LXC-enabled clusters in PVE 4.0 and above." + ) + + # Remove empty values from kwargs + kwargs = {k: v for k, v in kwargs.items() if v is not None} + + target_is_template = self.is_template_container(node, clone_from) + # By default, create a full copy only when the cloned container is not a template. + create_full_copy = not target_is_template + + # Only accept parameters that are compatible with the clone endpoint. + valid_clone_parameters = ["hostname", "pool", "description"] + + if "storage" not in kwargs and target_is_template: + # Cloning a template, so create a full copy instead of a linked copy + create_full_copy = True + elif "storage" not in kwargs and not target_is_template: + self.module.fail_json( + changed=False, + msg="Clone target container is not a template, storage needs to be specified.", + ) + + if clone_type == "linked" and not target_is_template: + self.module.fail_json( + changed=False, + msg="Cloning type 'linked' is only supported for template containers.", + ) + elif clone_type == "opportunistic" and not target_is_template: + # Cloned container is not a template, so we need our 'storage' parameter + valid_clone_parameters.append("storage") + elif clone_type == "full": + create_full_copy = True + valid_clone_parameters.append("storage") + + clone_parameters = {} + clone_parameters["full"] = ansible_to_proxmox_bool(create_full_copy) + + for param in valid_clone_parameters: + if param in 
kwargs: + clone_parameters[param] = kwargs[param] + + proxmox_node = self.proxmox_api.nodes(node) + taskid = getattr(proxmox_node, self.VZ_TYPE)(clone_from).clone.post( + newid=vmid, **clone_parameters + ) + self.handle_api_timeout( + vmid, + node, + taskid, + timeout, + timeout_msg="Reached timeout while waiting for VM to clone.", + ) + + def start_lxc_instance(self, vmid, node, timeout): + proxmox_node = self.proxmox_api.nodes(node) + taskid = getattr(proxmox_node, self.VZ_TYPE)(vmid).status.start.post() + + self.handle_api_timeout( + vmid, + node, + taskid, + timeout, + timeout_msg="Reached timeout while waiting for VM to start.", + ) + + def stop_lxc_instance(self, vmid, node, timeout, force): + stop_params = {} + if force: + stop_params["forceStop"] = 1 + + proxmox_node = self.proxmox_api.nodes(node) + taskid = getattr(proxmox_node, self.VZ_TYPE)(vmid).status.shutdown.post( + **stop_params + ) + + self.handle_api_timeout( + vmid, + node, + taskid, + timeout, + timeout_msg="Reached timeout while waiting for VM to stop.", + ) + + def umount_lxc_instance(self, vmid, node, timeout): + proxmox_node = self.proxmox_api.nodes(node) + taskid = getattr(proxmox_node, self.VZ_TYPE)(vmid).status.unmount.post() + + self.handle_api_timeout( + vmid, + node, + taskid, + timeout, + timeout_msg="Reached timeout while waiting for VM to be unmounted.", + ) + + def remove_lxc_instance(self, vmid, node, timeout, purge): + delete_params = {} + if purge: + delete_params["purge"] = 1 + + proxmox_node = self.proxmox_api.nodes(node) + taskid = getattr(proxmox_node, self.VZ_TYPE).delete(vmid, **delete_params) + + self.handle_api_timeout( + vmid, + node, + taskid, + timeout, + timeout_msg="Reached timeout while waiting for VM to be removed.", + ) + + def process_disk_keys(self, vmid, node, disk, disk_volume): + """ + Process disk keys and return a formatted disk volume with the `rootfs` key. + + Args: + vmid (int): VM identifier. + node (str): Node identifier. 
+ disk (str, optional): Disk key in the format 'storage:volume'. Defaults to None. + disk_volume (Dict[str, Any], optional): Disk volume data. Defaults to None. + + Returns: + Dict[str, str]: Formatted disk volume with the `rootfs` or `disk` key (depending on the `VZ_TYPE`), or an empty dict if no disk volume is specified. + """ + if disk is None and disk_volume is None: + return {} + + disk_dict = {} + + if disk is not None: + if disk.isdigit(): + disk_dict["rootfs"] = disk + else: + disk_volume = self.parse_disk_string(disk) + + if disk_volume is not None: + disk_dict = self.build_volume(vmid, node, key="rootfs", **disk_volume) + + if self.VZ_TYPE != "lxc": + disk_dict["disk"] = disk_dict.pop("rootfs") + + return disk_dict + + def process_mount_keys(self, vmid, node, mounts, mount_volumes): + """ + Process mount keys and return a formatted mount volumes with the `mp[n]` keys. + + Args: + vmid (str): VM identifier. + node (str): Node identifier. + mounts (str, optional): Mount key in the format 'pool:volume'. Defaults to None. + mount_volumes (Dict[str, Any], optional): Mount volume data. Defaults to None. + + Returns: + Dict[str, str]: Formatted mount volumes with the `mp[n]` keys, or an empty dict if no mount volumes are specified. + """ + if mounts is not None: + mount_volumes = [] + for mount_key, mount_string in mounts.items(): + mount_config = self.parse_disk_string(mount_string) + mount_volumes.append(dict(id=mount_key, **mount_config)) + elif mount_volumes is None or mount_volumes == []: + return {} + + mounts_dict = {} + for mount_config in mount_volumes: + mount_key = mount_config.pop("id") + mount_dict = self.build_volume(vmid, node, key=mount_key, **mount_config) + mounts_dict.update(mount_dict) + + return mounts_dict + + def parse_disk_string(self, disk_string): + """ + Parse a disk string and return a dictionary with the disk details. + + Args: + disk_string (str): Disk string. + + Returns: + Dict[str, Any]: Disk details. 
+
+ Note: Below are some example disk strings that this function MUST be able to parse:
+ "acl=0,thin1:base-100-disk-1,size=8G"
+ "thin1:10,backup=0"
+ "local:20"
+ "local-lvm:0.50"
+ "tmp-dir:300/subvol-300-disk-0.subvol,acl=1,size=0T"
+ "tmplog-dir:300/vm-300-disk-0.raw,mp=/var/log,mountoptions=noatime,size=32M"
+ "volume=local-lvm:base-100-disk-1,size=20G"
+ "/mnt/bindmounts/shared,mp=/shared"
+ "volume=/dev/USB01,mp=/mnt/usb01"
+ """
+ args = disk_string.split(",")
+ # If the volume is not explicitly defined but implicit by only passing a key,
+ # add the "volume=" key prefix for ease of parsing.
+ args = ["volume=" + arg if "=" not in arg else arg for arg in args]
+ # Then create a dictionary from the arguments
+ disk_kwargs = dict(map(lambda item: item.split("="), args))
+
+ VOLUME_PATTERN = r"""(?x)
+ ^
+ (?:
+ (?:
+ (?P<storage>[\w\-.]+):
+ (?:
+ (?P<size>\d+\.?\d*)|
+ (?P<volume>[^,\s]+)
+ )
+ )|
+ (?P<host_path>[^,\s]+)
+ )
+ $
+ """
+ # DISCLAIMER:
+ # There are two things called a "volume":
+ # 1. The "volume" key which describes the storage volume, device or directory to mount into the container.
+ # 2. The storage volume of a storage-backed mount point in the PVE storage sub system.
+ # In this section, we parse the "volume" key and check which type of mount point we are dealing with.
+ pattern = re.compile(VOLUME_PATTERN)
+ volume_string = disk_kwargs.pop("volume")
+ match = pattern.match(volume_string)
+ if match is None:
+ raise ValueError("Invalid volume string: %s" % volume_string)
+ match_dict = match.groupdict()
+ match_dict = {k: v for k, v in match_dict.items() if v is not None}
+
+ if "storage" in match_dict and "volume" in match_dict:
+ disk_kwargs["storage"] = match_dict["storage"]
+ disk_kwargs["volume"] = match_dict["volume"]
+ elif "storage" in match_dict and "size" in match_dict:
+ disk_kwargs["storage"] = match_dict["storage"]
+ disk_kwargs["size"] = match_dict["size"]
+ elif "host_path" in match_dict:
+ disk_kwargs["host_path"] = match_dict["host_path"]
+
+ # Pattern matching only available in Python 3.10+
+ # TODO: Uncomment the following code once only Python 3.10+ is supported
+ # match match_dict:
+ # case {"storage": storage, "volume": volume}:
+ # disk_kwargs["storage"] = storage
+ # disk_kwargs["volume"] = volume
+
+ # case {"storage": storage, "size": size}:
+ # disk_kwargs["storage"] = storage
+ # disk_kwargs["size"] = size
+
+ # case {"host_path": host_path}:
+ # disk_kwargs["host_path"] = host_path
+
+ return disk_kwargs
+
+ def build_volume(self, vmid, node, key, storage=None, volume=None, host_path=None, size=None, mountpoint=None, options=None, **kwargs):
+ """
+ Build a volume string for the specified VM.
+
+ Args:
+ vmid (str): The VM ID.
+ node (str): The node where the VM resides.
+ key (str): The key for the volume in the VM's config.
+ storage (str, optional): The storage pool where the volume resides. Defaults to None.
+ volume (str, optional): The name of the volume. Defaults to None.
+ host_path (str, optional): The host path to mount. Defaults to None.
+ size (str | int, optional): The size of the volume in GiB. Defaults to None.
+ mountpoint (str, optional): The mountpoint for the volume. Defaults to None.
+ options (Dict[str, Any], optional): Additional options for the volume. Defaults to None.
+ **kwargs: Additional keyword arguments. + + Returns: + Dict[str, str]: The built volume string in the format {'volume_key': 'volume_string'}. + + Note: Further documentation can be found in the proxmox-api documentation: https://pve.proxmox.com/wiki/Linux_Container#pct_mount_points + Note: To build a valid volume string, we need ONE of the following: + A volume name, storage name, and size + Only a storage name and size (to create a new volume or assign the volume automatically) + A host directory to mount into the container + """ + if isinstance(size, int): + size = str(size) + if size is not None and isfloat(size): + size += "G" # default to GiB + # Handle volume checks/creation + # TODO: Change the code below to pattern matching once only Python 3.10+ is supported + # 1. Check if defined volume exists + if volume is not None: + storage_content = self.get_storage_content(node, storage, vmid=vmid) + vol_ids = [vol["volid"] for vol in storage_content] + volid = "{storage}:{volume}".format(storage=storage, volume=volume) + if volid not in vol_ids: + self.module.fail_json( + changed=False, + msg="Storage {storage} does not contain volume {volume}".format( + storage=storage, + volume=volume, + ), + ) + vol_string = "{storage}:{volume},size={size}".format( + storage=storage, volume=volume, size=size + ) + # 2. If volume not defined (but storage is), check if it exists + elif storage is not None: + proxmox_node = self.proxmox_api.nodes( + node + ) # The node must exist, but not the LXC + try: + vol = proxmox_node.lxc(vmid).get("config").get(key) + volume = self.parse_disk_string(vol).get("volume") + vol_string = "{storage}:{volume},size={size}".format( + storage=storage, volume=volume, size=size + ) + + # If not, we have proxmox create one using the special syntax + except Exception: + if size is None: + raise ValueError( + "Size must be provided for storage-backed volume creation." 
+ ) + elif size.endswith("G"): + size = size.rstrip("G") + vol_string = "{storage}:{size}".format(storage=storage, size=size) + else: + raise ValueError( + "Size must be provided in GiB for storage-backed volume creation. Convert it to GiB or allocate a new storage manually." + ) + # 3. If we have a host_path, we don't have storage, a volume, or a size + # Then we don't have to do anything, just build and return the vol_string + elif host_path is not None: + vol_string = "" + else: + raise ValueError( + "Could not build a valid volume string. One of volume, storage, or host_path must be provided." + ) + + if host_path is not None: + vol_string += "," + host_path + + if mountpoint is not None: + vol_string += ",mp={}".format(mountpoint) + + if options is not None: + vol_string += "," + ",".join( + ["{0}={1}".format(k, v) for k, v in options.items()] + ) + + if kwargs: + vol_string += "," + ",".join( + ["{0}={1}".format(k, v) for k, v in kwargs.items()] + ) + return {key: vol_string} + + def get_lxc_resource(self, vmid, hostname): + if not vmid and not hostname: + self.module.fail_json(msg="Either VMID or hostname must be provided.") + + if vmid: + vm = self.get_lxc_resource_by_id(vmid) + elif hostname: + vm = self.get_lxc_resource_by_hostname(hostname) + + vmid = vm["vmid"] + if vm["type"] != self.VZ_TYPE: + identifier = self.format_vm_identifier(vmid, hostname) + self.module.fail_json( + msg="The specified VM %s is not an %s." % (identifier, self.VZ_TYPE) + ) + + return vm + + def get_lxc_resource_by_id(self, vmid): + vms = self.get_vm_resources() + + vms = [vm for vm in vms if vm["vmid"] == vmid] + if len(vms) == 0: + raise LookupError("VM with VMID %d does not exist in cluster." % vmid) + + return vms[0] + + def get_lxc_resource_by_hostname(self, hostname): + vms = self.get_vm_resources() + + vms = [vm for vm in vms if vm["name"] == hostname] + if len(vms) == 0: + raise LookupError( + "VM with hostname %s does not exist in cluster." 
% hostname + ) + elif len(vms) > 1: + raise ValueError( + "Multiple VMs found with hostname %s. Please specify VMID." % hostname + ) + + return vms[0] + + def get_vm_resources(self): + try: + return self.proxmox_api.cluster.resources.get(type="vm") + except Exception as e: + self.module.fail_json( + msg="Unable to retrieve list of %s VMs from cluster resources: %s" + % (self.VZ_TYPE, e) + ) + + def get_lxc_status(self, vmid, node_name): + try: + proxmox_node = self.proxmox_api.nodes(node_name) + except Exception as e: + self.module.fail_json(msg="Unable to retrieve node information: %s" % e) + return getattr(proxmox_node, self.VZ_TYPE)(vmid).status.current.get() + + def format_vm_identifier(self, vmid, hostname): + if vmid and hostname: + return "%s (%s)" % (hostname, vmid) + elif hostname: + return hostname + else: + return to_native(vmid) + + def handle_api_timeout(self, vmid, node, taskid, timeout, timeout_msg=""): + if timeout_msg != "": + timeout_msg = "%s " % timeout_msg + + while timeout > 0: + if self.api_task_ok(node, taskid): + return + timeout -= 1 + time.sleep(1) + + self.module.fail_json( + vmid=vmid, + taskid=taskid, + msg="%sLast line in task before timeout: %s" + % (timeout_msg, self.proxmox_api.nodes(node).tasks(taskid).log.get()[:1]), + ) + + def is_template_container(self, node, target): + """Check if the specified container is a template.""" + proxmox_node = self.proxmox_api.nodes(node) + config = getattr(proxmox_node, self.VZ_TYPE)(target).config.get() + return config.get("template", False) + + def content_check(self, node, ostemplate, template_store): + """Check if the specified ostemplate is present in the specified storage.""" + proxmox_node = self.proxmox_api.nodes(node) + storage_contents = proxmox_node.storage(template_store).content.get() + return any(content["volid"] == ostemplate for content in storage_contents) + + def validate_tags(self, tags): + """Check if the specified tags are valid.""" + re_tag = 
re.compile(r"^[a-z0-9_][a-z0-9_\-\+\.]*$") + for tag in tags: + if not re_tag.match(tag): + self.module.fail_json(msg="%s is not a valid tag" % tag) + return False + return True + + def check_supported_features(self): + for option, version in self.MINIMUM_VERSIONS.items(): + if self.version() < LooseVersion(version) and option in self.module.params: + self.module.fail_json( + changed=False, + msg="Feature {option} is only supported in PVE {version}+, and you're using PVE {pve_version}".format( + option=option, version=version, pve_version=self.version() + ), + ) + + +def isfloat(value): + if value is None: + return False + try: + float(value) + return True + except ValueError: + return False + + +def main(): + module = get_ansible_module() proxmox = ProxmoxLxcAnsible(module) - global VZ_TYPE - VZ_TYPE = 'openvz' if proxmox.version() < LooseVersion('4.0') else 'lxc' - - state = module.params['state'] - vmid = module.params['vmid'] - node = module.params['node'] - disk = module.params['disk'] - cpus = module.params['cpus'] - memory = module.params['memory'] - swap = module.params['swap'] - storage = module.params['storage'] - hostname = module.params['hostname'] - if module.params['ostemplate'] is not None: - template_store = module.params['ostemplate'].split(":")[0] - timeout = module.params['timeout'] - clone = module.params['clone'] - - # If vmid not set get the Next VM id from ProxmoxAPI - # If hostname is set get the VM id from ProxmoxAPI - if not vmid and state == 'present': - vmid = proxmox.get_nextvmid() - elif not vmid and hostname: - vmid = proxmox.get_vmid(hostname) - elif not vmid: - module.exit_json(changed=False, msg="Vmid could not be fetched for the following action: %s" % state) - - # Create a new container - if state == 'present' and clone is None: - try: - if proxmox.get_vm(vmid, ignore_missing=True): - if module.params["update"]: - try: - proxmox.update_config(vmid, node, disk, cpus, memory, swap, - cores=module.params["cores"], - 
hostname=module.params["hostname"], - netif=module.params["netif"], - disk_volume=module.params["disk_volume"], - mounts=module.params["mounts"], - mount_volumes=module.params["mount_volumes"], - ip_address=module.params["ip_address"], - onboot=ansible_to_proxmox_bool(module.params["onboot"]), - cpuunits=module.params["cpuunits"], - nameserver=module.params["nameserver"], - searchdomain=module.params["searchdomain"], - features=",".join(module.params["features"]) - if module.params["features"] is not None - else None, - startup=",".join(module.params["startup"]) - if module.params["startup"] is not None - else None, - description=module.params["description"], - hookscript=module.params["hookscript"], - timezone=module.params["timezone"], - tags=module.params["tags"]) - module.exit_json( - changed=True, - vmid=vmid, - msg="Configured VM %s" % (vmid), - ) - except Exception as e: - module.fail_json( - vmid=vmid, - msg="Configuration of %s VM %s failed with exception: %s" - % (VZ_TYPE, vmid, e), - ) - if not module.params["force"]: - module.exit_json( - changed=False, - vmid=vmid, - msg="VM with vmid = %s is already exists" % vmid, - ) - # If no vmid was passed, there cannot be another VM named 'hostname' - if (not module.params['vmid'] and - proxmox.get_vmid(hostname, ignore_missing=True) and - not module.params['force']): - vmid = proxmox.get_vmid(hostname) - module.exit_json(changed=False, vmid=vmid, msg="VM with hostname %s already exists and has ID number %s" % (hostname, vmid)) - elif not proxmox.get_node(node): - module.fail_json(vmid=vmid, msg="node '%s' not exists in cluster" % node) - elif not proxmox.content_check(node, module.params['ostemplate'], template_store): - module.fail_json(vmid=vmid, msg="ostemplate '%s' not exists on node %s and storage %s" - % (module.params['ostemplate'], node, template_store)) - except Exception as e: - module.fail_json(vmid=vmid, msg="Pre-creation checks of {VZ_TYPE} VM {vmid} failed with exception: 
{e}".format(VZ_TYPE=VZ_TYPE, vmid=vmid, e=e)) - - try: - proxmox.create_instance(vmid, node, disk, storage, cpus, memory, swap, timeout, clone, - cores=module.params['cores'], - pool=module.params['pool'], - password=module.params['password'], - hostname=module.params['hostname'], - ostemplate=module.params['ostemplate'], - netif=module.params['netif'], - disk_volume=module.params["disk_volume"], - mounts=module.params['mounts'], - mount_volumes=module.params["mount_volumes"], - ostype=module.params['ostype'], - ip_address=module.params['ip_address'], - onboot=ansible_to_proxmox_bool(module.params['onboot']), - cpuunits=module.params['cpuunits'], - nameserver=module.params['nameserver'], - searchdomain=module.params['searchdomain'], - force=ansible_to_proxmox_bool(module.params['force']), - pubkey=module.params['pubkey'], - features=",".join(module.params['features']) if module.params['features'] is not None else None, - startup=",".join(module.params['startup']) if module.params['startup'] is not None else None, - unprivileged=ansible_to_proxmox_bool(module.params['unprivileged']), - description=module.params['description'], - hookscript=module.params['hookscript'], - timezone=module.params['timezone'], - tags=module.params['tags']) - - module.exit_json(changed=True, vmid=vmid, msg="Deployed VM %s from template %s" % (vmid, module.params['ostemplate'])) - except Exception as e: - module.fail_json(vmid=vmid, msg="Creation of %s VM %s failed with exception: %s" % (VZ_TYPE, vmid, e)) - - # Clone a container - elif state == 'present' and clone is not None: - try: - if proxmox.get_vm(vmid, ignore_missing=True) and not module.params['force']: - module.exit_json(changed=False, vmid=vmid, msg="VM with vmid = %s is already exists" % vmid) - # If no vmid was passed, there cannot be another VM named 'hostname' - if (not module.params['vmid'] and - proxmox.get_vmid(hostname, ignore_missing=True) and - not module.params['force']): - vmid = proxmox.get_vmid(hostname) - 
module.exit_json(changed=False, vmid=vmid, msg="VM with hostname %s already exists and has ID number %s" % (hostname, vmid)) - if not proxmox.get_vm(clone, ignore_missing=True): - module.exit_json(changed=False, vmid=vmid, msg="Container to be cloned does not exist") - except Exception as e: - module.fail_json(vmid=vmid, msg="Pre-clone checks of {VZ_TYPE} VM {vmid} failed with exception: {e}".format(VZ_TYPE=VZ_TYPE, vmid=vmid, e=e)) - - try: - proxmox.create_instance(vmid, node, disk, storage, cpus, memory, swap, timeout, clone) - - module.exit_json(changed=True, vmid=vmid, msg="Cloned VM %s from %s" % (vmid, clone)) - except Exception as e: - module.fail_json(vmid=vmid, msg="Cloning %s VM %s failed with exception: %s" % (VZ_TYPE, vmid, e)) - - elif state == 'started': - try: - vm = proxmox.get_vm(vmid) - if getattr(proxmox.proxmox_api.nodes(vm['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'running': - module.exit_json(changed=False, vmid=vmid, msg="VM %s is already running" % vmid) - - if proxmox.start_instance(vm, vmid, timeout): - module.exit_json(changed=True, vmid=vmid, msg="VM %s started" % vmid) - except Exception as e: - module.fail_json(vmid=vmid, msg="starting of VM %s failed with exception: %s" % (vmid, e)) - - elif state == 'stopped': - try: - vm = proxmox.get_vm(vmid) - - if getattr(proxmox.proxmox_api.nodes(vm['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'mounted': - if module.params['force']: - if proxmox.umount_instance(vm, vmid, timeout): - module.exit_json(changed=True, vmid=vmid, msg="VM %s is shutting down" % vmid) - else: - module.exit_json(changed=False, vmid=vmid, - msg=("VM %s is already shutdown, but mounted. 
You can use force option to umount it.") % vmid) - - if getattr(proxmox.proxmox_api.nodes(vm['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'stopped': - module.exit_json(changed=False, vmid=vmid, msg="VM %s is already shutdown" % vmid) - - if proxmox.stop_instance(vm, vmid, timeout, force=module.params['force']): - module.exit_json(changed=True, vmid=vmid, msg="VM %s is shutting down" % vmid) - except Exception as e: - module.fail_json(vmid=vmid, msg="stopping of VM %s failed with exception: %s" % (vmid, e)) - - elif state == 'template': - try: - vm = proxmox.get_vm(vmid) - - proxmox.convert_to_template(vm, vmid, timeout, force=module.params['force']) - module.exit_json(changed=True, msg="VM %s is converted to template" % vmid) - except Exception as e: - module.fail_json(vmid=vmid, msg="conversion of VM %s to template failed with exception: %s" % (vmid, e)) - - elif state == 'restarted': - try: - vm = proxmox.get_vm(vmid) - - vm_status = getattr(proxmox.proxmox_api.nodes(vm['node']), VZ_TYPE)(vmid).status.current.get()['status'] - if vm_status in ['stopped', 'mounted']: - module.exit_json(changed=False, vmid=vmid, msg="VM %s is not running" % vmid) - - if (proxmox.stop_instance(vm, vmid, timeout, force=module.params['force']) and - proxmox.start_instance(vm, vmid, timeout)): - module.exit_json(changed=True, vmid=vmid, msg="VM %s is restarted" % vmid) - except Exception as e: - module.fail_json(vmid=vmid, msg="restarting of VM %s failed with exception: %s" % (vmid, e)) - - elif state == 'absent': - if not vmid: - module.exit_json(changed=False, vmid=vmid, msg='VM with hostname = %s is already absent' % hostname) - try: - vm = proxmox.get_vm(vmid, ignore_missing=True) - if not vm: - module.exit_json(changed=False, vmid=vmid, msg="VM %s does not exist" % vmid) - - vm_status = getattr(proxmox.proxmox_api.nodes(vm['node']), VZ_TYPE)(vmid).status.current.get()['status'] - if vm_status == 'running': - module.exit_json(changed=False, vmid=vmid, msg="VM %s is 
running. Stop it before deletion." % vmid) - - if vm_status == 'mounted': - module.exit_json(changed=False, vmid=vmid, msg="VM %s is mounted. Stop it with force option before deletion." % vmid) - - delete_params = {} - - if module.params['purge']: - delete_params['purge'] = 1 - - taskid = getattr(proxmox.proxmox_api.nodes(vm['node']), VZ_TYPE).delete(vmid, **delete_params) - - while timeout: - if proxmox.api_task_ok(vm['node'], taskid): - module.exit_json(changed=True, vmid=vmid, taskid=taskid, msg="VM %s removed" % vmid) - timeout -= 1 - if timeout == 0: - module.fail_json(vmid=vmid, taskid=taskid, msg='Reached timeout while waiting for removing VM. Last line in task before timeout: %s' - % proxmox.proxmox_api.nodes(vm['node']).tasks(taskid).log.get()[:1]) - - time.sleep(1) - except Exception as e: - module.fail_json(vmid=vmid, msg="deletion of VM %s failed with exception: %s" % (vmid, to_native(e))) + try: + proxmox.run() + except Exception as e: + module.fail_json(msg="An error occurred: %s" % to_native(e)) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/plugins/modules/proxmox_backup_info.py b/plugins/modules/proxmox_backup_info.py new file mode 100644 index 0000000000..0889239b37 --- /dev/null +++ b/plugins/modules/proxmox_backup_info.py @@ -0,0 +1,244 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- +# +# Copyright (c) 2024 Marzieh Raoufnezhad +# Copyright (c) 2024 Maryam Mayabi +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + +DOCUMENTATION = """ +--- +module: proxmox_backup_info + +short_description: Retrieve information on Proxmox scheduled backups + +version_added: 10.3.0 + +description: + - Retrieve information such as backup times, VM name, VM ID, mode, backup type, and backup schedule using the Proxmox Server API. 
+ +author: + - "Marzieh Raoufnezhad (@raoufnezhad) " + - "Maryam Mayabi (@mmayabi) " + +options: + vm_name: + description: + - The name of the Proxmox VM. + - If defined, the returned list will contain backup jobs that have been parsed and filtered based on O(vm_name) value. + - Mutually exclusive with O(vm_id) and O(backup_jobs). + type: str + vm_id: + description: + - The ID of the Proxmox VM. + - If defined, the returned list will contain backup jobs that have been parsed and filtered based on O(vm_id) value. + - Mutually exclusive with O(vm_name) and O(backup_jobs). + type: str + backup_jobs: + description: + - If V(true), the module will return all backup jobs information. + - If V(false), the module will parse all backup jobs based on VM IDs and return a list of VMs' backup information. + - Mutually exclusive with O(vm_id) and O(vm_name). + default: false + type: bool + +extends_documentation_fragment: + - community.general.proxmox.documentation + - community.general.attributes + - community.general.attributes.info_module + - community.general.proxmox.actiongroup_proxmox +""" + +EXAMPLES = """ +- name: Print all backup information by VM ID and VM name + community.general.proxmox_backup_info: + api_user: 'myUser@pam' + api_password: '*******' + api_host: '192.168.20.20' + +- name: Print Proxmox backup information for a specific VM based on its name + community.general.proxmox_backup_info: + api_user: 'myUser@pam' + api_password: '*******' + api_host: '192.168.20.20' + vm_name: 'mailsrv' + +- name: Print Proxmox backup information for a specific VM based on its VM ID + community.general.proxmox_backup_info: + api_user: 'myUser@pam' + api_password: '*******' + api_host: '192.168.20.20' + vm_id: '150' + +- name: Print Proxmox all backup job information + community.general.proxmox_backup_info: + api_user: 'myUser@pam' + api_password: '*******' + api_host: '192.168.20.20' + backup_jobs: true +""" + +RETURN = """ +--- +backup_info: + description: The return value 
provides backup job information based on VM ID or VM name, or total backup job information. + returned: on success, but can be empty + type: list + elements: dict + contains: + bktype: + description: The type of the backup. + returned: on success + type: str + sample: vzdump + enabled: + description: V(1) if backup is enabled else V(0). + returned: on success + type: int + sample: 1 + id: + description: The backup job ID. + returned: on success + type: str + sample: backup-83831498-c631 + mode: + description: The backup job mode such as snapshot. + returned: on success + type: str + sample: snapshot + next-run: + description: The next backup time. + returned: on success + type: str + sample: "2024-12-28 11:30:00" + schedule: + description: The backup job schedule. + returned: on success + type: str + sample: "sat 15:00" + storage: + description: The backup storage location. + returned: on success + type: str + sample: local + vm_name: + description: The VM name. + returned: on success + type: str + sample: test01 + vmid: + description: The VM ID. 
+ returned: on success + type: str + sample: "100" +""" + +from datetime import datetime +from ansible.module_utils.basic import AnsibleModule, missing_required_lib +from ansible_collections.community.general.plugins.module_utils.proxmox import ( + proxmox_auth_argument_spec, ProxmoxAnsible, HAS_PROXMOXER, PROXMOXER_IMP_ERR) + + +class ProxmoxBackupInfoAnsible(ProxmoxAnsible): + + # Get all backup information + def get_jobs_list(self): + try: + backupJobs = self.proxmox_api.cluster.backup.get() + except Exception as e: + self.module.fail_json(msg="Getting backup jobs failed: %s" % e) + return backupJobs + + # Get VM information + def get_vms_list(self): + try: + vms = self.proxmox_api.cluster.resources.get(type='vm') + except Exception as e: + self.module.fail_json(msg="Getting VMs info from cluster failed: %s" % e) + return vms + + # Get all backup information by VM ID and VM name + def vms_backup_info(self): + backupList = self.get_jobs_list() + vmInfo = self.get_vms_list() + bkInfo = [] + for backupItem in backupList: + nextrun = datetime.fromtimestamp(backupItem['next-run']) + vmids = backupItem['vmid'].split(',') + for vmid in vmids: + for vm in vmInfo: + if vm['vmid'] == int(vmid): + vmName = vm['name'] + break + bkInfoData = {'id': backupItem['id'], + 'schedule': backupItem['schedule'], + 'storage': backupItem['storage'], + 'mode': backupItem['mode'], + 'next-run': nextrun.strftime("%Y-%m-%d %H:%M:%S"), + 'enabled': backupItem['enabled'], + 'bktype': backupItem['type'], + 'vmid': vmid, + 'vm_name': vmName} + bkInfo.append(bkInfoData) + return bkInfo + + # Get proxmox backup information for a specific VM based on its VM ID or VM name + def specific_vmbackup_info(self, vm_name_id): + fullBackupInfo = self.vms_backup_info() + vmBackupJobs = [] + for vm in fullBackupInfo: + if (vm["vm_name"] == vm_name_id or vm["vmid"] == vm_name_id): + vmBackupJobs.append(vm) + return vmBackupJobs + + +def main(): + # Define module args + args = proxmox_auth_argument_spec() + 
backup_info_args = dict( + vm_id=dict(type='str'), + vm_name=dict(type='str'), + backup_jobs=dict(type='bool', default=False) + ) + args.update(backup_info_args) + + module = AnsibleModule( + argument_spec=args, + mutually_exclusive=[('backup_jobs', 'vm_id', 'vm_name')], + supports_check_mode=True + ) + + # Define (init) result value + result = dict( + changed=False + ) + + # Check if proxmoxer exist + if not HAS_PROXMOXER: + module.fail_json(msg=missing_required_lib('proxmoxer'), exception=PROXMOXER_IMP_ERR) + + # Start to connect to proxmox to get backup data + proxmox = ProxmoxBackupInfoAnsible(module) + vm_id = module.params['vm_id'] + vm_name = module.params['vm_name'] + backup_jobs = module.params['backup_jobs'] + + # Update result value based on what requested (module args) + if backup_jobs: + result['backup_info'] = proxmox.get_jobs_list() + elif vm_id: + result['backup_info'] = proxmox.specific_vmbackup_info(vm_id) + elif vm_name: + result['backup_info'] = proxmox.specific_vmbackup_info(vm_name) + else: + result['backup_info'] = proxmox.vms_backup_info() + + # Return result value + module.exit_json(**result) + + +if __name__ == '__main__': + main() diff --git a/plugins/modules/proxmox_template.py b/plugins/modules/proxmox_template.py index c9987a4a70..0081171878 100644 --- a/plugins/modules/proxmox_template.py +++ b/plugins/modules/proxmox_template.py @@ -71,6 +71,21 @@ options: type: str choices: ['present', 'absent'] default: present + checksum_algorithm: + description: + - Algorithm used to verify the checksum. + - If specified, O(checksum) must also be specified. + type: str + choices: ['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'] + version_added: 10.3.0 + checksum: + description: + - The checksum to validate against. + - Checksums are often provided by software distributors to verify that a download is not corrupted. + - Checksums can usually be found on the distributors download page in the form of a file or string. 
+ - If specified, O(checksum_algorithm) must also be specified. + type: str + version_added: 10.3.0 notes: - Requires C(proxmoxer) and C(requests) modules on host. Those modules can be installed with M(ansible.builtin.pip). - C(proxmoxer) >= 1.2.0 requires C(requests_toolbelt) to upload files larger than 256 MB. @@ -82,6 +97,7 @@ extends_documentation_fragment: """ EXAMPLES = r""" +--- - name: Upload new openvz template with minimal options community.general.proxmox_template: node: uk-mc02 @@ -147,6 +163,16 @@ EXAMPLES = r""" storage: local content_type: vztmpl template: ubuntu-20.04-standard_20.04-1_amd64.tar.gz + +- name: Download and verify a template's checksum + community.general.proxmox_template: + node: uk-mc02 + api_user: root@pam + api_password: 1q2w3e + api_host: node1 + url: ubuntu-20.04-standard_20.04-1_amd64.tar.gz + checksum_algorithm: sha256 + checksum: 65d860160bdc9b98abf72407e14ca40b609417de7939897d3b58d55787aaef69 """ import os @@ -156,7 +182,7 @@ import traceback from ansible.module_utils.basic import AnsibleModule, missing_required_lib from ansible_collections.community.general.plugins.module_utils.proxmox import (proxmox_auth_argument_spec, ProxmoxAnsible) from ansible_collections.community.general.plugins.module_utils.version import LooseVersion -from ansible.module_utils.six.moves.urllib.parse import urlparse +from ansible.module_utils.six.moves.urllib.parse import urlparse, urlencode REQUESTS_TOOLBELT_ERR = None try: @@ -183,6 +209,8 @@ class ProxmoxTemplateAnsible(ProxmoxAnsible): while timeout: if self.api_task_ok(node, taskid): return True + elif self.api_task_failed(node, taskid): + self.module.fail_json(msg="Task error: %s" % self.proxmox_api.nodes(node).tasks(taskid).status.get()['exitstatus']) timeout = timeout - 1 if timeout == 0: self.module.fail_json(msg='Reached timeout while waiting for uploading/downloading template. 
Last line in task before timeout: %s' % @@ -235,6 +263,21 @@ class ProxmoxTemplateAnsible(ProxmoxAnsible): time.sleep(1) return False + def fetch_and_verify(self, node, storage, url, content_type, timeout, checksum, checksum_algorithm): + """ Fetch a template from a web url, then verify it using a checksum. + """ + data = { + 'url': url, + 'content': content_type, + 'filename': os.path.basename(url), + 'checksum': checksum, + 'checksum-algorithm': checksum_algorithm} + try: + taskid = self.proxmox_api.nodes(node).storage(storage).post("download-url?{}".format(urlencode(data))) + return self.task_status(node, taskid, timeout) + except Exception as e: + self.module.fail_json(msg="Checksum mismatch: %s" % (e)) + def main(): module_args = proxmox_auth_argument_spec() @@ -248,12 +291,14 @@ def main(): timeout=dict(type='int', default=30), force=dict(type='bool', default=False), state=dict(default='present', choices=['present', 'absent']), + checksum_algorithm=dict(choices=['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']), + checksum=dict(type='str'), ) module_args.update(template_args) module = AnsibleModule( argument_spec=module_args, - required_together=[('api_token_id', 'api_token_secret')], + required_together=[('api_token_id', 'api_token_secret'), ('checksum', 'checksum_algorithm')], required_one_of=[('api_password', 'api_token_id')], required_if=[('state', 'absent', ['template'])], mutually_exclusive=[("src", "url")], @@ -265,6 +310,8 @@ def main(): node = module.params['node'] storage = module.params['storage'] timeout = module.params['timeout'] + checksum = module.params['checksum'] + checksum_algorithm = module.params['checksum_algorithm'] if state == 'present': content_type = module.params['content_type'] @@ -272,7 +319,7 @@ def main(): url = module.params['url'] # download appliance template - if content_type == 'vztmpl' and not (src or url) : + if content_type == 'vztmpl' and not (src or url): template = module.params['template'] if not template: @@ 
-303,6 +350,9 @@ def main(): elif not proxmox.delete_template(node, storage, content_type, template, timeout): module.fail_json(changed=False, msg='failed to delete template with volid=%s:%s/%s' % (storage, content_type, template)) + if checksum: + if proxmox.fetch_and_verify(node, storage, url, content_type, timeout, checksum, checksum_algorithm): + module.exit_json(changed=True, msg="Checksum verified, template with volid=%s:%s/%s uploaded" % (storage, content_type, template)) if proxmox.fetch_template(node, storage, content_type, url, timeout): module.exit_json(changed=True, msg='template with volid=%s:%s/%s uploaded' % (storage, content_type, template)) diff --git a/plugins/modules/pulp_repo.py b/plugins/modules/pulp_repo.py index ec571b0472..0af129d26a 100644 --- a/plugins/modules/pulp_repo.py +++ b/plugins/modules/pulp_repo.py @@ -583,29 +583,20 @@ def main(): if importer_ssl_ca_cert is not None: importer_ssl_ca_cert_file_path = os.path.abspath(importer_ssl_ca_cert) if os.path.isfile(importer_ssl_ca_cert_file_path): - importer_ssl_ca_cert_file_object = open(importer_ssl_ca_cert_file_path, 'r') - try: + with open(importer_ssl_ca_cert_file_path, 'r') as importer_ssl_ca_cert_file_object: importer_ssl_ca_cert = importer_ssl_ca_cert_file_object.read() - finally: - importer_ssl_ca_cert_file_object.close() if importer_ssl_client_cert is not None: importer_ssl_client_cert_file_path = os.path.abspath(importer_ssl_client_cert) if os.path.isfile(importer_ssl_client_cert_file_path): - importer_ssl_client_cert_file_object = open(importer_ssl_client_cert_file_path, 'r') - try: + with open(importer_ssl_client_cert_file_path, 'r') as importer_ssl_client_cert_file_object: importer_ssl_client_cert = importer_ssl_client_cert_file_object.read() - finally: - importer_ssl_client_cert_file_object.close() if importer_ssl_client_key is not None: importer_ssl_client_key_file_path = os.path.abspath(importer_ssl_client_key) if os.path.isfile(importer_ssl_client_key_file_path): - 
importer_ssl_client_key_file_object = open(importer_ssl_client_key_file_path, 'r') - try: + with open(importer_ssl_client_key_file_path, 'r') as importer_ssl_client_key_file_object: importer_ssl_client_key = importer_ssl_client_key_file_object.read() - finally: - importer_ssl_client_key_file_object.close() server = pulp_server(module, pulp_host, repo_type, wait_for_completion=wait_for_completion) server.set_repo_list() diff --git a/plugins/modules/redhat_subscription.py b/plugins/modules/redhat_subscription.py index 0ed06bc92e..5c91c1ef5f 100644 --- a/plugins/modules/redhat_subscription.py +++ b/plugins/modules/redhat_subscription.py @@ -308,9 +308,8 @@ class Rhsm(object): else: cfg.set('main', 'enabled', '0') - fd = open(tmpfile, 'w+') - cfg.write(fd) - fd.close() + with open(tmpfile, 'w+') as fd: + cfg.write(fd) self.module.atomic_move(tmpfile, plugin_conf) def enable(self): @@ -1119,7 +1118,6 @@ def main(): module.exit_json(changed=False, msg="System already unregistered.") else: try: - rhsm.unsubscribe() rhsm.unregister() except Exception as e: module.fail_json(msg="Failed to unregister: %s" % to_native(e)) diff --git a/plugins/modules/snap.py b/plugins/modules/snap.py index 3cb34a1548..29fd08394f 100644 --- a/plugins/modules/snap.py +++ b/plugins/modules/snap.py @@ -167,6 +167,11 @@ options_changed: type: list returned: When any options have been changed/set version_added: 4.4.0 +version: + description: Versions of snap components as reported by C(snap version). 
+ type: dict + returned: always + version_added: 10.3.0 """ import re @@ -176,7 +181,7 @@ import numbers from ansible.module_utils.common.text.converters import to_native from ansible_collections.community.general.plugins.module_utils.module_helper import StateModuleHelper -from ansible_collections.community.general.plugins.module_utils.snap import snap_runner +from ansible_collections.community.general.plugins.module_utils.snap import snap_runner, get_version class Snap(StateModuleHelper): @@ -210,6 +215,7 @@ class Snap(StateModuleHelper): def __init_module__(self): self.runner = snap_runner(self.module) + self.vars.version = get_version(self.runner) # if state=present there might be file names passed in 'name', in # which case they must be converted to their actual snap names, which # is done using the names_from_snaps() method calling 'snap info'. diff --git a/plugins/modules/snap_alias.py b/plugins/modules/snap_alias.py index 81a968730d..b7244ed74d 100644 --- a/plugins/modules/snap_alias.py +++ b/plugins/modules/snap_alias.py @@ -80,13 +80,18 @@ snap_aliases: type: list elements: str returned: always +version: + description: Versions of snap components as reported by C(snap version). 
+ type: dict + returned: always + version_added: 10.3.0 """ import re from ansible_collections.community.general.plugins.module_utils.module_helper import StateModuleHelper -from ansible_collections.community.general.plugins.module_utils.snap import snap_runner +from ansible_collections.community.general.plugins.module_utils.snap import snap_runner, get_version class SnapAlias(StateModuleHelper): @@ -112,6 +117,7 @@ class SnapAlias(StateModuleHelper): def __init_module__(self): self.runner = snap_runner(self.module) + self.vars.version = get_version(self.runner) self.vars.set("snap_aliases", self._aliases(), change=True, diff=True) def __quit_module__(self): diff --git a/plugins/modules/solaris_zone.py b/plugins/modules/solaris_zone.py index c0959901ff..31e7919c08 100644 --- a/plugins/modules/solaris_zone.py +++ b/plugins/modules/solaris_zone.py @@ -246,24 +246,22 @@ class Zone(object): open('%s/root/noautoshutdown' % self.path, 'w').close() - node = open('%s/root/etc/nodename' % self.path, 'w') - node.write(self.name) - node.close() + with open('%s/root/etc/nodename' % self.path, 'w') as node: + node.write(self.name) - id = open('%s/root/etc/.sysIDtool.state' % self.path, 'w') - id.write('1 # System previously configured?\n') - id.write('1 # Bootparams succeeded?\n') - id.write('1 # System is on a network?\n') - id.write('1 # Extended network information gathered?\n') - id.write('0 # Autobinder succeeded?\n') - id.write('1 # Network has subnets?\n') - id.write('1 # root password prompted for?\n') - id.write('1 # locale and term prompted for?\n') - id.write('1 # security policy in place\n') - id.write('1 # NFSv4 domain configured\n') - id.write('0 # Auto Registration Configured\n') - id.write('vt100') - id.close() + with open('%s/root/etc/.sysIDtool.state' % self.path, 'w') as id: + id.write('1 # System previously configured?\n') + id.write('1 # Bootparams succeeded?\n') + id.write('1 # System is on a network?\n') + id.write('1 # Extended network information 
gathered?\n') + id.write('0 # Autobinder succeeded?\n') + id.write('1 # Network has subnets?\n') + id.write('1 # root password prompted for?\n') + id.write('1 # locale and term prompted for?\n') + id.write('1 # security policy in place\n') + id.write('1 # NFSv4 domain configured\n') + id.write('0 # Auto Registration Configured\n') + id.write('vt100') def configure_ssh_keys(self): rsa_key_file = '%s/root/etc/ssh/ssh_host_rsa_key' % self.path @@ -284,9 +282,8 @@ class Zone(object): def configure_password(self): shadow = '%s/root/etc/shadow' % self.path if self.root_password: - f = open(shadow, 'r') - lines = f.readlines() - f.close() + with open(shadow, 'r') as f: + lines = f.readlines() for i in range(0, len(lines)): fields = lines[i].split(':') @@ -294,10 +291,9 @@ class Zone(object): fields[1] = self.root_password lines[i] = ':'.join(fields) - f = open(shadow, 'w') - for line in lines: - f.write(line) - f.close() + with open(shadow, 'w') as f: + for line in lines: + f.write(line) def boot(self): if not self.module.check_mode: diff --git a/plugins/modules/sorcery.py b/plugins/modules/sorcery.py index 52c6e30b18..fff3f55e07 100644 --- a/plugins/modules/sorcery.py +++ b/plugins/modules/sorcery.py @@ -460,15 +460,11 @@ def match_depends(module): if depends_new: try: - try: - fl = open(sorcery_depends, 'a') - + with open(sorcery_depends, 'a') as fl: for k in depends_new: fl.write("%s:%s:%s:optional::\n" % (spell, k, depends[k])) - except IOError: - module.fail_json(msg="I/O error on the depends file") - finally: - fl.close() + except IOError: + module.fail_json(msg="I/O error on the depends file") depends_ok = False diff --git a/plugins/modules/timezone.py b/plugins/modules/timezone.py index c0eb9e5872..37eb2f94a6 100644 --- a/plugins/modules/timezone.py +++ b/plugins/modules/timezone.py @@ -396,7 +396,8 @@ class NosystemdTimezone(Timezone): self.conf_files['name'] = '/etc/sysconfig/clock' self.conf_files['hwclock'] = '/etc/sysconfig/clock' try: - f = 
open(self.conf_files['name'], 'r') + with open(self.conf_files['name'], 'r') as f: + sysconfig_clock = f.read() except IOError as err: if self._allow_ioerror(err, 'name'): # If the config file doesn't exist detect the distribution and set regexps. @@ -414,8 +415,6 @@ class NosystemdTimezone(Timezone): # The key for timezone might be `ZONE` or `TIMEZONE` # (the former is used in RHEL/CentOS and the latter is used in SUSE linux). # So check the content of /etc/sysconfig/clock and decide which key to use. - sysconfig_clock = f.read() - f.close() if re.search(r'^TIMEZONE\s*=', sysconfig_clock, re.MULTILINE): # For SUSE self.regexps['name'] = self.dist_regexps['SuSE'] @@ -448,15 +447,13 @@ class NosystemdTimezone(Timezone): """ # Read the file try: - file = open(filename, 'r') + with open(filename, 'r') as file: + lines = file.readlines() except IOError as err: if self._allow_ioerror(err, key): lines = [] else: self.abort('tried to configure %s using a file "%s", but could not read it' % (key, filename)) - else: - lines = file.readlines() - file.close() # Find the all matched lines matched_indices = [] for i, line in enumerate(lines): @@ -473,18 +470,17 @@ class NosystemdTimezone(Timezone): lines.insert(insert_line, value) # Write the changes try: - file = open(filename, 'w') + with open(filename, 'w') as file: + file.writelines(lines) except IOError: self.abort('tried to configure %s using a file "%s", but could not write to it' % (key, filename)) - else: - file.writelines(lines) - file.close() self.msg.append('Added 1 line and deleted %s line(s) on %s' % (len(matched_indices), filename)) def _get_value_from_config(self, key, phase): filename = self.conf_files[key] try: - file = open(filename, mode='r') + with open(filename, mode='r') as file: + status = file.read() except IOError as err: if self._allow_ioerror(err, key): if key == 'hwclock': @@ -496,8 +492,6 @@ class NosystemdTimezone(Timezone): else: self.abort('tried to configure %s using a file "%s", but could not 
read it' % (key, filename)) else: - status = file.read() - file.close() try: value = self.regexps[key].search(status).group(1) except AttributeError: @@ -628,11 +622,11 @@ class SmartOSTimezone(Timezone): """ if key == 'name': try: - f = open('/etc/default/init', 'r') - for line in f: - m = re.match('^TZ=(.*)$', line.strip()) - if m: - return m.groups()[0] + with open('/etc/default/init', 'r') as f: + for line in f: + m = re.match('^TZ=(.*)$', line.strip()) + if m: + return m.groups()[0] except Exception: self.module.fail_json(msg='Failed to read /etc/default/init') else: @@ -811,9 +805,8 @@ class AIXTimezone(Timezone): def __get_timezone(self): """ Return the current value of TZ= in /etc/environment """ try: - f = open('/etc/environment', 'r') - etcenvironment = f.read() - f.close() + with open('/etc/environment', 'r') as f: + etcenvironment = f.read() except Exception: self.module.fail_json(msg='Issue reading contents of /etc/environment') diff --git a/plugins/modules/ufw.py b/plugins/modules/ufw.py index bfb432a4eb..ca4e977f4f 100644 --- a/plugins/modules/ufw.py +++ b/plugins/modules/ufw.py @@ -114,8 +114,9 @@ options: proto: description: - TCP/IP protocol. + - The value V(vrrp) is supported since community.general 10.3.0. 
type: str - choices: [any, tcp, udp, ipv6, esp, ah, gre, igmp] + choices: [any, tcp, udp, ipv6, esp, ah, gre, igmp, vrrp] aliases: [protocol] name: description: @@ -341,7 +342,7 @@ def main(): from_port=dict(type='str'), to_ip=dict(type='str', default='any', aliases=['dest', 'to']), to_port=dict(type='str', aliases=['port']), - proto=dict(type='str', aliases=['protocol'], choices=['ah', 'any', 'esp', 'ipv6', 'tcp', 'udp', 'gre', 'igmp']), + proto=dict(type='str', aliases=['protocol'], choices=['ah', 'any', 'esp', 'ipv6', 'tcp', 'udp', 'gre', 'igmp', 'vrrp']), name=dict(type='str', aliases=['app']), comment=dict(type='str'), ), diff --git a/tests/integration/targets/connection_proxmox_pct_remote/aliases b/tests/integration/targets/connection_proxmox_pct_remote/aliases new file mode 100644 index 0000000000..d2fefd10c7 --- /dev/null +++ b/tests/integration/targets/connection_proxmox_pct_remote/aliases @@ -0,0 +1,12 @@ +# Copyright (c) 2025 Nils Stein (@mietzen) +# Copyright (c) 2025 Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +azp/posix/3 +destructive +needs/root +needs/target/connection +skip/docker +skip/alpine +skip/macos diff --git a/tests/integration/targets/connection_proxmox_pct_remote/dependencies.yml b/tests/integration/targets/connection_proxmox_pct_remote/dependencies.yml new file mode 100644 index 0000000000..c0a6718e32 --- /dev/null +++ b/tests/integration/targets/connection_proxmox_pct_remote/dependencies.yml @@ -0,0 +1,18 @@ +--- +# Copyright (c) 2025 Nils Stein (@mietzen) +# Copyright (c) 2025 Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +- hosts: localhost + gather_facts: true + serial: 1 + tasks: + - name: Copy pct mock + copy: + src: files/pct + dest: /usr/sbin/pct + mode: '0755' + - 
name: Install paramiko + pip: + name: "paramiko>=3.0.0" diff --git a/tests/integration/targets/connection_proxmox_pct_remote/files/pct b/tests/integration/targets/connection_proxmox_pct_remote/files/pct new file mode 100755 index 0000000000..8a40280041 --- /dev/null +++ b/tests/integration/targets/connection_proxmox_pct_remote/files/pct @@ -0,0 +1,33 @@ +#!/usr/bin/env bash +# Copyright (c) 2025 Nils Stein (@mietzen) +# Copyright (c) 2025 Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +# Shell script to mock proxmox pct behaviour + +>&2 echo "[DEBUG] INPUT: $@" + +pwd="$(pwd)" + +# Get quoted parts and restore quotes +declare -a cmd=() +for arg in "$@"; do + if [[ $arg =~ [[:space:]] ]]; then + arg="'$arg'" + fi + cmd+=("$arg") +done + +cmd="${cmd[@]:3}" +vmid="${@:2:1}" +>&2 echo "[INFO] MOCKING: pct ${@:1:3} ${cmd}" +tmp_dir="/tmp/ansible-remote/proxmox_pct_remote/integration_test/ct_${vmid}" +mkdir -p "$tmp_dir" +>&2 echo "[INFO] PWD: $tmp_dir" +>&2 echo "[INFO] CMD: ${cmd}" +cd "$tmp_dir" + +eval "${cmd}" + +cd "$pwd" diff --git a/tests/integration/targets/connection_proxmox_pct_remote/plugin-specific-tests.yml b/tests/integration/targets/connection_proxmox_pct_remote/plugin-specific-tests.yml new file mode 100644 index 0000000000..41fe06cdb9 --- /dev/null +++ b/tests/integration/targets/connection_proxmox_pct_remote/plugin-specific-tests.yml @@ -0,0 +1,32 @@ +--- +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +- hosts: "{{ target_hosts }}" + gather_facts: false + serial: 1 + tasks: + - name: create file without content + copy: + content: "" + dest: "{{ remote_tmp }}/test_empty.txt" + force: no + mode: '0644' + + - name: assert file without content exists + stat: + path: "{{ remote_tmp 
}}/test_empty.txt" + register: empty_file_stat + + - name: verify file without content exists + assert: + that: + - empty_file_stat.stat.exists + fail_msg: "The file {{ remote_tmp }}/test_empty.txt does not exist." + + - name: verify file without content is empty + assert: + that: + - empty_file_stat.stat.size == 0 + fail_msg: "The file {{ remote_tmp }}/test_empty.txt is not empty." diff --git a/tests/integration/targets/connection_proxmox_pct_remote/runme.sh b/tests/integration/targets/connection_proxmox_pct_remote/runme.sh new file mode 100755 index 0000000000..5d27e243d4 --- /dev/null +++ b/tests/integration/targets/connection_proxmox_pct_remote/runme.sh @@ -0,0 +1,19 @@ +#!/usr/bin/env bash +# Copyright (c) 2025 Nils Stein (@mietzen) +# Copyright (c) 2025 Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +set -eux + +ANSIBLE_ROLES_PATH=../ \ + ansible-playbook dependencies.yml -v "$@" + +./test.sh "$@" + +ansible-playbook plugin-specific-tests.yml -i "./test_connection.inventory" \ + -e target_hosts="proxmox_pct_remote" \ + -e action_prefix= \ + -e local_tmp=/tmp/ansible-local \ + -e remote_tmp=/tmp/ansible-remote \ + "$@" diff --git a/tests/integration/targets/connection_proxmox_pct_remote/test.sh b/tests/integration/targets/connection_proxmox_pct_remote/test.sh new file mode 120000 index 0000000000..70aa5dbdba --- /dev/null +++ b/tests/integration/targets/connection_proxmox_pct_remote/test.sh @@ -0,0 +1 @@ +../connection_posix/test.sh \ No newline at end of file diff --git a/tests/integration/targets/connection_proxmox_pct_remote/test_connection.inventory b/tests/integration/targets/connection_proxmox_pct_remote/test_connection.inventory new file mode 100644 index 0000000000..15592a61a6 --- /dev/null +++ b/tests/integration/targets/connection_proxmox_pct_remote/test_connection.inventory @@ -0,0 +1,14 @@ +# Copyright (c) 2025 Nils 
Stein (@mietzen) +# Copyright (c) 2025 Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +[proxmox_pct_remote] +proxmox_pct_remote-pipelining ansible_ssh_pipelining=true +proxmox_pct_remote-no-pipelining ansible_ssh_pipelining=false +[proxmox_pct_remote:vars] +ansible_host=localhost +ansible_user=root +ansible_python_interpreter="{{ ansible_playbook_python }}" +ansible_connection=community.general.proxmox_pct_remote +proxmox_vmid=123 diff --git a/tests/integration/targets/filter_json_patch/runme.sh b/tests/integration/targets/filter_json_patch/runme.sh new file mode 100755 index 0000000000..d591ee3289 --- /dev/null +++ b/tests/integration/targets/filter_json_patch/runme.sh @@ -0,0 +1,15 @@ +#!/usr/bin/env bash +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +set -eux + +source virtualenv.sh + +# Requirements have to be installed prior to running ansible-playbook +# because plugins and requirements are loaded before the task runs + +pip install jsonpatch + +ANSIBLE_ROLES_PATH=../ ansible-playbook runme.yml "$@" diff --git a/tests/integration/targets/filter_json_patch/runme.yml b/tests/integration/targets/filter_json_patch/runme.yml new file mode 100644 index 0000000000..f98c70f697 --- /dev/null +++ b/tests/integration/targets/filter_json_patch/runme.yml @@ -0,0 +1,8 @@ +--- +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +- hosts: localhost + roles: + - { role: filter_json_patch } diff --git a/tests/integration/targets/filter_json_patch/tasks/main.yml b/tests/integration/targets/filter_json_patch/tasks/main.yml new file mode 100644 index 
0000000000..014133acad --- /dev/null +++ b/tests/integration/targets/filter_json_patch/tasks/main.yml @@ -0,0 +1,137 @@ +--- +#################################################################### +# WARNING: These are designed specifically for Ansible tests # +# and should not be used as examples of how to write Ansible roles # +#################################################################### + +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +- name: Test json_patch + assert: + that: + - > # Insert a new element into an array at a specified index + list_input | + community.general.json_patch("add", "/1", {"baz": "qux"}) + == + [{"foo": {"one": 1}}, {"baz": "qux"}, {"bar": {"two": 2}}] + - > # Insert a new key into a dictionary + dict_input | + community.general.json_patch("add", "/bar/baz", "qux") + == + {"foo": {"one": 1}, "bar": {"baz": "qux", "two": 2}} + - > # Input is a string + '{ "foo": { "one": 1 }, "bar": { "two": 2 } }' | + community.general.json_patch("add", "/bar/baz", "qux") + == + {"foo": {"one": 1}, "bar": {"baz": "qux", "two": 2}} + - > # Existing key is replaced + dict_input | + community.general.json_patch("add", "/bar", "qux") + == + {"foo": {"one": 1}, "bar": "qux"} + - > # Escaping tilde as ~0 and slash as ~1 in the path + {} | + community.general.json_patch("add", "/~0~1", "qux") + == + {"~/": "qux"} + - > # Add at the end of the array + [1, 2, 3] | + community.general.json_patch("add", "/-", 4) + == + [1, 2, 3, 4] + - > # Remove a key + dict_input | + community.general.json_patch("remove", "/bar") + == + {"foo": {"one": 1} } + - > # Replace a value + dict_input | + community.general.json_patch("replace", "/bar", 2) + == + {"foo": {"one": 1}, "bar": 2} + - > # Copy a value + dict_input | + community.general.json_patch("copy", "/baz", from="/bar") + == + {"foo": {"one": 1}, "bar": { "two": 2 }, 
"baz": { "two": 2 }} + - > # Move a value + dict_input | + community.general.json_patch("move", "/baz", from="/bar") + == + {"foo": {"one": 1}, "baz": { "two": 2 }} + - > # Successful test + dict_input | + community.general.json_patch("test", "/bar/two", 2) | + ternary("OK", "Failed") + == + "OK" + - > # Unuccessful test + dict_input | + community.general.json_patch("test", "/bar/two", 9) | + ternary("OK", "Failed") + == + "Failed" + vars: + list_input: + - foo: { one: 1 } + - bar: { two: 2 } + dict_input: + foo: { one: 1 } + bar: { two: 2 } + +- name: Test json_patch_recipe + assert: + that: + - > # List of operations + input | + community.general.json_patch_recipe(operations) + == + {"bar":[2],"bax":1,"bay":1,"baz":[10,20,30]} + vars: + input: {} + operations: + - op: 'add' + path: '/foo' + value: 1 + - op: 'add' + path: '/bar' + value: [] + - op: 'add' + path: '/bar/-' + value: 2 + - op: 'add' + path: '/bar/0' + value: 1 + - op: 'remove' + path: '/bar/0' + - op: 'move' + from: '/foo' + path: '/baz' + - op: 'copy' + from: '/baz' + path: '/bax' + - op: 'copy' + from: '/baz' + path: '/bay' + - op: 'replace' + path: '/baz' + value: [10, 20, 30] + +- name: Test json_diff + assert: + that: # The order in the result array is not stable, sort by path + - > + input | + community.general.json_diff(target) | + sort(attribute='path') + == + [ + {"op": "add", "path": "/baq", "value": {"baz": 2}}, + {"op": "remove", "path": "/baw/1"}, + {"op": "replace", "path": "/hello", "value": "night"}, + ] + vars: + input: {"foo": 1, "bar":{"baz": 2}, "baw": [1, 2, 3], "hello": "day"} + target: {"foo": 1, "bar": {"baz": 2}, "baw": [1, 3], "baq": {"baz": 2}, "hello": "night"} diff --git a/tests/integration/targets/keycloak_modules_authentication/README.md b/tests/integration/targets/keycloak_modules_authentication/README.md new file mode 100644 index 0000000000..a3d40a5674 --- /dev/null +++ b/tests/integration/targets/keycloak_modules_authentication/README.md @@ -0,0 +1,26 @@ + +# Running 
keycloak module authentication integration test + +To run the Keycloak module authentication integration test, start a keycloak server using Docker or Podman: + +```sh + podman|docker run -d --rm --name mykeycloak -p 8080:8080 -e KEYCLOAK_ADMIN=admin -e KEYCLOAK_ADMIN_PASSWORD=password quay.io/keycloak/keycloak:latest start-dev --http-relative-path /auth +``` + +Source Ansible env-setup from ansible github repository. + +Run the integration tests: + +```sh + ansible-test integration keycloak_role --python 3.10 --allow-unsupported +``` + +To cleanup, run: + +```sh + podman|docker stop mykeycloak +``` diff --git a/tests/integration/targets/keycloak_modules_authentication/aliases b/tests/integration/targets/keycloak_modules_authentication/aliases new file mode 100644 index 0000000000..bd1f024441 --- /dev/null +++ b/tests/integration/targets/keycloak_modules_authentication/aliases @@ -0,0 +1,5 @@ +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +unsupported diff --git a/tests/integration/targets/keycloak_modules_authentication/tasks/main.yml b/tests/integration/targets/keycloak_modules_authentication/tasks/main.yml new file mode 100644 index 0000000000..1553e29c1c --- /dev/null +++ b/tests/integration/targets/keycloak_modules_authentication/tasks/main.yml @@ -0,0 +1,249 @@ +--- +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +- name: Create realm + community.general.keycloak_realm: + auth_keycloak_url: "{{ url }}" + auth_realm: "{{ admin_realm }}" + auth_username: "{{ admin_user }}" + auth_password: "{{ admin_password }}" + id: "{{ realm }}" + realm: "{{ realm }}" + state: present + +- name: Create client + community.general.keycloak_client: + auth_keycloak_url: "{{ url }}" + 
auth_realm: "{{ admin_realm }}" + auth_username: "{{ admin_user }}" + auth_password: "{{ admin_password }}" + realm: "{{ realm }}" + client_id: "{{ client_id }}" + state: present + register: client + +- name: Create new realm role with username/password authentication + community.general.keycloak_role: + auth_keycloak_url: "{{ url }}" + auth_realm: "{{ admin_realm }}" + auth_username: "{{ admin_user }}" + auth_password: "{{ admin_password }}" + realm: "{{ realm }}" + name: "{{ role }}" + description: "{{ keycloak_role_description }}" + state: present + register: result + +- name: Debug + debug: + var: result + +- name: Remove created realm role + community.general.keycloak_role: + auth_keycloak_url: "{{ url }}" + auth_realm: "{{ admin_realm }}" + auth_username: "{{ admin_user }}" + auth_password: "{{ admin_password }}" + realm: "{{ realm }}" + name: "{{ role }}" + state: absent + register: result + +- name: Debug + debug: + var: result + +- name: Get Keycloak token + ansible.builtin.uri: + url: "{{ url }}/realms/{{ admin_realm }}/protocol/openid-connect/token" + method: POST + return_content: true + status_code: 200 + body_format: form-urlencoded + body: + grant_type: "password" + client_id: "admin-cli" + username: "{{ admin_user }}" + password: "{{ admin_password }}" + register: token_response + +- name: Extract tokens + ansible.builtin.set_fact: + access_token: "{{ token_response.json | json_query('access_token') }}" + refresh_token: "{{ token_response.json | json_query('refresh_token') }}" + +- name: Create new realm role with provided token authentication + community.general.keycloak_role: + auth_keycloak_url: "{{ url }}" + token: "{{ access_token }}" + realm: "{{ realm }}" + name: "{{ role }}" + description: "{{ keycloak_role_description }}" + state: present + register: result + +- name: Debug + debug: + var: result + +- name: Remove created realm role + community.general.keycloak_role: + auth_keycloak_url: "{{ url }}" + auth_realm: "{{ admin_realm }}" + 
auth_username: "{{ admin_user }}" + auth_password: "{{ admin_password }}" + realm: "{{ realm }}" + name: "{{ role }}" + state: absent + register: result + +- name: Debug + debug: + var: result + +- name: Create new realm role with invalid auth token and valid refresh token + community.general.keycloak_role: + auth_keycloak_url: "{{ url }}" + auth_realm: "{{ admin_realm }}" + auth_username: "{{ admin_user }}" + auth_password: "{{ admin_password }}" + token: "invalidtoken!!!" + refresh_token: "{{ refresh_token }}" + realm: "{{ realm }}" + name: "{{ role }}" + description: "{{ keycloak_role_description }}" + state: present + register: result + +- name: Debug + debug: + var: result + +- name: Remove created realm role + community.general.keycloak_role: + auth_keycloak_url: "{{ url }}" + auth_realm: "{{ admin_realm }}" + auth_username: "{{ admin_user }}" + auth_password: "{{ admin_password }}" + realm: "{{ realm }}" + name: "{{ role }}" + state: absent + register: result + +- name: Debug + debug: + var: result + +- name: Create new realm role with invalid auth token and valid username/password + community.general.keycloak_role: + auth_keycloak_url: "{{ url }}" + auth_realm: "{{ admin_realm }}" + auth_username: "{{ admin_user }}" + auth_password: "{{ admin_password }}" + token: "invalidtoken!!!" 
+ realm: "{{ realm }}" + name: "{{ role }}" + description: "{{ keycloak_role_description }}" + state: present + register: result + +- name: Debug + debug: + var: result + +- name: Remove created realm role + community.general.keycloak_role: + auth_keycloak_url: "{{ url }}" + auth_realm: "{{ admin_realm }}" + auth_username: "{{ admin_user }}" + auth_password: "{{ admin_password }}" + realm: "{{ realm }}" + name: "{{ role }}" + state: absent + register: result + +- name: Debug + debug: + var: result + +- name: Create new realm role with invalid auth token, invalid refresh token, and valid username/password + community.general.keycloak_role: + auth_keycloak_url: "{{ url }}" + auth_realm: "{{ admin_realm }}" + auth_username: "{{ admin_user }}" + auth_password: "{{ admin_password }}" + token: "invalidtoken!!!" + refresh_token: "invalidrefreshtoken!!!" + realm: "{{ realm }}" + name: "{{ role }}" + description: "{{ keycloak_role_description }}" + state: present + register: result + +- name: Debug + debug: + var: result + +- name: Remove created realm role + community.general.keycloak_role: + auth_keycloak_url: "{{ url }}" + auth_realm: "{{ admin_realm }}" + auth_username: "{{ admin_user }}" + auth_password: "{{ admin_password }}" + realm: "{{ realm }}" + name: "{{ role }}" + state: absent + register: result + +- name: Debug + debug: + var: result + +### Unhappy path tests + +- name: Fail to create new realm role with invalid username/password + community.general.keycloak_role: + auth_keycloak_url: "{{ url }}" + auth_realm: "{{ admin_realm }}" + auth_username: "{{ admin_user }}" + auth_password: "invalid_password" + realm: "{{ realm }}" + name: "{{ role }}" + description: "{{ keycloak_role_description }}" + state: present + register: result + failed_when: > + (result.exception is not defined) or + ("HTTP Error 401: Unauthorized" not in result.msg) + +- name: Fail to create new realm role with invalid auth token + community.general.keycloak_role: + auth_keycloak_url: "{{ 
url }}" + token: "invalidtoken!!!" + realm: "{{ realm }}" + name: "{{ role }}" + description: "{{ keycloak_role_description }}" + state: present + register: result + failed_when: > + (result.exception is not defined) or + ("HTTP Error 401: Unauthorized" not in result.msg) + +- name: Fail to create new realm role with invalid auth and refresh tokens, and invalid username/password + community.general.keycloak_role: + auth_keycloak_url: "{{ url }}" + auth_realm: "{{ admin_realm }}" + auth_username: "{{ admin_user }}" + auth_password: "invalid_password" + token: "invalidtoken!!!" + refresh_token: "invalidtoken!!!" + realm: "{{ realm }}" + name: "{{ role }}" + description: "{{ keycloak_role_description }}" + state: present + register: result + failed_when: > + (result.exception is not defined) or + ("HTTP Error 401: Unauthorized" not in result.msg) diff --git a/tests/integration/targets/keycloak_modules_authentication/vars/main.yml b/tests/integration/targets/keycloak_modules_authentication/vars/main.yml new file mode 100644 index 0000000000..02ad618e1b --- /dev/null +++ b/tests/integration/targets/keycloak_modules_authentication/vars/main.yml @@ -0,0 +1,20 @@ +--- +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +url: http://localhost:8080/auth +admin_realm: master +admin_user: admin +admin_password: password +realm: myrealm +client_id: myclient +role: myrole + +keycloak_role_name: test +keycloak_role_description: test +keycloak_role_composite: false +keycloak_client_id: test-client +keycloak_client_name: test-client +keycloak_client_description: This is a client for testing purpose +role_state: present diff --git a/tests/integration/targets/pipx/tasks/main.yml b/tests/integration/targets/pipx/tasks/main.yml index 0e04826371..04086d80cd 100644 --- a/tests/integration/targets/pipx/tasks/main.yml +++ 
b/tests/integration/targets/pipx/tasks/main.yml @@ -267,3 +267,6 @@ - name: Include testcase for PR 9103 upgrade --global ansible.builtin.include_tasks: testcase-9103-upgrade-global.yml + + - name: Include testcase for issue 9619 latest --global + ansible.builtin.include_tasks: testcase-9619-latest-global.yml diff --git a/tests/integration/targets/pipx/tasks/testcase-9619-latest-global.yml b/tests/integration/targets/pipx/tasks/testcase-9619-latest-global.yml new file mode 100644 index 0000000000..e06ee438c0 --- /dev/null +++ b/tests/integration/targets/pipx/tasks/testcase-9619-latest-global.yml @@ -0,0 +1,38 @@ +--- +# Copyright (c) Ansible Project +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +- name: 9619-Ensure application hello-world is uninstalled + community.general.pipx: + name: hello-world + state: absent + global: true + +- name: 9619-Install application hello-world + community.general.pipx: + name: hello-world + source: hello-world==0.1 + global: true + register: install_hw + +- name: 9619-Upgrade application hello-world + community.general.pipx: + state: latest + name: hello-world + global: true + register: latest_hw + +- name: 9619-Ensure application pylint is uninstalled + community.general.pipx: + name: pylint + state: absent + global: true + +- name: 9619-Assertions + ansible.builtin.assert: + that: + - install_hw is changed + - latest_hw is changed + - latest_hw.cmd[-3] == "upgrade" + - latest_hw.cmd[-2] == "--global" diff --git a/tests/unit/plugins/connection/test_proxmox_pct_remote.py b/tests/unit/plugins/connection/test_proxmox_pct_remote.py new file mode 100644 index 0000000000..c0e8678cdc --- /dev/null +++ b/tests/unit/plugins/connection/test_proxmox_pct_remote.py @@ -0,0 +1,585 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2024 Nils Stein (@mietzen) +# Copyright (c) 2024 Ansible Project +# GNU General Public License v3.0+ (see 
LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (annotations, absolute_import, division, print_function) +__metaclass__ = type + +import os +import pytest + +from ansible_collections.community.general.plugins.connection.proxmox_pct_remote import authenticity_msg, MyAddPolicy +from ansible_collections.community.general.plugins.module_utils._filelock import FileLock, LockTimeout +from ansible.errors import AnsibleError, AnsibleAuthenticationFailure, AnsibleConnectionFailure +from ansible.module_utils.common.text.converters import to_bytes +from ansible.module_utils.compat.paramiko import paramiko +from ansible.playbook.play_context import PlayContext +from ansible.plugins.loader import connection_loader +from io import StringIO +from pathlib import Path +from unittest.mock import patch, MagicMock, mock_open + + +@pytest.fixture +def connection(): + play_context = PlayContext() + in_stream = StringIO() + conn = connection_loader.get('community.general.proxmox_pct_remote', play_context, in_stream) + conn.set_option('remote_addr', '192.168.1.100') + conn.set_option('remote_user', 'root') + conn.set_option('password', 'password') + return conn + + +def test_connection_options(connection): + """ Test that connection options are properly set """ + assert connection.get_option('remote_addr') == '192.168.1.100' + assert connection.get_option('remote_user') == 'root' + assert connection.get_option('password') == 'password' + + +def test_authenticity_msg(): + """ Test authenticity message formatting """ + msg = authenticity_msg('test.host', 'ssh-rsa', 'AA:BB:CC:DD') + assert 'test.host' in msg + assert 'ssh-rsa' in msg + assert 'AA:BB:CC:DD' in msg + + +def test_missing_host_key(connection): + """ Test MyAddPolicy missing_host_key method """ + + client = MagicMock() + key = MagicMock() + key.get_fingerprint.return_value = b'fingerprint' + key.get_name.return_value = 'ssh-rsa' + + 
policy = MyAddPolicy(connection) + + connection.set_option('host_key_auto_add', True) + policy.missing_host_key(client, 'test.host', key) + assert hasattr(key, '_added_by_ansible_this_time') + + connection.set_option('host_key_auto_add', False) + connection.set_option('host_key_checking', False) + policy.missing_host_key(client, 'test.host', key) + + connection.set_option('host_key_checking', True) + connection.set_option('host_key_auto_add', False) + connection.set_option('use_persistent_connections', False) + + with patch('ansible.utils.display.Display.prompt_until', return_value='yes'): + policy.missing_host_key(client, 'test.host', key) + + with patch('ansible.utils.display.Display.prompt_until', return_value='no'): + with pytest.raises(AnsibleError, match='host connection rejected by user'): + policy.missing_host_key(client, 'test.host', key) + + +def test_set_log_channel(connection): + """ Test setting log channel """ + connection._set_log_channel('test_channel') + assert connection._log_channel == 'test_channel' + + +def test_parse_proxy_command(connection): + """ Test proxy command parsing """ + connection.set_option('proxy_command', 'ssh -W %h:%p proxy.example.com') + connection.set_option('remote_addr', 'target.example.com') + connection.set_option('remote_user', 'testuser') + + result = connection._parse_proxy_command(port=2222) + assert 'sock' in result + assert isinstance(result['sock'], paramiko.ProxyCommand) + + +@patch('paramiko.SSHClient') +def test_connect_with_rsa_sha2_disabled(mock_ssh, connection): + """ Test connection with RSA SHA2 algorithms disabled """ + connection.set_option('use_rsa_sha2_algorithms', False) + mock_client = MagicMock() + mock_ssh.return_value = mock_client + + connection._connect() + + call_kwargs = mock_client.connect.call_args[1] + assert 'disabled_algorithms' in call_kwargs + assert 'pubkeys' in call_kwargs['disabled_algorithms'] + + +@patch('paramiko.SSHClient') +def test_connect_with_bad_host_key(mock_ssh, 
connection): + """ Test connection with bad host key """ + mock_client = MagicMock() + mock_ssh.return_value = mock_client + mock_client.connect.side_effect = paramiko.ssh_exception.BadHostKeyException( + 'hostname', MagicMock(), MagicMock()) + + with pytest.raises(AnsibleConnectionFailure, match='host key mismatch'): + connection._connect() + + +@patch('paramiko.SSHClient') +def test_connect_with_invalid_host_key(mock_ssh, connection): + """ Test connection with bad host key """ + connection.set_option('host_key_checking', True) + mock_client = MagicMock() + mock_ssh.return_value = mock_client + mock_client.load_system_host_keys.side_effect = paramiko.hostkeys.InvalidHostKey( + "Bad Line!", Exception('Something crashed!')) + + with pytest.raises(AnsibleConnectionFailure, match="Invalid host key: Bad Line!"): + connection._connect() + + +@patch('paramiko.SSHClient') +def test_connect_success(mock_ssh, connection): + """ Test successful SSH connection establishment """ + mock_client = MagicMock() + mock_ssh.return_value = mock_client + + connection._connect() + + assert mock_client.connect.called + assert connection._connected + + +@patch('paramiko.SSHClient') +def test_connect_authentication_failure(mock_ssh, connection): + """ Test SSH connection with authentication failure """ + mock_client = MagicMock() + mock_ssh.return_value = mock_client + mock_client.connect.side_effect = paramiko.ssh_exception.AuthenticationException('Auth failed') + + with pytest.raises(AnsibleAuthenticationFailure): + connection._connect() + + +def test_any_keys_added(connection): + """ Test checking for added host keys """ + connection.ssh = MagicMock() + connection.ssh._host_keys = { + 'host1': { + 'ssh-rsa': MagicMock(_added_by_ansible_this_time=True), + 'ssh-ed25519': MagicMock(_added_by_ansible_this_time=False) + } + } + + assert connection._any_keys_added() is True + + connection.ssh._host_keys = { + 'host1': { + 'ssh-rsa': MagicMock(_added_by_ansible_this_time=False) + } + } + 
assert connection._any_keys_added() is False + + +@patch('os.path.exists') +@patch('os.stat') +@patch('tempfile.NamedTemporaryFile') +def test_save_ssh_host_keys(mock_tempfile, mock_stat, mock_exists, connection): + """ Test saving SSH host keys """ + mock_exists.return_value = True + mock_stat.return_value = MagicMock(st_mode=0o644, st_uid=1000, st_gid=1000) + mock_tempfile.return_value.__enter__.return_value.name = '/tmp/test_keys' + + connection.ssh = MagicMock() + connection.ssh._host_keys = { + 'host1': { + 'ssh-rsa': MagicMock( + get_base64=lambda: 'KEY1', + _added_by_ansible_this_time=True + ) + } + } + + mock_open_obj = mock_open() + with patch('builtins.open', mock_open_obj): + connection._save_ssh_host_keys('/tmp/test_keys') + + mock_open_obj().write.assert_called_with('host1 ssh-rsa KEY1\n') + + +def test_build_pct_command(connection): + """ Test PCT command building with different users """ + connection.set_option('vmid', '100') + + cmd = connection._build_pct_command('/bin/sh -c "ls -la"') + assert cmd == '/usr/sbin/pct exec 100 -- /bin/sh -c "ls -la"' + + connection.set_option('remote_user', 'user') + connection.set_option('proxmox_become_method', 'sudo') + cmd = connection._build_pct_command('/bin/sh -c "ls -la"') + assert cmd == 'sudo /usr/sbin/pct exec 100 -- /bin/sh -c "ls -la"' + + +@patch('paramiko.SSHClient') +def test_exec_command_success(mock_ssh, connection): + """ Test successful command execution """ + mock_client = MagicMock() + mock_ssh.return_value = mock_client + mock_channel = MagicMock() + mock_transport = MagicMock() + + mock_client.get_transport.return_value = mock_transport + mock_transport.open_session.return_value = mock_channel + mock_channel.recv_exit_status.return_value = 0 + mock_channel.makefile.return_value = [to_bytes('stdout')] + mock_channel.makefile_stderr.return_value = [to_bytes("")] + + connection._connected = True + connection.ssh = mock_client + + returncode, stdout, stderr = connection.exec_command('ls -la') + + 
mock_transport.open_session.assert_called_once() + mock_channel.get_pty.assert_called_once() + mock_transport.set_keepalive.assert_called_once_with(5) + + +@patch('paramiko.SSHClient') +def test_exec_command_pct_not_found(mock_ssh, connection): + """ Test command execution when PCT is not found """ + mock_client = MagicMock() + mock_ssh.return_value = mock_client + mock_channel = MagicMock() + mock_transport = MagicMock() + + mock_client.get_transport.return_value = mock_transport + mock_transport.open_session.return_value = mock_channel + mock_channel.recv_exit_status.return_value = 1 + mock_channel.makefile.return_value = [to_bytes("")] + mock_channel.makefile_stderr.return_value = [to_bytes('pct: not found')] + + connection._connected = True + connection.ssh = mock_client + + with pytest.raises(AnsibleError, match='pct not found in path of host'): + connection.exec_command('ls -la') + + +@patch('paramiko.SSHClient') +def test_exec_command_session_open_failure(mock_ssh, connection): + """ Test exec_command when session opening fails """ + mock_client = MagicMock() + mock_transport = MagicMock() + mock_transport.open_session.side_effect = Exception('Failed to open session') + mock_client.get_transport.return_value = mock_transport + + connection._connected = True + connection.ssh = mock_client + + with pytest.raises(AnsibleConnectionFailure, match='Failed to open session'): + connection.exec_command('test command') + + +@patch('paramiko.SSHClient') +def test_exec_command_with_privilege_escalation(mock_ssh, connection): + """ Test exec_command with privilege escalation """ + mock_client = MagicMock() + mock_channel = MagicMock() + mock_transport = MagicMock() + + mock_client.get_transport.return_value = mock_transport + mock_transport.open_session.return_value = mock_channel + connection._connected = True + connection.ssh = mock_client + + connection.become = MagicMock() + connection.become.expect_prompt.return_value = True + 
connection.become.check_success.return_value = False + connection.become.check_password_prompt.return_value = True + connection.become.get_option.return_value = 'sudo_password' + + mock_channel.recv.return_value = b'[sudo] password:' + mock_channel.recv_exit_status.return_value = 0 + mock_channel.makefile.return_value = [b""] + mock_channel.makefile_stderr.return_value = [b""] + + returncode, stdout, stderr = connection.exec_command('sudo test command') + + mock_channel.sendall.assert_called_once_with(b'sudo_password\n') + + +def test_put_file(connection): + """ Test putting a file to the remote system """ + connection.exec_command = MagicMock() + connection.exec_command.return_value = (0, b"", b"") + + with patch('builtins.open', create=True) as mock_open: + mock_open.return_value.__enter__.return_value.read.return_value = b'test content' + connection.put_file('/local/path', '/remote/path') + + connection.exec_command.assert_called_once_with("/bin/sh -c 'cat > /remote/path'", in_data=b'test content', sudoable=False) + + +@patch('paramiko.SSHClient') +def test_put_file_general_error(mock_ssh, connection): + """ Test put_file with general error """ + mock_client = MagicMock() + mock_ssh.return_value = mock_client + mock_channel = MagicMock() + mock_transport = MagicMock() + + mock_client.get_transport.return_value = mock_transport + mock_transport.open_session.return_value = mock_channel + mock_channel.recv_exit_status.return_value = 1 + mock_channel.makefile.return_value = [to_bytes("")] + mock_channel.makefile_stderr.return_value = [to_bytes('Some error')] + + connection._connected = True + connection.ssh = mock_client + + with pytest.raises(AnsibleError, match='error occurred while putting file from /remote/path to /local/path'): + connection.put_file('/remote/path', '/local/path') + + +@patch('paramiko.SSHClient') +def test_put_file_cat_not_found(mock_ssh, connection): + """ Test command execution when cat is not found """ + mock_client = MagicMock() + 
mock_ssh.return_value = mock_client + mock_channel = MagicMock() + mock_transport = MagicMock() + + mock_client.get_transport.return_value = mock_transport + mock_transport.open_session.return_value = mock_channel + mock_channel.recv_exit_status.return_value = 1 + mock_channel.makefile.return_value = [to_bytes("")] + mock_channel.makefile_stderr.return_value = [to_bytes('cat: not found')] + + connection._connected = True + connection.ssh = mock_client + + with pytest.raises(AnsibleError, match='cat not found in path of container:'): + connection.fetch_file('/remote/path', '/local/path') + + +def test_fetch_file(connection): + """ Test fetching a file from the remote system """ + connection.exec_command = MagicMock() + connection.exec_command.return_value = (0, b'test content', b"") + + with patch('builtins.open', create=True) as mock_open: + connection.fetch_file('/remote/path', '/local/path') + + connection.exec_command.assert_called_once_with("/bin/sh -c 'cat /remote/path'", sudoable=False) + mock_open.assert_called_with('/local/path', 'wb') + + +@patch('paramiko.SSHClient') +def test_fetch_file_general_error(mock_ssh, connection): + """ Test fetch_file with general error """ + mock_client = MagicMock() + mock_ssh.return_value = mock_client + mock_channel = MagicMock() + mock_transport = MagicMock() + + mock_client.get_transport.return_value = mock_transport + mock_transport.open_session.return_value = mock_channel + mock_channel.recv_exit_status.return_value = 1 + mock_channel.makefile.return_value = [to_bytes("")] + mock_channel.makefile_stderr.return_value = [to_bytes('Some error')] + + connection._connected = True + connection.ssh = mock_client + + with pytest.raises(AnsibleError, match='error occurred while fetching file from /remote/path to /local/path'): + connection.fetch_file('/remote/path', '/local/path') + + +@patch('paramiko.SSHClient') +def test_fetch_file_cat_not_found(mock_ssh, connection): + """ Test command execution when cat is not found """ + 
mock_client = MagicMock() + mock_ssh.return_value = mock_client + mock_channel = MagicMock() + mock_transport = MagicMock() + + mock_client.get_transport.return_value = mock_transport + mock_transport.open_session.return_value = mock_channel + mock_channel.recv_exit_status.return_value = 1 + mock_channel.makefile.return_value = [to_bytes("")] + mock_channel.makefile_stderr.return_value = [to_bytes('cat: not found')] + + connection._connected = True + connection.ssh = mock_client + + with pytest.raises(AnsibleError, match='cat not found in path of container:'): + connection.fetch_file('/remote/path', '/local/path') + + +def test_close(connection): + """ Test connection close """ + mock_ssh = MagicMock() + connection.ssh = mock_ssh + connection._connected = True + + connection.close() + + assert mock_ssh.close.called, 'ssh.close was not called' + assert not connection._connected, 'self._connected is still True' + + +def test_close_with_lock_file(connection): + """ Test close method with lock file creation """ + connection._any_keys_added = MagicMock(return_value=True) + connection._connected = True + connection.keyfile = '/tmp/pct-remote-known_hosts-test' + connection.set_option('host_key_checking', True) + connection.set_option('lock_file_timeout', 5) + connection.set_option('record_host_keys', True) + connection.ssh = MagicMock() + + lock_file_path = os.path.join(os.path.dirname(connection.keyfile), + f'ansible-{os.path.basename(connection.keyfile)}.lock') + + try: + connection.close() + assert os.path.exists(lock_file_path), 'Lock file was not created' + + lock_stat = os.stat(lock_file_path) + assert lock_stat.st_mode & 0o777 == 0o600, 'Incorrect lock file permissions' + finally: + Path(lock_file_path).unlink(missing_ok=True) + + +@patch('pathlib.Path.unlink') +@patch('os.path.exists') +def test_close_lock_file_time_out_error_handling(mock_exists, mock_unlink, connection): + """ Test close method with lock file timeout error """ + connection._any_keys_added = 
MagicMock(return_value=True) + connection._connected = True + connection._save_ssh_host_keys = MagicMock() + connection.keyfile = '/tmp/pct-remote-known_hosts-test' + connection.set_option('host_key_checking', True) + connection.set_option('lock_file_timeout', 5) + connection.set_option('record_host_keys', True) + connection.ssh = MagicMock() + + mock_exists.return_value = False + matcher = f'writing lock file for {connection.keyfile} ran in to the timeout of {connection.get_option("lock_file_timeout")}s' + with pytest.raises(AnsibleError, match=matcher): + with patch('os.getuid', return_value=1000), \ + patch('os.getgid', return_value=1000), \ + patch('os.chmod'), patch('os.chown'), \ + patch('os.rename'), \ + patch.object(FileLock, 'lock_file', side_effect=LockTimeout()): + connection.close() + + +@patch('ansible_collections.community.general.plugins.module_utils._filelock.FileLock.lock_file') +@patch('tempfile.NamedTemporaryFile') +@patch('os.chmod') +@patch('os.chown') +@patch('os.rename') +@patch('os.path.exists') +def test_tempfile_creation_and_move(mock_exists, mock_rename, mock_chown, mock_chmod, mock_tempfile, mock_lock_file, connection): + """ Test tempfile creation and move during close """ + connection._any_keys_added = MagicMock(return_value=True) + connection._connected = True + connection._save_ssh_host_keys = MagicMock() + connection.keyfile = '/tmp/pct-remote-known_hosts-test' + connection.set_option('host_key_checking', True) + connection.set_option('lock_file_timeout', 5) + connection.set_option('record_host_keys', True) + connection.ssh = MagicMock() + + mock_exists.return_value = False + + mock_lock_file_instance = MagicMock() + mock_lock_file.return_value = mock_lock_file_instance + mock_lock_file_instance.__enter__.return_value = None + + mock_tempfile_instance = MagicMock() + mock_tempfile_instance.name = '/tmp/mock_tempfile' + mock_tempfile.return_value.__enter__.return_value = mock_tempfile_instance + + mode = 0o644 + uid = 1000 + gid = 
1000 + key_dir = os.path.dirname(connection.keyfile) + + with patch('os.getuid', return_value=uid), patch('os.getgid', return_value=gid): + connection.close() + + connection._save_ssh_host_keys.assert_called_once_with('/tmp/mock_tempfile') + mock_chmod.assert_called_once_with('/tmp/mock_tempfile', mode) + mock_chown.assert_called_once_with('/tmp/mock_tempfile', uid, gid) + mock_rename.assert_called_once_with('/tmp/mock_tempfile', connection.keyfile) + mock_tempfile.assert_called_once_with(dir=key_dir, delete=False) + + +@patch('pathlib.Path.unlink') +@patch('tempfile.NamedTemporaryFile') +@patch('ansible_collections.community.general.plugins.module_utils._filelock.FileLock.lock_file') +@patch('os.path.exists') +def test_close_tempfile_error_handling(mock_exists, mock_lock_file, mock_tempfile, mock_unlink, connection): + """ Test tempfile creation error """ + connection._any_keys_added = MagicMock(return_value=True) + connection._connected = True + connection._save_ssh_host_keys = MagicMock() + connection.keyfile = '/tmp/pct-remote-known_hosts-test' + connection.set_option('host_key_checking', True) + connection.set_option('lock_file_timeout', 5) + connection.set_option('record_host_keys', True) + connection.ssh = MagicMock() + + mock_exists.return_value = False + + mock_lock_file_instance = MagicMock() + mock_lock_file.return_value = mock_lock_file_instance + mock_lock_file_instance.__enter__.return_value = None + + mock_tempfile_instance = MagicMock() + mock_tempfile_instance.name = '/tmp/mock_tempfile' + mock_tempfile.return_value.__enter__.return_value = mock_tempfile_instance + + with pytest.raises(AnsibleError, match='error occurred while writing SSH host keys!'): + with patch.object(os, 'chmod', side_effect=Exception()): + connection.close() + mock_unlink.assert_called_with(missing_ok=True) + + +@patch('ansible_collections.community.general.plugins.module_utils._filelock.FileLock.lock_file') +@patch('os.path.exists') +def 
test_close_with_invalid_host_key(mock_exists, mock_lock_file, connection): + """ Test load_system_host_keys on close with InvalidHostKey error """ + connection._any_keys_added = MagicMock(return_value=True) + connection._connected = True + connection._save_ssh_host_keys = MagicMock() + connection.keyfile = '/tmp/pct-remote-known_hosts-test' + connection.set_option('host_key_checking', True) + connection.set_option('lock_file_timeout', 5) + connection.set_option('record_host_keys', True) + connection.ssh = MagicMock() + connection.ssh.load_system_host_keys.side_effect = paramiko.hostkeys.InvalidHostKey( + "Bad Line!", Exception('Something crashed!')) + + mock_exists.return_value = False + + mock_lock_file_instance = MagicMock() + mock_lock_file.return_value = mock_lock_file_instance + mock_lock_file_instance.__enter__.return_value = None + + with pytest.raises(AnsibleConnectionFailure, match="Invalid host key: Bad Line!"): + connection.close() + + +def test_reset(connection): + """ Test connection reset """ + connection._connected = True + connection.close = MagicMock() + connection._connect = MagicMock() + + connection.reset() + + connection.close.assert_called_once() + connection._connect.assert_called_once() + + connection._connected = False + connection.reset() + assert connection.close.call_count == 1 diff --git a/tests/unit/plugins/filter/test_json_patch.py b/tests/unit/plugins/filter/test_json_patch.py new file mode 100644 index 0000000000..7bd4a08664 --- /dev/null +++ b/tests/unit/plugins/filter/test_json_patch.py @@ -0,0 +1,313 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Stanislav Meduna (@numo68) +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type # pylint: disable=C0103 + +import unittest +from ansible_collections.community.general.plugins.filter.json_patch 
import FilterModule +from ansible.errors import AnsibleFilterError + + +class TestJsonPatch(unittest.TestCase): + def setUp(self): + self.filter = FilterModule() + self.json_patch = self.filter.filters()["json_patch"] + self.json_diff = self.filter.filters()["json_diff"] + self.json_patch_recipe = self.filter.filters()["json_patch_recipe"] + + # json_patch + + def test_patch_add_to_empty(self): + result = self.json_patch({}, "add", "/a", 1) + self.assertEqual(result, {"a": 1}) + + def test_patch_add_to_dict(self): + result = self.json_patch({"b": 2}, "add", "/a", 1) + self.assertEqual(result, {"a": 1, "b": 2}) + + def test_patch_add_to_array_index(self): + result = self.json_patch([1, 2, 3], "add", "/1", 99) + self.assertEqual(result, [1, 99, 2, 3]) + + def test_patch_add_to_array_last(self): + result = self.json_patch({"a": [1, 2, 3]}, "add", "/a/-", 99) + self.assertEqual(result, {"a": [1, 2, 3, 99]}) + + def test_patch_add_from_string(self): + result = self.json_patch("[1, 2, 3]", "add", "/-", 99) + self.assertEqual(result, [1, 2, 3, 99]) + + def test_patch_path_escape(self): + result = self.json_patch({}, "add", "/x~0~1y", 99) + self.assertEqual(result, {"x~/y": 99}) + + def test_patch_remove(self): + result = self.json_patch({"a": 1, "b": {"c": 2}, "d": 3}, "remove", "/b") + self.assertEqual(result, {"a": 1, "d": 3}) + + def test_patch_replace(self): + result = self.json_patch( + {"a": 1, "b": {"c": 2}, "d": 3}, "replace", "/b", {"x": 99} + ) + self.assertEqual(result, {"a": 1, "b": {"x": 99}, "d": 3}) + + def test_patch_copy(self): + result = self.json_patch( + {"a": 1, "b": {"c": 2}, "d": 3}, "copy", "/d", **{"from": "/b"} + ) + self.assertEqual(result, {"a": 1, "b": {"c": 2}, "d": {"c": 2}}) + + def test_patch_move(self): + result = self.json_patch( + {"a": 1, "b": {"c": 2}, "d": 3}, "move", "/d", **{"from": "/b"} + ) + self.assertEqual(result, {"a": 1, "d": {"c": 2}}) + + def test_patch_test_pass(self): + result = self.json_patch({"a": 1, "b": {"c": 2}, 
"d": 3}, "test", "/b/c", 2) + self.assertEqual(result, {"a": 1, "b": {"c": 2}, "d": 3}) + + def test_patch_test_fail_none(self): + result = self.json_patch({"a": 1, "b": {"c": 2}, "d": 3}, "test", "/b/c", 99) + self.assertIsNone(result) + + def test_patch_test_fail_fail(self): + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch( + {"a": 1, "b": {"c": 2}, "d": 3}, "test", "/b/c", 99, fail_test=True + ) + self.assertTrue("json_patch: test operation failed" in str(context.exception)) + + def test_patch_remove_nonexisting(self): + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch({"a": 1, "b": {"c": 2}, "d": 3}, "remove", "/e") + self.assertEqual( + str(context.exception), + "json_patch: patch failed: can't remove a non-existent object 'e'", + ) + + def test_patch_missing_lib(self): + with unittest.mock.patch( + "ansible_collections.community.general.plugins.filter.json_patch.HAS_LIB", + False, + ): + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch({}, "add", "/a", 1) + self.assertEqual( + str(context.exception), + "You need to install 'jsonpatch' package prior to running 'json_patch' filter", + ) + + def test_patch_invalid_operation(self): + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch({}, "invalid", "/a", 1) + self.assertEqual( + str(context.exception), + "json_patch: unsupported 'op' argument: invalid", + ) + + def test_patch_arg_checking(self): + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch(1, "add", "/a", 1) + self.assertEqual( + str(context.exception), + "json_patch: input is not dictionary, list or string", + ) + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch({}, 1, "/a", 1) + self.assertEqual( + str(context.exception), + "json_patch: 'op' argument is not a string", + ) + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch({}, None, "/a", 1) + self.assertEqual( + str(context.exception), + 
"json_patch: 'op' argument is not a string", + ) + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch({}, "add", 1, 1) + self.assertEqual( + str(context.exception), + "json_patch: 'path' argument is not a string", + ) + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch({}, "copy", "/a", **{"from": 1}) + self.assertEqual( + str(context.exception), + "json_patch: 'from' argument is not a string", + ) + + def test_patch_extra_kwarg(self): + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch({}, "add", "/a", 1, invalid=True) + self.assertEqual( + str(context.exception), + "json_patch: unexpected keywords arguments: invalid", + ) + + def test_patch_missing_from(self): + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch({}, "copy", "/a", 1) + self.assertEqual( + str(context.exception), + "json_patch: 'from' argument missing for 'copy' operation", + ) + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch({}, "move", "/a", 1) + self.assertEqual( + str(context.exception), + "json_patch: 'from' argument missing for 'move' operation", + ) + + def test_patch_add_to_dict_binary(self): + result = self.json_patch(b'{"b": 2}', "add", "/a", 1) + self.assertEqual(result, {"a": 1, "b": 2}) + result = self.json_patch(bytearray(b'{"b": 2}'), "add", "/a", 1) + self.assertEqual(result, {"a": 1, "b": 2}) + + # json_patch_recipe + + def test_patch_recipe_process(self): + result = self.json_patch_recipe( + {}, + [ + {"op": "add", "path": "/foo", "value": 1}, + {"op": "add", "path": "/bar", "value": []}, + {"op": "add", "path": "/bar/-", "value": 2}, + {"op": "add", "path": "/bar/0", "value": 1}, + {"op": "remove", "path": "/bar/0"}, + {"op": "move", "from": "/foo", "path": "/baz"}, + {"op": "copy", "from": "/baz", "path": "/bax"}, + {"op": "copy", "from": "/baz", "path": "/bay"}, + {"op": "replace", "path": "/baz", "value": [10, 20, 30]}, + {"op": "add", "path": "/foo", "value": 
1}, + {"op": "add", "path": "/foo", "value": 1}, + {"op": "test", "path": "/baz/1", "value": 20}, + ], + ) + self.assertEqual( + result, {"bar": [2], "bax": 1, "bay": 1, "baz": [10, 20, 30], "foo": 1} + ) + + def test_patch_recipe_test_fail(self): + result = self.json_patch_recipe( + {}, + [ + {"op": "add", "path": "/bar", "value": []}, + {"op": "add", "path": "/bar/-", "value": 2}, + {"op": "test", "path": "/bar/0", "value": 20}, + {"op": "add", "path": "/bar/0", "value": 1}, + ], + ) + self.assertIsNone(result) + + def test_patch_recipe_missing_lib(self): + with unittest.mock.patch( + "ansible_collections.community.general.plugins.filter.json_patch.HAS_LIB", + False, + ): + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch_recipe({}, []) + self.assertEqual( + str(context.exception), + "You need to install 'jsonpatch' package prior to running 'json_patch_recipe' filter", + ) + + def test_patch_recipe_missing_from(self): + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch_recipe({}, [{"op": "copy", "path": "/a"}]) + self.assertEqual( + str(context.exception), + "json_patch_recipe: 'from' argument missing for 'copy' operation", + ) + + def test_patch_recipe_incorrect_type(self): + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch_recipe({}, "copy") + self.assertEqual( + str(context.exception), + "json_patch_recipe: 'operations' needs to be a list", + ) + + def test_patch_recipe_test_fail_none(self): + result = self.json_patch_recipe( + {"a": 1, "b": {"c": 2}, "d": 3}, + [{"op": "test", "path": "/b/c", "value": 99}], + ) + self.assertIsNone(result) + + def test_patch_recipe_test_fail_fail_pos(self): + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch_recipe( + {"a": 1, "b": {"c": 2}, "d": 3}, + [{"op": "test", "path": "/b/c", "value": 99}], + True, + ) + self.assertTrue( + "json_patch_recipe: test operation failed" in str(context.exception) + ) + + def 
test_patch_recipe_test_fail_fail_kw(self): + with self.assertRaises(AnsibleFilterError) as context: + self.json_patch_recipe( + {"a": 1, "b": {"c": 2}, "d": 3}, + [{"op": "test", "path": "/b/c", "value": 99}], + fail_test=True, + ) + self.assertTrue( + "json_patch_recipe: test operation failed" in str(context.exception) + ) + + # json_diff + + def test_diff_process(self): + result = self.json_diff( + {"foo": 1, "bar": {"baz": 2}, "baw": [1, 2, 3], "hello": "day"}, + { + "foo": 1, + "bar": {"baz": 2}, + "baw": [1, 3], + "baq": {"baz": 2}, + "hello": "night", + }, + ) + + # Sort as the order is unstable + self.assertEqual( + sorted(result, key=lambda k: k["path"]), + [ + {"op": "add", "path": "/baq", "value": {"baz": 2}}, + {"op": "remove", "path": "/baw/1"}, + {"op": "replace", "path": "/hello", "value": "night"}, + ], + ) + + def test_diff_missing_lib(self): + with unittest.mock.patch( + "ansible_collections.community.general.plugins.filter.json_patch.HAS_LIB", + False, + ): + with self.assertRaises(AnsibleFilterError) as context: + self.json_diff({}, {}) + self.assertEqual( + str(context.exception), + "You need to install 'jsonpatch' package prior to running 'json_diff' filter", + ) + + def test_diff_arg_checking(self): + with self.assertRaises(AnsibleFilterError) as context: + self.json_diff(1, {}) + self.assertEqual( + str(context.exception), "json_diff: input is not dictionary, list or string" + ) + with self.assertRaises(AnsibleFilterError) as context: + self.json_diff({}, 1) + self.assertEqual( + str(context.exception), + "json_diff: target is not dictionary, list or string", + ) diff --git a/tests/unit/plugins/inventory/test_iocage.py b/tests/unit/plugins/inventory/test_iocage.py index 011fc49388..98492ff9aa 100644 --- a/tests/unit/plugins/inventory/test_iocage.py +++ b/tests/unit/plugins/inventory/test_iocage.py @@ -36,16 +36,14 @@ def inventory(): def load_txt_data(path): - f = open(path, 'r') - s = f.read() - f.close() + with open(path, 'r') as f: + s = 
f.read() return s def load_yml_data(path): - f = open(path, 'r') - d = yaml.safe_load(f) - f.close() + with open(path, 'r') as f: + d = yaml.safe_load(f) return d diff --git a/tests/unit/plugins/lookup/onepassword_common.py b/tests/unit/plugins/lookup/onepassword_common.py index bf0cc35c12..0759e0abff 100644 --- a/tests/unit/plugins/lookup/onepassword_common.py +++ b/tests/unit/plugins/lookup/onepassword_common.py @@ -293,3 +293,39 @@ MOCK_ENTRIES = { }, ], } + +SSH_KEY_MOCK_ENTRIES = [ + # loads private key in PKCS#8 format by default + { + "vault_name": "Personal", + "queries": ["ssh key"], + "expected": [ + "-----BEGIN PRIVATE KEY-----\n..........=\n-----END PRIVATE KEY-----\n" + ], + "output": load_file("ssh_key_output.json"), + }, + # loads private key in PKCS#8 format because ssh_format=false + { + "vault_name": "Personal", + "queries": ["ssh key"], + "kwargs": { + "ssh_format": False, + }, + "expected": [ + "-----BEGIN PRIVATE KEY-----\n..........=\n-----END PRIVATE KEY-----\n" + ], + "output": load_file("ssh_key_output.json"), + }, + # loads private key in ssh format + { + "vault_name": "Personal", + "queries": ["ssh key"], + "kwargs": { + "ssh_format": True, + }, + "expected": [ + "-----BEGIN OPENSSH PRIVATE KEY-----\r\n.....\r\n-----END OPENSSH PRIVATE KEY-----\r\n" + ], + "output": load_file("ssh_key_output.json"), + }, +] diff --git a/tests/unit/plugins/lookup/onepassword_fixtures/ssh_key_output.json b/tests/unit/plugins/lookup/onepassword_fixtures/ssh_key_output.json new file mode 100644 index 0000000000..f14066b941 --- /dev/null +++ b/tests/unit/plugins/lookup/onepassword_fixtures/ssh_key_output.json @@ -0,0 +1,57 @@ +{ + "id": "wdtryfeh3jlx2dlanqgg4dqxmy", + "title": "ssh key", + "version": 1, + "vault": { + "id": "5auhrjy66hc7ndhe2wvym6gadv", + "name": "Personal" + }, + "category": "SSH_KEY", + "last_edited_by": "LSGPJERUYBH7BFPHMZ2KKGL6AU", + "created_at": "2025-01-10T16:57:16Z", + "updated_at": "2025-01-10T16:57:16Z", + "additional_information":
"SHA256:frHmQAgblahD5HHgNj2O714", + "fields": [ + { + "id": "public_key", + "type": "STRING", + "label": "public key", + "value": "ssh-ed255.....", + "reference": "op://Personal/ssh key/public key" + }, + { + "id": "fingerprint", + "type": "STRING", + "label": "fingerprint", + "value": "SHA256:frHmQAgy7zBKeFDxHMW0QltZ/5O4N8gD5HHgNj2O614", + "reference": "op://Personal/ssh key/fingerprint" + }, + { + "id": "private_key", + "type": "SSHKEY", + "label": "private key", + "value": "-----BEGIN PRIVATE KEY-----\n..........=\n-----END PRIVATE KEY-----\n", + "reference": "op://Personal/ssh key/private key", + "ssh_formats": { + "openssh": { + "reference": "op://Personal/ssh key/private key?ssh-format=openssh", + "value": "-----BEGIN OPENSSH PRIVATE KEY-----\r\n.....\r\n-----END OPENSSH PRIVATE KEY-----\r\n" + } + } + }, + { + "id": "key_type", + "type": "STRING", + "label": "key type", + "value": "ed25519", + "reference": "op://Personal/ssh key/key type" + }, + { + "id": "notesPlain", + "type": "STRING", + "purpose": "NOTES", + "label": "notesPlain", + "reference": "op://Personal/ssh key/notesPlain" + } + ] + } \ No newline at end of file diff --git a/tests/unit/plugins/lookup/onepassword_fixtures/ssh_key_output.json.license b/tests/unit/plugins/lookup/onepassword_fixtures/ssh_key_output.json.license new file mode 100644 index 0000000000..59021c33ed --- /dev/null +++ b/tests/unit/plugins/lookup/onepassword_fixtures/ssh_key_output.json.license @@ -0,0 +1,3 @@ +GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +SPDX-License-Identifier: GPL-3.0-or-later +SPDX-FileCopyrightText: 2025, Ansible Project diff --git a/tests/unit/plugins/lookup/test_onepassword_ssh_key.py b/tests/unit/plugins/lookup/test_onepassword_ssh_key.py new file mode 100644 index 0000000000..864f58db6e --- /dev/null +++ b/tests/unit/plugins/lookup/test_onepassword_ssh_key.py @@ -0,0 +1,30 @@ +# Copyright (c) 2025 Ansible Project +# GNU General 
Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import (absolute_import, division, print_function) +__metaclass__ = type + +import json +import pytest + +from .onepassword_common import SSH_KEY_MOCK_ENTRIES + +from ansible.plugins.loader import lookup_loader + + +@pytest.mark.parametrize( + ("vault", "queries", "kwargs", "output", "expected"), + ( + (item["vault_name"], item["queries"], item.get("kwargs", {}), item["output"], item["expected"]) + for item in SSH_KEY_MOCK_ENTRIES + ) +) +def test_ssh_key(mocker, vault, queries, kwargs, output, expected): + mocker.patch("ansible_collections.community.general.plugins.lookup.onepassword.OnePass.assert_logged_in", return_value=True) + mocker.patch("ansible_collections.community.general.plugins.lookup.onepassword.OnePassCLIBase._run", return_value=(0, json.dumps(output), "")) + + op_lookup = lookup_loader.get("community.general.onepassword_ssh_key") + result = op_lookup.run(queries, vault=vault, **kwargs) + + assert result == expected diff --git a/tests/unit/plugins/module_utils/identity/keycloak/test_keycloak_connect.py b/tests/unit/plugins/module_utils/identity/keycloak/test_keycloak_connect.py index 9a816cfe25..bbf5d6265f 100644 --- a/tests/unit/plugins/module_utils/identity/keycloak/test_keycloak_connect.py +++ b/tests/unit/plugins/module_utils/identity/keycloak/test_keycloak_connect.py @@ -160,6 +160,6 @@ def test_json_without_token_returned(mock_json_without_token_returned): with pytest.raises(KeycloakError) as raised_error: get_token(module_params_creds) assert str(raised_error.value) == ( - 'Could not obtain access token from http://keycloak.url' - '/auth/realms/master/protocol/openid-connect/token' + 'API did not include access_token field in response from ' + 'http://keycloak.url/auth/realms/master/protocol/openid-connect/token' ) diff --git a/tests/unit/plugins/modules/helper.py 
b/tests/unit/plugins/modules/helper.py index 8071bc2aa9..2464f280de 100644 --- a/tests/unit/plugins/modules/helper.py +++ b/tests/unit/plugins/modules/helper.py @@ -35,7 +35,7 @@ class Helper(object): test_module = sys.modules[test_module_name] if test_spec is None: test_spec = test_module.__file__.replace('.py', '.yaml') - return Helper.from_file(test_module, ansible_module, test_spec) + return Helper.from_file(test_module, ansible_module, test_spec, mocks=mocks) def add_func_to_test_module(self, name, func): setattr(self.test_module, name, func) diff --git a/tests/unit/plugins/modules/test_cpanm.yaml b/tests/unit/plugins/modules/test_cpanm.yaml index 979e85dafe..467429b10d 100644 --- a/tests/unit/plugins/modules/test_cpanm.yaml +++ b/tests/unit/plugins/modules/test_cpanm.yaml @@ -8,356 +8,396 @@ anchors: environ_true: &env-def-true {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true} environ_false: &env-def-false {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: false} test_cases: -- id: install_dancer_compatibility - input: - name: Dancer - mode: compatibility - output: - changed: true - cpanm_version: "1.7047" - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" - - command: [/testbin/perl, -le, 'use Dancer;'] - environ: *env-def-false - rc: 2 - out: "" - err: "error, not installed" - - command: [/testbin/cpanm, Dancer] - environ: *env-def-true - rc: 0 - out: "" - err: "" -- id: install_dancer_already_installed_compatibility - input: - name: Dancer - mode: compatibility - output: - changed: false - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" - - command: [/testbin/perl, -le, 'use Dancer;'] - environ: 
*env-def-false - rc: 0 - out: "" - err: "" -- id: install_dancer - input: - name: Dancer - output: - changed: true - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" - - command: [/testbin/cpanm, Dancer] - environ: *env-def-true - rc: 0 - out: "" - err: "" -- id: install_distribution_file_compatibility - input: - name: MIYAGAWA/Plack-0.99_05.tar.gz - mode: compatibility - output: - changed: true - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" - - command: [/testbin/cpanm, MIYAGAWA/Plack-0.99_05.tar.gz] - environ: *env-def-true - rc: 0 - out: "" - err: "" -- id: install_distribution_file - input: - name: MIYAGAWA/Plack-0.99_05.tar.gz - output: - changed: true - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" - - command: [/testbin/cpanm, MIYAGAWA/Plack-0.99_05.tar.gz] - environ: *env-def-true - rc: 0 - out: "" - err: "" -- id: install_into_locallib - input: - name: Dancer - mode: new - locallib: /srv/webapps/my_app/extlib - output: - changed: true - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" - - command: [/testbin/cpanm, --local-lib, /srv/webapps/my_app/extlib, Dancer] - environ: *env-def-true - rc: 0 - out: "" - err: "" -- id: install_from_local_directory - input: - from_path: /srv/webapps/my_app/src/ - mode: new - output: - changed: true - mocks: - run_command: - - 
command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" - - command: [/testbin/cpanm, /srv/webapps/my_app/src/] - environ: *env-def-true - rc: 0 - out: "" - err: "" -- id: install_into_locallib_no_unit_testing - input: - name: Dancer - notest: true - mode: new - locallib: /srv/webapps/my_app/extlib - output: - changed: true - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" - - command: [/testbin/cpanm, --notest, --local-lib, /srv/webapps/my_app/extlib, Dancer] - environ: *env-def-true - rc: 0 - out: "" - err: "" -- id: install_from_mirror - input: - name: Dancer - mode: new - mirror: "http://cpan.cpantesters.org/" - output: - changed: true - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" - - command: [/testbin/cpanm, --mirror, "http://cpan.cpantesters.org/", Dancer] - environ: *env-def-true - rc: 0 - out: "" - err: "" -- id: install_into_system_lib - input: - name: Dancer - mode: new - system_lib: true - output: - failed: true - mocks: - run_command: [] -- id: install_minversion_implicit - input: - name: Dancer - mode: new - version: "1.0" - output: - changed: true - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" - - command: [/testbin/cpanm, Dancer~1.0] - environ: *env-def-true - rc: 0 - out: "" - err: "" -- id: install_minversion_explicit - input: - name: Dancer - mode: new - version: "~1.5" - output: - 
changed: true - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" - - command: [/testbin/cpanm, Dancer~1.5] - environ: *env-def-true - rc: 0 - out: "" - err: "" -- id: install_specific_version - input: - name: Dancer - mode: new - version: "@1.7" - output: - changed: true - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" - - command: [/testbin/cpanm, Dancer@1.7] - environ: *env-def-true - rc: 0 - out: "" - err: "" -- id: install_specific_version_from_file_error - input: - name: MIYAGAWA/Plack-0.99_05.tar.gz - mode: new - version: "@1.7" - output: - failed: true - msg: parameter 'version' must not be used when installing from a file - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" -- id: install_specific_version_from_directory_error - input: - from_path: ~/ - mode: new - version: "@1.7" - output: - failed: true - msg: parameter 'version' must not be used when installing from a directory - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" -- id: install_specific_version_from_git_url_explicit - input: - name: "git://github.com/plack/Plack.git" - mode: new - version: "@1.7" - output: - changed: true - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 
5.041005 (/usr/local/bin/perl) - err: "" - - command: [/testbin/cpanm, "git://github.com/plack/Plack.git@1.7"] - environ: *env-def-true - rc: 0 - out: "" - err: "" -- id: install_specific_version_from_git_url_implicit - input: - name: "git://github.com/plack/Plack.git" - mode: new - version: "2.5" - output: - changed: true - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" - - command: [/testbin/cpanm, "git://github.com/plack/Plack.git@2.5"] - environ: *env-def-true - rc: 0 - out: "" - err: "" -- id: install_version_operator_from_git_url_error - input: - name: "git://github.com/plack/Plack.git" - mode: new - version: "~2.5" - output: - failed: true - msg: operator '~' not allowed in version parameter when installing from git repository - mocks: - run_command: - - command: [/testbin/cpanm, --version] - environ: *env-def-true - rc: 0 - out: | - cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) - perl version 5.041005 (/usr/local/bin/perl) - err: "" + - id: install_dancer_compatibility + input: + name: Dancer + mode: compatibility + output: + changed: true + cpanm_version: '1.7047' + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/perl, -le, use Dancer;] + environ: *env-def-false + rc: 2 + out: '' + err: error, not installed + - command: [/testbin/cpanm, Dancer] + environ: *env-def-true + rc: 0 + out: '' + err: '' + - id: install_dancer_already_installed_compatibility + input: + name: Dancer + mode: compatibility + output: + changed: false + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 
(/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/perl, -le, use Dancer;] + environ: *env-def-false + rc: 0 + out: '' + err: '' + - id: install_dancer + input: + name: Dancer + output: + changed: true + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/cpanm, Dancer] + environ: *env-def-true + rc: 0 + out: '' + err: '' + - id: install_distribution_file_compatibility + input: + name: MIYAGAWA/Plack-0.99_05.tar.gz + mode: compatibility + output: + changed: true + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/cpanm, MIYAGAWA/Plack-0.99_05.tar.gz] + environ: *env-def-true + rc: 0 + out: '' + err: '' + - id: install_distribution_file + input: + name: MIYAGAWA/Plack-0.99_05.tar.gz + output: + changed: true + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/cpanm, MIYAGAWA/Plack-0.99_05.tar.gz] + environ: *env-def-true + rc: 0 + out: '' + err: '' + - id: install_into_locallib + input: + name: Dancer + mode: new + locallib: /srv/webapps/my_app/extlib + output: + changed: true + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/cpanm, --local-lib, /srv/webapps/my_app/extlib, Dancer] + environ: *env-def-true + rc: 0 + out: '' + err: '' + - id: 
install_from_local_directory + input: + from_path: /srv/webapps/my_app/src/ + mode: new + output: + changed: true + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/cpanm, /srv/webapps/my_app/src/] + environ: *env-def-true + rc: 0 + out: '' + err: '' + - id: install_into_locallib_no_unit_testing + input: + name: Dancer + notest: true + mode: new + locallib: /srv/webapps/my_app/extlib + output: + changed: true + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/cpanm, --notest, --local-lib, /srv/webapps/my_app/extlib, Dancer] + environ: *env-def-true + rc: 0 + out: '' + err: '' + - id: install_from_mirror + input: + name: Dancer + mode: new + mirror: http://cpan.cpantesters.org/ + output: + changed: true + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/cpanm, --mirror, http://cpan.cpantesters.org/, Dancer] + environ: *env-def-true + rc: 0 + out: '' + err: '' + - id: install_into_system_lib + input: + name: Dancer + mode: new + system_lib: true + output: + failed: true + mocks: + run_command: [] + - id: install_minversion_implicit + input: + name: Dancer + mode: new + version: '1.0' + output: + changed: true + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/cpanm, Dancer~1.0] + environ: 
*env-def-true + rc: 0 + out: '' + err: '' + - id: install_minversion_explicit + input: + name: Dancer + mode: new + version: ~1.5 + output: + changed: true + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/cpanm, Dancer~1.5] + environ: *env-def-true + rc: 0 + out: '' + err: '' + - id: install_specific_version + input: + name: Dancer + mode: new + version: '@1.7' + output: + changed: true + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/cpanm, Dancer@1.7] + environ: *env-def-true + rc: 0 + out: '' + err: '' + - id: install_specific_version_from_file_error + input: + name: MIYAGAWA/Plack-0.99_05.tar.gz + mode: new + version: '@1.7' + output: + failed: true + msg: parameter 'version' must not be used when installing from a file + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - id: install_specific_version_from_directory_error + input: + from_path: ~/ + mode: new + version: '@1.7' + output: + failed: true + msg: parameter 'version' must not be used when installing from a directory + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - id: install_specific_version_from_git_url_explicit + input: + name: git://github.com/plack/Plack.git + mode: new + version: '@1.7' + output: + changed: true + mocks: + run_command: + - command: 
[/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/cpanm, git://github.com/plack/Plack.git@1.7] + environ: *env-def-true + rc: 0 + out: '' + err: '' + - id: install_specific_version_from_git_url_implicit + input: + name: git://github.com/plack/Plack.git + mode: new + version: '2.5' + output: + changed: true + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/cpanm, git://github.com/plack/Plack.git@2.5] + environ: *env-def-true + rc: 0 + out: '' + err: '' + - id: install_version_operator_from_git_url_error + input: + name: git://github.com/plack/Plack.git + mode: new + version: ~2.5 + output: + failed: true + msg: operator '~' not allowed in version parameter when installing from git repository + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - id: install_dancer_with_recommends + input: + name: Dancer2 + install_recommendations: true + output: + changed: true + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/cpanm, --with-recommends, Dancer2] + environ: *env-def-true + rc: 0 + out: '' + err: '' + - id: install_dancer_with_suggests + input: + name: Dancer2 + install_suggestions: true + output: + changed: true + mocks: + run_command: + - command: [/testbin/cpanm, --version] + environ: *env-def-true + rc: 0 + out: | + cpanm (App::cpanminus) 
version 1.7047 (/usr/local/bin/cpanm) + perl version 5.041005 (/usr/local/bin/perl) + err: '' + - command: [/testbin/cpanm, --with-suggests, Dancer2] + environ: *env-def-true + rc: 0 + out: '' + err: '' diff --git a/tests/unit/plugins/modules/test_django_check.yaml b/tests/unit/plugins/modules/test_django_check.yaml index 1b8b36a52c..fb39b6d62f 100644 --- a/tests/unit/plugins/modules/test_django_check.yaml +++ b/tests/unit/plugins/modules/test_django_check.yaml @@ -7,40 +7,40 @@ anchors: environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true} test_cases: -- id: success - input: - settings: whatever.settings - output: - version: "5.1.2" - mocks: - run_command: - - command: [/testbin/python, -m, django, --version] - environ: *env-def - rc: 0 - out: "5.1.2\n" - err: "" - - command: [/testbin/python, -m, django, check, --no-color, --settings=whatever.settings] - environ: *env-def - rc: 0 - out: "whatever\n" - err: "" -- id: multiple_databases - input: - settings: whatever.settings - database: - - abc - - def - output: - version: "5.1.2" - mocks: - run_command: - - command: [/testbin/python, -m, django, --version] - environ: *env-def - rc: 0 - out: "5.1.2\n" - err: "" - - command: [/testbin/python, -m, django, check, --no-color, --settings=whatever.settings, --database, abc, --database, def] - environ: *env-def - rc: 0 - out: "whatever\n" - err: "" + - id: success + input: + settings: whatever.settings + output: + version: 5.1.2 + mocks: + run_command: + - command: [/testbin/python, -m, django, --version] + environ: *env-def + rc: 0 + out: "5.1.2\n" + err: '' + - command: [/testbin/python, -m, django, check, --no-color, --settings=whatever.settings] + environ: *env-def + rc: 0 + out: "whatever\n" + err: '' + - id: multiple_databases + input: + settings: whatever.settings + database: + - abc + - def + output: + version: 5.1.2 + mocks: + run_command: + - command: [/testbin/python, -m, django, --version] + environ: *env-def + rc: 0 + out: "5.1.2\n" + 
err: '' + - command: [/testbin/python, -m, django, check, --no-color, --settings=whatever.settings, --database, abc, --database, def] + environ: *env-def + rc: 0 + out: "whatever\n" + err: '' diff --git a/tests/unit/plugins/modules/test_django_command.yaml b/tests/unit/plugins/modules/test_django_command.yaml index a98182385e..10da8753bd 100644 --- a/tests/unit/plugins/modules/test_django_command.yaml +++ b/tests/unit/plugins/modules/test_django_command.yaml @@ -7,47 +7,47 @@ anchors: environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true} test_cases: -- id: command_success - input: - command: check - extra_args: - - babaloo - - yaba - - daba - - doo - settings: whatever.settings - mocks: - run_command: - - command: [/testbin/python, -m, django, --version] - environ: *env-def - rc: 0 - out: "5.1.2\n" - err: "" - - command: [/testbin/python, -m, django, check, --no-color, --settings=whatever.settings, babaloo, yaba, daba, doo] - environ: *env-def - rc: 0 - out: "whatever\n" - err: "" -- id: command_fail - input: - command: check - extra_args: - - babaloo - - yaba - - daba - - doo - settings: whatever.settings - output: - failed: true - mocks: - run_command: - - command: [/testbin/python, -m, django, --version] - environ: *env-def - rc: 0 - out: "5.1.2\n" - err: "" - - command: [/testbin/python, -m, django, check, --no-color, --settings=whatever.settings, babaloo, yaba, daba, doo] - environ: *env-def - rc: 1 - out: "whatever\n" - err: "" + - id: command_success + input: + command: check + extra_args: + - babaloo + - yaba + - daba + - doo + settings: whatever.settings + mocks: + run_command: + - command: [/testbin/python, -m, django, --version] + environ: *env-def + rc: 0 + out: "5.1.2\n" + err: '' + - command: [/testbin/python, -m, django, check, --no-color, --settings=whatever.settings, babaloo, yaba, daba, doo] + environ: *env-def + rc: 0 + out: "whatever\n" + err: '' + - id: command_fail + input: + command: check + extra_args: + - babaloo + - 
yaba + - daba + - doo + settings: whatever.settings + output: + failed: true + mocks: + run_command: + - command: [/testbin/python, -m, django, --version] + environ: *env-def + rc: 0 + out: "5.1.2\n" + err: '' + - command: [/testbin/python, -m, django, check, --no-color, --settings=whatever.settings, babaloo, yaba, daba, doo] + environ: *env-def + rc: 1 + out: "whatever\n" + err: '' diff --git a/tests/unit/plugins/modules/test_django_createcachetable.yaml b/tests/unit/plugins/modules/test_django_createcachetable.yaml index e800fb65ac..b8056e1b2e 100644 --- a/tests/unit/plugins/modules/test_django_createcachetable.yaml +++ b/tests/unit/plugins/modules/test_django_createcachetable.yaml @@ -7,18 +7,18 @@ anchors: environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true} test_cases: -- id: command_success - input: - settings: whatever.settings - mocks: - run_command: - - command: [/testbin/python, -m, django, --version] - environ: *env-def - rc: 0 - out: "5.1.2\n" - err: "" - - command: [/testbin/python, -m, django, createcachetable, --no-color, --settings=whatever.settings, --noinput, --database=default] - environ: *env-def - rc: 0 - out: "whatever\n" - err: "" + - id: command_success + input: + settings: whatever.settings + mocks: + run_command: + - command: [/testbin/python, -m, django, --version] + environ: *env-def + rc: 0 + out: "5.1.2\n" + err: '' + - command: [/testbin/python, -m, django, createcachetable, --no-color, --settings=whatever.settings, --noinput, --database=default] + environ: *env-def + rc: 0 + out: "whatever\n" + err: '' diff --git a/tests/unit/plugins/modules/test_facter_facts.yaml b/tests/unit/plugins/modules/test_facter_facts.yaml index ffce162b27..89a98714c5 100644 --- a/tests/unit/plugins/modules/test_facter_facts.yaml +++ b/tests/unit/plugins/modules/test_facter_facts.yaml @@ -7,39 +7,39 @@ anchors: environ: &env-def {check_rc: true} test_cases: -- id: simple run - output: - ansible_facts: - facter: - a: 1 - b: 2 - c: 3 - 
mocks: - run_command: - - command: [/testbin/facter, --json] - environ: *env-def - rc: 0 - out: > - { "a": 1, "b": 2, "c": 3 } - err: "" -- id: with args - input: - arguments: - - -p - - system_uptime - - timezone - - is_virtual - output: - ansible_facts: - facter: - a: 1 - b: 2 - c: 3 - mocks: - run_command: - - command: [/testbin/facter, --json, -p, system_uptime, timezone, is_virtual] - environ: *env-def - rc: 0 - out: > - { "a": 1, "b": 2, "c": 3 } - err: "" + - id: simple run + output: + ansible_facts: + facter: + a: 1 + b: 2 + c: 3 + mocks: + run_command: + - command: [/testbin/facter, --json] + environ: *env-def + rc: 0 + out: > + { "a": 1, "b": 2, "c": 3 } + err: '' + - id: with args + input: + arguments: + - -p + - system_uptime + - timezone + - is_virtual + output: + ansible_facts: + facter: + a: 1 + b: 2 + c: 3 + mocks: + run_command: + - command: [/testbin/facter, --json, -p, system_uptime, timezone, is_virtual] + environ: *env-def + rc: 0 + out: > + { "a": 1, "b": 2, "c": 3 } + err: '' diff --git a/tests/unit/plugins/modules/test_gconftool2.yaml b/tests/unit/plugins/modules/test_gconftool2.yaml index 12bf9099f4..19d389247f 100644 --- a/tests/unit/plugins/modules/test_gconftool2.yaml +++ b/tests/unit/plugins/modules/test_gconftool2.yaml @@ -7,147 +7,147 @@ anchors: environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true} test_cases: -- id: test_simple_element_set - input: - state: present - key: /desktop/gnome/background/picture_filename - value: 200 - value_type: int - output: - new_value: '200' - changed: true - version: "3.2.6" - mocks: - run_command: - - command: [/testbin/gconftool-2, --version] - environ: *env-def - rc: 0 - out: "3.2.6\n" - err: "" - - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename] - environ: *env-def - rc: 0 - out: "100\n" - err: "" - - command: [/testbin/gconftool-2, --type, int, --set, /desktop/gnome/background/picture_filename, "200"] - environ: *env-def - rc: 0 - out: "" 
- err: "" - - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename] - environ: *env-def - rc: 0 - out: "200\n" - err: "" -- id: test_simple_element_set_idempotency_int - input: - state: present - key: /desktop/gnome/background/picture_filename - value: 200 - value_type: int - output: - new_value: '200' - changed: false - version: "3.2.5" - mocks: - run_command: - - command: [/testbin/gconftool-2, --version] - environ: *env-def - rc: 0 - out: "3.2.5\n" - err: "" - - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename] - environ: *env-def - rc: 0 - out: "200\n" - err: "" - - command: [/testbin/gconftool-2, --type, int, --set, /desktop/gnome/background/picture_filename, "200"] - environ: *env-def - rc: 0 - out: "" - err: "" - - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename] - environ: *env-def - rc: 0 - out: "200\n" - err: "" -- id: test_simple_element_set_idempotency_bool - input: - state: present - key: /apps/gnome_settings_daemon/screensaver/start_screensaver - value: false - value_type: bool - output: - new_value: 'false' - changed: false - version: "3.2.4" - mocks: - run_command: - - command: [/testbin/gconftool-2, --version] - environ: *env-def - rc: 0 - out: "3.2.4\n" - err: "" - - command: [/testbin/gconftool-2, --get, /apps/gnome_settings_daemon/screensaver/start_screensaver] - environ: *env-def - rc: 0 - out: "false\n" - err: "" - - command: [/testbin/gconftool-2, --type, bool, --set, /apps/gnome_settings_daemon/screensaver/start_screensaver, "False"] - environ: *env-def - rc: 0 - out: "" - err: "" - - command: [/testbin/gconftool-2, --get, /apps/gnome_settings_daemon/screensaver/start_screensaver] - environ: *env-def - rc: 0 - out: "false\n" - err: "" -- id: test_simple_element_unset - input: - state: absent - key: /desktop/gnome/background/picture_filename - output: - new_value: - changed: true - mocks: - run_command: - - command: [/testbin/gconftool-2, 
--version] - environ: *env-def - rc: 0 - out: "3.2.4\n" - err: "" - - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename] - environ: *env-def - rc: 0 - out: "200\n" - err: "" - - command: [/testbin/gconftool-2, --unset, /desktop/gnome/background/picture_filename] - environ: *env-def - rc: 0 - out: "" - err: "" -- id: test_simple_element_unset_idempotency - input: - state: absent - key: /apps/gnome_settings_daemon/screensaver/start_screensaver - output: - new_value: - changed: false - mocks: - run_command: - - command: [/testbin/gconftool-2, --version] - environ: *env-def - rc: 0 - out: "3.2.4\n" - err: "" - - command: [/testbin/gconftool-2, --get, /apps/gnome_settings_daemon/screensaver/start_screensaver] - environ: *env-def - rc: 0 - out: "" - err: "" - - command: [/testbin/gconftool-2, --unset, /apps/gnome_settings_daemon/screensaver/start_screensaver] - environ: *env-def - rc: 0 - out: "" - err: "" + - id: test_simple_element_set + input: + state: present + key: /desktop/gnome/background/picture_filename + value: 200 + value_type: int + output: + new_value: '200' + changed: true + version: 3.2.6 + mocks: + run_command: + - command: [/testbin/gconftool-2, --version] + environ: *env-def + rc: 0 + out: "3.2.6\n" + err: '' + - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename] + environ: *env-def + rc: 0 + out: "100\n" + err: '' + - command: [/testbin/gconftool-2, --type, int, --set, /desktop/gnome/background/picture_filename, '200'] + environ: *env-def + rc: 0 + out: '' + err: '' + - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename] + environ: *env-def + rc: 0 + out: "200\n" + err: '' + - id: test_simple_element_set_idempotency_int + input: + state: present + key: /desktop/gnome/background/picture_filename + value: 200 + value_type: int + output: + new_value: '200' + changed: false + version: 3.2.5 + mocks: + run_command: + - command: [/testbin/gconftool-2, --version] + 
environ: *env-def + rc: 0 + out: "3.2.5\n" + err: '' + - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename] + environ: *env-def + rc: 0 + out: "200\n" + err: '' + - command: [/testbin/gconftool-2, --type, int, --set, /desktop/gnome/background/picture_filename, '200'] + environ: *env-def + rc: 0 + out: '' + err: '' + - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename] + environ: *env-def + rc: 0 + out: "200\n" + err: '' + - id: test_simple_element_set_idempotency_bool + input: + state: present + key: /apps/gnome_settings_daemon/screensaver/start_screensaver + value: false + value_type: bool + output: + new_value: 'false' + changed: false + version: 3.2.4 + mocks: + run_command: + - command: [/testbin/gconftool-2, --version] + environ: *env-def + rc: 0 + out: "3.2.4\n" + err: '' + - command: [/testbin/gconftool-2, --get, /apps/gnome_settings_daemon/screensaver/start_screensaver] + environ: *env-def + rc: 0 + out: "false\n" + err: '' + - command: [/testbin/gconftool-2, --type, bool, --set, /apps/gnome_settings_daemon/screensaver/start_screensaver, 'False'] + environ: *env-def + rc: 0 + out: '' + err: '' + - command: [/testbin/gconftool-2, --get, /apps/gnome_settings_daemon/screensaver/start_screensaver] + environ: *env-def + rc: 0 + out: "false\n" + err: '' + - id: test_simple_element_unset + input: + state: absent + key: /desktop/gnome/background/picture_filename + output: + new_value: + changed: true + mocks: + run_command: + - command: [/testbin/gconftool-2, --version] + environ: *env-def + rc: 0 + out: "3.2.4\n" + err: '' + - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename] + environ: *env-def + rc: 0 + out: "200\n" + err: '' + - command: [/testbin/gconftool-2, --unset, /desktop/gnome/background/picture_filename] + environ: *env-def + rc: 0 + out: '' + err: '' + - id: test_simple_element_unset_idempotency + input: + state: absent + key: 
/apps/gnome_settings_daemon/screensaver/start_screensaver + output: + new_value: + changed: false + mocks: + run_command: + - command: [/testbin/gconftool-2, --version] + environ: *env-def + rc: 0 + out: "3.2.4\n" + err: '' + - command: [/testbin/gconftool-2, --get, /apps/gnome_settings_daemon/screensaver/start_screensaver] + environ: *env-def + rc: 0 + out: '' + err: '' + - command: [/testbin/gconftool-2, --unset, /apps/gnome_settings_daemon/screensaver/start_screensaver] + environ: *env-def + rc: 0 + out: '' + err: '' diff --git a/tests/unit/plugins/modules/test_gconftool2_info.yaml b/tests/unit/plugins/modules/test_gconftool2_info.yaml index 269f0b4ea2..141b473e35 100644 --- a/tests/unit/plugins/modules/test_gconftool2_info.yaml +++ b/tests/unit/plugins/modules/test_gconftool2_info.yaml @@ -7,37 +7,37 @@ anchors: environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true} test_cases: -- id: test_simple_element_get - input: - key: /desktop/gnome/background/picture_filename - output: - value: '100' - mocks: - run_command: - - command: [/testbin/gconftool-2, --version] - environ: *env-def - rc: 0 - out: "3.2.6\n" - err: "" - - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename] - environ: *env-def - rc: 0 - out: "100\n" - err: "" -- id: test_simple_element_get_not_found - input: - key: /desktop/gnome/background/picture_filename - output: - value: - mocks: - run_command: - - command: [/testbin/gconftool-2, --version] - environ: *env-def - rc: 0 - out: "3.2.6\n" - err: "" - - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename] - environ: *env-def - rc: 0 - out: "" - err: "No value set for `/desktop/gnome/background/picture_filename'\n" + - id: test_simple_element_get + input: + key: /desktop/gnome/background/picture_filename + output: + value: '100' + mocks: + run_command: + - command: [/testbin/gconftool-2, --version] + environ: *env-def + rc: 0 + out: "3.2.6\n" + err: '' + - command: 
[/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename] + environ: *env-def + rc: 0 + out: "100\n" + err: '' + - id: test_simple_element_get_not_found + input: + key: /desktop/gnome/background/picture_filename + output: + value: + mocks: + run_command: + - command: [/testbin/gconftool-2, --version] + environ: *env-def + rc: 0 + out: "3.2.6\n" + err: '' + - command: [/testbin/gconftool-2, --get, /desktop/gnome/background/picture_filename] + environ: *env-def + rc: 0 + out: '' + err: "No value set for `/desktop/gnome/background/picture_filename'\n" diff --git a/tests/unit/plugins/modules/test_gio_mime.yaml b/tests/unit/plugins/modules/test_gio_mime.yaml index c1a3c5def5..6ed5af5d49 100644 --- a/tests/unit/plugins/modules/test_gio_mime.yaml +++ b/tests/unit/plugins/modules/test_gio_mime.yaml @@ -7,85 +7,85 @@ anchors: environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true} test_cases: -- id: test_set_handler - input: - handler: google-chrome.desktop - mime_type: x-scheme-handler/http - output: - handler: google-chrome.desktop - changed: true - mocks: - run_command: - - command: [/testbin/gio, --version] - environ: *env-def - rc: 0 - out: "2.80.0\n" - err: "" - - command: [/testbin/gio, mime, x-scheme-handler/http] - environ: *env-def - rc: 0 - out: "" - err: > - No default applications for “x-scheme-handler/http” - - command: [/testbin/gio, mime, x-scheme-handler/http, google-chrome.desktop] - environ: *env-def - rc: 0 - out: "Set google-chrome.desktop as the default for x-scheme-handler/http\n" - err: "" -- id: test_set_handler_check - input: - handler: google-chrome.desktop - mime_type: x-scheme-handler/http - output: - handler: google-chrome.desktop - changed: true - flags: - skip: test helper does not support check mode yet - mocks: - run_command: - - command: [/testbin/gio, --version] - environ: *env-def - rc: 0 - out: "2.80.0\n" - err: "" - - command: [/testbin/gio, mime, x-scheme-handler/http] - environ: *env-def - rc: 0 - 
out: "" - err: > - No default applications for “x-scheme-handler/http” - - command: [/testbin/gio, mime, x-scheme-handler/http, google-chrome.desktop] - environ: *env-def - rc: 0 - out: "Set google-chrome.desktop as the default for x-scheme-handler/http\n" - err: "" -- id: test_set_handler_idempot - input: - handler: google-chrome.desktop - mime_type: x-scheme-handler/http - output: - handler: google-chrome.desktop - changed: false - mocks: - run_command: - - command: [/testbin/gio, --version] - environ: *env-def - rc: 0 - out: "2.80.0\n" - err: "" - - command: [/testbin/gio, mime, x-scheme-handler/http] - environ: *env-def - rc: 0 - out: | - Default application for “x-scheme-handler/https”: google-chrome.desktop - Registered applications: - brave-browser.desktop - firefox.desktop - google-chrome.desktop - firefox_firefox.desktop - Recommended applications: - brave-browser.desktop - firefox.desktop - google-chrome.desktop - firefox_firefox.desktop - err: "" + - id: test_set_handler + input: + handler: google-chrome.desktop + mime_type: x-scheme-handler/http + output: + handler: google-chrome.desktop + changed: true + mocks: + run_command: + - command: [/testbin/gio, --version] + environ: *env-def + rc: 0 + out: "2.80.0\n" + err: '' + - command: [/testbin/gio, mime, x-scheme-handler/http] + environ: *env-def + rc: 0 + out: '' + err: > + No default applications for “x-scheme-handler/http” + - command: [/testbin/gio, mime, x-scheme-handler/http, google-chrome.desktop] + environ: *env-def + rc: 0 + out: "Set google-chrome.desktop as the default for x-scheme-handler/http\n" + err: '' + - id: test_set_handler_check + input: + handler: google-chrome.desktop + mime_type: x-scheme-handler/http + output: + handler: google-chrome.desktop + changed: true + flags: + skip: test helper does not support check mode yet + mocks: + run_command: + - command: [/testbin/gio, --version] + environ: *env-def + rc: 0 + out: "2.80.0\n" + err: '' + - command: [/testbin/gio, mime, 
x-scheme-handler/http] + environ: *env-def + rc: 0 + out: '' + err: > + No default applications for “x-scheme-handler/http” + - command: [/testbin/gio, mime, x-scheme-handler/http, google-chrome.desktop] + environ: *env-def + rc: 0 + out: "Set google-chrome.desktop as the default for x-scheme-handler/http\n" + err: '' + - id: test_set_handler_idempot + input: + handler: google-chrome.desktop + mime_type: x-scheme-handler/http + output: + handler: google-chrome.desktop + changed: false + mocks: + run_command: + - command: [/testbin/gio, --version] + environ: *env-def + rc: 0 + out: "2.80.0\n" + err: '' + - command: [/testbin/gio, mime, x-scheme-handler/http] + environ: *env-def + rc: 0 + out: | + Default application for “x-scheme-handler/https”: google-chrome.desktop + Registered applications: + brave-browser.desktop + firefox.desktop + google-chrome.desktop + firefox_firefox.desktop + Recommended applications: + brave-browser.desktop + firefox.desktop + google-chrome.desktop + firefox_firefox.desktop + err: '' diff --git a/tests/unit/plugins/modules/test_krb_ticket.yaml b/tests/unit/plugins/modules/test_krb_ticket.yaml index cfc9e212cb..d1b6d67f57 100644 --- a/tests/unit/plugins/modules/test_krb_ticket.yaml +++ b/tests/unit/plugins/modules/test_krb_ticket.yaml @@ -9,106 +9,106 @@ anchors: environ_data: &env-data {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true, data: cool_password} environ_norc: &env-norc {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: true} test_cases: -- id: test_kinit_default - input: - state: present - password: cool_password - output: - changed: true - mocks: - run_command: - - command: [/testbin/klist] - environ: *env-def - rc: 1 - out: "" - err: "" - - command: [/testbin/kinit] - environ: *env-data - rc: 0 - out: "" - err: "" -- id: test_kinit_principal - input: - state: present - password: cool_password - principal: admin@IPA.TEST - output: - changed: true - mocks: - run_command: - - command: [/testbin/klist, -l] - environ: 
*env-def - rc: 0 - out: "" - err: "" - - command: [/testbin/kinit, admin@IPA.TEST] - environ: *env-data - rc: 0 - out: "" - err: "" -- id: test_kdestroy_default - input: - state: absent - output: - changed: true - mocks: - run_command: - - command: [/testbin/klist] - environ: *env-def - rc: 0 - out: "" - err: "" - - command: [/testbin/kdestroy] - environ: *env-norc - rc: 0 - out: "" - err: "" -- id: test_kdestroy_principal - input: - state: absent - principal: admin@IPA.TEST - output: - changed: true - mocks: - run_command: - - command: [/testbin/klist, -l] - environ: *env-def - rc: 0 - out: "admin@IPA.TEST" - err: "" - - command: [/testbin/kdestroy, -p, admin@IPA.TEST] - environ: *env-norc - rc: 0 - out: "" - err: "" -- id: test_kdestroy_cache_name - input: - state: absent - cache_name: KEYRING:persistent:0:0 - output: - changed: true - mocks: - run_command: - - command: [/testbin/klist, -l] - environ: *env-def - rc: 0 - out: "KEYRING:persistent:0:0" - err: "" - - command: [/testbin/kdestroy, -c, KEYRING:persistent:0:0] - environ: *env-norc - rc: 0 - out: "" - err: "" -- id: test_kdestroy_all - input: - state: absent - kdestroy_all: true - output: - changed: true - mocks: - run_command: - - command: [/testbin/kdestroy, -A] - environ: *env-norc - rc: 0 - out: "" - err: "" + - id: test_kinit_default + input: + state: present + password: cool_password + output: + changed: true + mocks: + run_command: + - command: [/testbin/klist] + environ: *env-def + rc: 1 + out: '' + err: '' + - command: [/testbin/kinit] + environ: *env-data + rc: 0 + out: '' + err: '' + - id: test_kinit_principal + input: + state: present + password: cool_password + principal: admin@IPA.TEST + output: + changed: true + mocks: + run_command: + - command: [/testbin/klist, -l] + environ: *env-def + rc: 0 + out: '' + err: '' + - command: [/testbin/kinit, admin@IPA.TEST] + environ: *env-data + rc: 0 + out: '' + err: '' + - id: test_kdestroy_default + input: + state: absent + output: + changed: true + 
mocks: + run_command: + - command: [/testbin/klist] + environ: *env-def + rc: 0 + out: '' + err: '' + - command: [/testbin/kdestroy] + environ: *env-norc + rc: 0 + out: '' + err: '' + - id: test_kdestroy_principal + input: + state: absent + principal: admin@IPA.TEST + output: + changed: true + mocks: + run_command: + - command: [/testbin/klist, -l] + environ: *env-def + rc: 0 + out: admin@IPA.TEST + err: '' + - command: [/testbin/kdestroy, -p, admin@IPA.TEST] + environ: *env-norc + rc: 0 + out: '' + err: '' + - id: test_kdestroy_cache_name + input: + state: absent + cache_name: KEYRING:persistent:0:0 + output: + changed: true + mocks: + run_command: + - command: [/testbin/klist, -l] + environ: *env-def + rc: 0 + out: KEYRING:persistent:0:0 + err: '' + - command: [/testbin/kdestroy, -c, KEYRING:persistent:0:0] + environ: *env-norc + rc: 0 + out: '' + err: '' + - id: test_kdestroy_all + input: + state: absent + kdestroy_all: true + output: + changed: true + mocks: + run_command: + - command: [/testbin/kdestroy, -A] + environ: *env-norc + rc: 0 + out: '' + err: '' diff --git a/tests/unit/plugins/modules/test_nmcli.py b/tests/unit/plugins/modules/test_nmcli.py index 89e8de6d64..f7b8278b4f 100644 --- a/tests/unit/plugins/modules/test_nmcli.py +++ b/tests/unit/plugins/modules/test_nmcli.py @@ -4397,6 +4397,7 @@ def test_bond_connection_unchanged(mocked_generic_connection_diff_check, capfd): downdelay=dict(type='int'), updelay=dict(type='int'), xmit_hash_policy=dict(type='str'), + fail_over_mac=dict(type='str', choices=['none', 'active', 'follow']), arp_interval=dict(type='int'), arp_ip_target=dict(type='str'), primary=dict(type='str'), diff --git a/tests/unit/plugins/modules/test_opkg.yaml b/tests/unit/plugins/modules/test_opkg.yaml index e6f627457d..a437e54499 100644 --- a/tests/unit/plugins/modules/test_opkg.yaml +++ b/tests/unit/plugins/modules/test_opkg.yaml @@ -7,169 +7,169 @@ anchors: environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: false} 
test_cases: -- id: install_zlibdev - input: - name: zlib-dev - state: present - output: - msg: installed 1 package(s) - mocks: - run_command: - - command: [/testbin/opkg, --version] - environ: *env-def - rc: 0 - out: "" - err: "" - - command: [/testbin/opkg, list-installed, zlib-dev] - environ: *env-def - rc: 0 - out: "" - err: "" - - command: [/testbin/opkg, install, zlib-dev] - environ: *env-def - rc: 0 - out: | - Installing zlib-dev (1.2.11-6) to root... - Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib-dev_1.2.11-6_mips_24kc.ipk - Installing zlib (1.2.11-6) to root... - Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib_1.2.11-6_mips_24kc.ipk - Configuring zlib. - Configuring zlib-dev. - err: "" - - command: [/testbin/opkg, list-installed, zlib-dev] - environ: *env-def - rc: 0 - out: | - zlib-dev - 1.2.11-6 - err: "" -- id: install_zlibdev_present - input: - name: zlib-dev - state: present - output: - msg: package(s) already present - mocks: - run_command: - - command: [/testbin/opkg, --version] - environ: *env-def - rc: 0 - out: "" - err: "" - - command: [/testbin/opkg, list-installed, zlib-dev] - environ: *env-def - rc: 0 - out: | - zlib-dev - 1.2.11-6 - err: "" -- id: install_zlibdev_force_reinstall - input: - name: zlib-dev - state: present - force: reinstall - output: - msg: installed 1 package(s) - mocks: - run_command: - - command: [/testbin/opkg, --version] - environ: *env-def - rc: 0 - out: "" - err: "" - - command: [/testbin/opkg, list-installed, zlib-dev] - environ: *env-def - rc: 0 - out: | - zlib-dev - 1.2.11-6 - err: "" - - command: [/testbin/opkg, install, --force-reinstall, zlib-dev] - environ: *env-def - rc: 0 - out: | - Installing zlib-dev (1.2.11-6) to root... - Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib-dev_1.2.11-6_mips_24kc.ipk - Configuring zlib-dev. 
- err: "" - - command: [/testbin/opkg, list-installed, zlib-dev] - environ: *env-def - rc: 0 - out: | - zlib-dev - 1.2.11-6 - err: "" -- id: install_zlibdev_with_version - input: - name: zlib-dev=1.2.11-6 - state: present - output: - msg: installed 1 package(s) - mocks: - run_command: - - command: [/testbin/opkg, --version] - environ: *env-def - rc: 0 - out: "" - err: "" - - command: [/testbin/opkg, list-installed, zlib-dev] - environ: *env-def - rc: 0 - out: "" - err: "" - - command: [/testbin/opkg, install, zlib-dev=1.2.11-6] - environ: *env-def - rc: 0 - out: | - Installing zlib-dev (1.2.11-6) to root... - Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib-dev_1.2.11-6_mips_24kc.ipk - Installing zlib (1.2.11-6) to root... - Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib_1.2.11-6_mips_24kc.ipk - Configuring zlib. - Configuring zlib-dev. - err: "" - - command: [/testbin/opkg, list-installed, zlib-dev] - environ: *env-def - rc: 0 - out: "zlib-dev - 1.2.11-6 \n" # This output has the extra space at the end, to satisfy the behaviour of Yocto/OpenEmbedded's opkg - err: "" -- id: install_vim_updatecache - input: - name: vim-fuller - state: present - update_cache: true - output: - msg: installed 1 package(s) - mocks: - run_command: - - command: [/testbin/opkg, --version] - environ: *env-def - rc: 0 - out: "" - err: "" - - command: [/testbin/opkg, update] - environ: *env-def - rc: 0 - out: "" - err: "" - - command: [/testbin/opkg, list-installed, vim-fuller] - environ: *env-def - rc: 0 - out: "" - err: "" - - command: [/testbin/opkg, install, vim-fuller] - environ: *env-def - rc: 0 - out: | - Multiple packages (libgcc1 and libgcc1) providing same name marked HOLD or PREFER. Using latest. - Installing vim-fuller (9.0-1) to root... - Downloading https://downloads.openwrt.org/snapshots/packages/x86_64/packages/vim-fuller_9.0-1_x86_64.ipk - Installing terminfo (6.4-2) to root... 
- Downloading https://downloads.openwrt.org/snapshots/packages/x86_64/base/terminfo_6.4-2_x86_64.ipk - Installing libncurses6 (6.4-2) to root... - Downloading https://downloads.openwrt.org/snapshots/packages/x86_64/base/libncurses6_6.4-2_x86_64.ipk - Configuring terminfo. - Configuring libncurses6. - Configuring vim-fuller. - err: "" - - command: [/testbin/opkg, list-installed, vim-fuller] - environ: *env-def - rc: 0 - out: "vim-fuller - 9.0-1 \n" # This output has the extra space at the end, to satisfy the behaviour of Yocto/OpenEmbedded's opkg - err: "" + - id: install_zlibdev + input: + name: zlib-dev + state: present + output: + msg: installed 1 package(s) + mocks: + run_command: + - command: [/testbin/opkg, --version] + environ: *env-def + rc: 0 + out: '' + err: '' + - command: [/testbin/opkg, list-installed, zlib-dev] + environ: *env-def + rc: 0 + out: '' + err: '' + - command: [/testbin/opkg, install, zlib-dev] + environ: *env-def + rc: 0 + out: | + Installing zlib-dev (1.2.11-6) to root... + Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib-dev_1.2.11-6_mips_24kc.ipk + Installing zlib (1.2.11-6) to root... + Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib_1.2.11-6_mips_24kc.ipk + Configuring zlib. + Configuring zlib-dev. 
+ err: '' + - command: [/testbin/opkg, list-installed, zlib-dev] + environ: *env-def + rc: 0 + out: | + zlib-dev - 1.2.11-6 + err: '' + - id: install_zlibdev_present + input: + name: zlib-dev + state: present + output: + msg: package(s) already present + mocks: + run_command: + - command: [/testbin/opkg, --version] + environ: *env-def + rc: 0 + out: '' + err: '' + - command: [/testbin/opkg, list-installed, zlib-dev] + environ: *env-def + rc: 0 + out: | + zlib-dev - 1.2.11-6 + err: '' + - id: install_zlibdev_force_reinstall + input: + name: zlib-dev + state: present + force: reinstall + output: + msg: installed 1 package(s) + mocks: + run_command: + - command: [/testbin/opkg, --version] + environ: *env-def + rc: 0 + out: '' + err: '' + - command: [/testbin/opkg, list-installed, zlib-dev] + environ: *env-def + rc: 0 + out: | + zlib-dev - 1.2.11-6 + err: '' + - command: [/testbin/opkg, install, --force-reinstall, zlib-dev] + environ: *env-def + rc: 0 + out: | + Installing zlib-dev (1.2.11-6) to root... + Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib-dev_1.2.11-6_mips_24kc.ipk + Configuring zlib-dev. + err: '' + - command: [/testbin/opkg, list-installed, zlib-dev] + environ: *env-def + rc: 0 + out: | + zlib-dev - 1.2.11-6 + err: '' + - id: install_zlibdev_with_version + input: + name: zlib-dev=1.2.11-6 + state: present + output: + msg: installed 1 package(s) + mocks: + run_command: + - command: [/testbin/opkg, --version] + environ: *env-def + rc: 0 + out: '' + err: '' + - command: [/testbin/opkg, list-installed, zlib-dev] + environ: *env-def + rc: 0 + out: '' + err: '' + - command: [/testbin/opkg, install, zlib-dev=1.2.11-6] + environ: *env-def + rc: 0 + out: | + Installing zlib-dev (1.2.11-6) to root... + Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib-dev_1.2.11-6_mips_24kc.ipk + Installing zlib (1.2.11-6) to root... 
+ Downloading https://downloads.openwrt.org/releases/22.03.0/packages/mips_24kc/base/zlib_1.2.11-6_mips_24kc.ipk + Configuring zlib. + Configuring zlib-dev. + err: '' + - command: [/testbin/opkg, list-installed, zlib-dev] + environ: *env-def + rc: 0 + out: "zlib-dev - 1.2.11-6 \n" # This output has the extra space at the end, to satisfy the behaviour of Yocto/OpenEmbedded's opkg + err: '' + - id: install_vim_updatecache + input: + name: vim-fuller + state: present + update_cache: true + output: + msg: installed 1 package(s) + mocks: + run_command: + - command: [/testbin/opkg, --version] + environ: *env-def + rc: 0 + out: '' + err: '' + - command: [/testbin/opkg, update] + environ: *env-def + rc: 0 + out: '' + err: '' + - command: [/testbin/opkg, list-installed, vim-fuller] + environ: *env-def + rc: 0 + out: '' + err: '' + - command: [/testbin/opkg, install, vim-fuller] + environ: *env-def + rc: 0 + out: | + Multiple packages (libgcc1 and libgcc1) providing same name marked HOLD or PREFER. Using latest. + Installing vim-fuller (9.0-1) to root... + Downloading https://downloads.openwrt.org/snapshots/packages/x86_64/packages/vim-fuller_9.0-1_x86_64.ipk + Installing terminfo (6.4-2) to root... + Downloading https://downloads.openwrt.org/snapshots/packages/x86_64/base/terminfo_6.4-2_x86_64.ipk + Installing libncurses6 (6.4-2) to root... + Downloading https://downloads.openwrt.org/snapshots/packages/x86_64/base/libncurses6_6.4-2_x86_64.ipk + Configuring terminfo. + Configuring libncurses6. + Configuring vim-fuller. 
+ err: '' + - command: [/testbin/opkg, list-installed, vim-fuller] + environ: *env-def + rc: 0 + out: "vim-fuller - 9.0-1 \n" # This output has the extra space at the end, to satisfy the behaviour of Yocto/OpenEmbedded's opkg + err: '' diff --git a/tests/unit/plugins/modules/test_proxmox_backup_info.py b/tests/unit/plugins/modules/test_proxmox_backup_info.py new file mode 100644 index 0000000000..73a15b8ab8 --- /dev/null +++ b/tests/unit/plugins/modules/test_proxmox_backup_info.py @@ -0,0 +1,275 @@ +# -*- coding: utf-8 -*- +# +# Copyright (c) 2024 Marzieh Raoufnezhad +# Copyright (c) 2024 Maryam Mayabi +# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt) +# SPDX-License-Identifier: GPL-3.0-or-later + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +import pytest + +proxmoxer = pytest.importorskip("proxmoxer") + +from ansible_collections.community.general.plugins.modules import proxmox_backup_info +from ansible_collections.community.general.tests.unit.compat.mock import patch +from ansible_collections.community.general.tests.unit.plugins.modules.utils import ( + AnsibleExitJson, + AnsibleFailJson, + ModuleTestCase, + set_module_args, +) +import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils + +RESOURCE_LIST = [ + { + "uptime": 0, + "diskwrite": 0, + "name": "test01", + "maxcpu": 0, + "node": "NODE1", + "mem": 0, + "netout": 0, + "netin": 0, + "maxmem": 0, + "diskread": 0, + "disk": 0, + "maxdisk": 0, + "status": "running", + "cpu": 0, + "id": "qemu/100", + "template": 0, + "vmid": 100, + "type": "qemu" + }, + { + "uptime": 0, + "diskwrite": 0, + "name": "test02", + "maxcpu": 0, + "node": "NODE1", + "mem": 0, + "netout": 0, + "netin": 0, + "maxmem": 0, + "diskread": 0, + "disk": 0, + "maxdisk": 0, + "status": "running", + "cpu": 0, + "id": "qemu/101", + "template": 0, + "vmid": 101, + "type": "qemu" + }, + { + "uptime": 0, + 
"diskwrite": 0, + "name": "test03", + "maxcpu": 0, + "node": "NODE2", + "mem": 0, + "netout": 0, + "netin": 0, + "maxmem": 0, + "diskread": 0, + "disk": 0, + "maxdisk": 0, + "status": "running", + "cpu": 0, + "id": "qemu/102", + "template": 0, + "vmid": 102, + "type": "qemu" + } +] +BACKUP_JOBS = [ + { + "type": "vzdump", + "id": "backup-83831498-c631", + "storage": "local", + "vmid": "100", + "enabled": 1, + "next-run": 1735138800, + "mailnotification": "always", + "schedule": "06,18:30", + "mode": "snapshot", + "notes-template": "guestname" + }, + { + "schedule": "sat 15:00", + "notes-template": "guestname", + "mode": "snapshot", + "mailnotification": "always", + "next-run": 1735385400, + "type": "vzdump", + "enabled": 1, + "vmid": "100,101,102", + "storage": "local", + "id": "backup-70025700-2302", + } +] + +EXPECTED_BACKUP_OUTPUT = [ + { + "bktype": "vzdump", + "enabled": 1, + "id": "backup-83831498-c631", + "mode": "snapshot", + "next-run": "2024-12-25 15:00:00", + "schedule": "06,18:30", + "storage": "local", + "vm_name": "test01", + "vmid": "100" + }, + { + "bktype": "vzdump", + "enabled": 1, + "id": "backup-70025700-2302", + "mode": "snapshot", + "next-run": "2024-12-28 11:30:00", + "schedule": "sat 15:00", + "storage": "local", + "vm_name": "test01", + "vmid": "100" + }, + { + "bktype": "vzdump", + "enabled": 1, + "id": "backup-70025700-2302", + "mode": "snapshot", + "next-run": "2024-12-28 11:30:00", + "schedule": "sat 15:00", + "storage": "local", + "vm_name": "test02", + "vmid": "101" + }, + { + "bktype": "vzdump", + "enabled": 1, + "id": "backup-70025700-2302", + "mode": "snapshot", + "next-run": "2024-12-28 11:30:00", + "schedule": "sat 15:00", + "storage": "local", + "vm_name": "test03", + "vmid": "102" + } +] +EXPECTED_BACKUP_JOBS_OUTPUT = [ + { + "enabled": 1, + "id": "backup-83831498-c631", + "mailnotification": "always", + "mode": "snapshot", + "next-run": 1735138800, + "notes-template": "guestname", + "schedule": "06,18:30", + "storage": 
"local", + "type": "vzdump", + "vmid": "100" + }, + { + "enabled": 1, + "id": "backup-70025700-2302", + "mailnotification": "always", + "mode": "snapshot", + "next-run": 1735385400, + "notes-template": "guestname", + "schedule": "sat 15:00", + "storage": "local", + "type": "vzdump", + "vmid": "100,101,102" + } +] + + +class TestProxmoxBackupInfoModule(ModuleTestCase): + def setUp(self): + super(TestProxmoxBackupInfoModule, self).setUp() + proxmox_utils.HAS_PROXMOXER = True + self.module = proxmox_backup_info + self.connect_mock = patch( + "ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect", + ).start() + self.connect_mock.return_value.cluster.resources.get.return_value = ( + RESOURCE_LIST + ) + self.connect_mock.return_value.cluster.backup.get.return_value = ( + BACKUP_JOBS + ) + + def tearDown(self): + self.connect_mock.stop() + super(TestProxmoxBackupInfoModule, self).tearDown() + + def test_module_fail_when_required_args_missing(self): + with pytest.raises(AnsibleFailJson) as exc_info: + set_module_args({}) + self.module.main() + + result = exc_info.value.args[0] + assert result["msg"] == "missing required arguments: api_host, api_user" + + def test_get_all_backups_information(self): + with pytest.raises(AnsibleExitJson) as exc_info: + set_module_args({ + 'api_host': 'proxmoxhost', + 'api_user': 'root@pam', + 'api_password': 'supersecret' + }) + self.module.main() + + result = exc_info.value.args[0] + assert result["backup_info"] == EXPECTED_BACKUP_OUTPUT + + def test_get_specific_backup_information_by_vmname(self): + with pytest.raises(AnsibleExitJson) as exc_info: + vmname = 'test01' + expected_output = [ + backup for backup in EXPECTED_BACKUP_OUTPUT if backup["vm_name"] == vmname + ] + set_module_args({ + 'api_host': 'proxmoxhost', + 'api_user': 'root@pam', + 'api_password': 'supersecret', + 'vm_name': vmname + }) + self.module.main() + + result = exc_info.value.args[0] + assert result["backup_info"] == expected_output 
+ assert len(result["backup_info"]) == 2 + + def test_get_specific_backup_information_by_vmid(self): + with pytest.raises(AnsibleExitJson) as exc_info: + vmid = "101" + expected_output = [ + backup for backup in EXPECTED_BACKUP_OUTPUT if backup["vmid"] == vmid + ] + set_module_args({ + 'api_host': 'proxmoxhost', + 'api_user': 'root@pam', + 'api_password': 'supersecret', + 'vm_id': vmid + }) + self.module.main() + result = exc_info.value.args[0] + assert result["backup_info"] == expected_output + assert len(result["backup_info"]) == 1 + + def test_get_specific_backup_information_by_backupjobs(self): + with pytest.raises(AnsibleExitJson) as exc_info: + backupjobs = True + set_module_args({ + 'api_host': 'proxmoxhost', + 'api_user': 'root@pam', + 'api_password': 'supersecret', + 'backup_jobs': backupjobs + }) + self.module.main() + + result = exc_info.value.args[0] + assert result["backup_info"] == EXPECTED_BACKUP_JOBS_OUTPUT diff --git a/tests/unit/plugins/modules/test_puppet.yaml b/tests/unit/plugins/modules/test_puppet.yaml index 44cfb98d6f..df813c6231 100644 --- a/tests/unit/plugins/modules/test_puppet.yaml +++ b/tests/unit/plugins/modules/test_puppet.yaml @@ -7,228 +7,228 @@ anchors: environ: &env-def {environ_update: {LANGUAGE: C, LC_ALL: C}, check_rc: false} test_cases: -- id: puppet_agent_plain - input: {} - output: - changed: false - mocks: - run_command: - - command: [/testbin/puppet, config, print, agent_disabled_lockfile] - environ: *env-def - rc: 0 - out: "blah, anything" - err: "" - - command: - - /testbin/timeout - - -s - - "9" - - 30m - - /testbin/puppet - - agent - - --onetime - - --no-daemonize - - --no-usecacheonfailure - - --no-splay - - --detailed-exitcodes - - --verbose - - --color - - "0" - environ: *env-def - rc: 0 - out: "" - err: "" -- id: puppet_agent_certname - input: - certname: potatobox - output: - changed: false - mocks: - run_command: - - command: [/testbin/puppet, config, print, agent_disabled_lockfile] - environ: *env-def - rc: 0 - 
out: "blah, anything" - err: "" - - command: - - /testbin/timeout - - -s - - "9" - - 30m - - /testbin/puppet - - agent - - --onetime - - --no-daemonize - - --no-usecacheonfailure - - --no-splay - - --detailed-exitcodes - - --verbose - - --color - - "0" - - --certname=potatobox - environ: *env-def - rc: 0 - out: "" - err: "" -- id: puppet_agent_tags_abc - input: - tags: [a, b, c] - output: - changed: false - mocks: - run_command: - - command: [/testbin/puppet, config, print, agent_disabled_lockfile] - environ: *env-def - rc: 0 - out: "blah, anything" - err: "" - - command: - - /testbin/timeout - - -s - - "9" - - 30m - - /testbin/puppet - - agent - - --onetime - - --no-daemonize - - --no-usecacheonfailure - - --no-splay - - --detailed-exitcodes - - --verbose - - --color - - "0" - - --tags - - a,b,c - environ: *env-def - rc: 0 - out: "" - err: "" -- id: puppet_agent_skip_tags_def - input: - skip_tags: [d, e, f] - output: - changed: false - mocks: - run_command: - - command: [/testbin/puppet, config, print, agent_disabled_lockfile] - environ: *env-def - rc: 0 - out: "blah, anything" - err: "" - - command: - - /testbin/timeout - - -s - - "9" - - 30m - - /testbin/puppet - - agent - - --onetime - - --no-daemonize - - --no-usecacheonfailure - - --no-splay - - --detailed-exitcodes - - --verbose - - --color - - "0" - - --skip_tags - - d,e,f - environ: *env-def - rc: 0 - out: "" - err: "" -- id: puppet_agent_noop_false - input: - noop: false - output: - changed: false - mocks: - run_command: - - command: [/testbin/puppet, config, print, agent_disabled_lockfile] - environ: *env-def - rc: 0 - out: "blah, anything" - err: "" - - command: - - /testbin/timeout - - -s - - "9" - - 30m - - /testbin/puppet - - agent - - --onetime - - --no-daemonize - - --no-usecacheonfailure - - --no-splay - - --detailed-exitcodes - - --verbose - - --color - - "0" - - --no-noop - environ: *env-def - rc: 0 - out: "" - err: "" -- id: puppet_agent_noop_true - input: - noop: true - output: - changed: 
false - mocks: - run_command: - - command: [/testbin/puppet, config, print, agent_disabled_lockfile] - environ: *env-def - rc: 0 - out: "blah, anything" - err: "" - - command: - - /testbin/timeout - - -s - - "9" - - 30m - - /testbin/puppet - - agent - - --onetime - - --no-daemonize - - --no-usecacheonfailure - - --no-splay - - --detailed-exitcodes - - --verbose - - --color - - "0" - - --noop - environ: *env-def - rc: 0 - out: "" - err: "" -- id: puppet_agent_waitforlock - input: - waitforlock: 30 - output: - changed: false - mocks: - run_command: - - command: [/testbin/puppet, config, print, agent_disabled_lockfile] - environ: *env-def - rc: 0 - out: "blah, anything" - err: "" - - command: - - /testbin/timeout - - -s - - "9" - - 30m - - /testbin/puppet - - agent - - --onetime - - --no-daemonize - - --no-usecacheonfailure - - --no-splay - - --detailed-exitcodes - - --verbose - - --color - - "0" - - --waitforlock - - "30" - environ: *env-def - rc: 0 - out: "" - err: "" + - id: puppet_agent_plain + input: {} + output: + changed: false + mocks: + run_command: + - command: [/testbin/puppet, config, print, agent_disabled_lockfile] + environ: *env-def + rc: 0 + out: blah, anything + err: '' + - command: + - /testbin/timeout + - -s + - '9' + - 30m + - /testbin/puppet + - agent + - --onetime + - --no-daemonize + - --no-usecacheonfailure + - --no-splay + - --detailed-exitcodes + - --verbose + - --color + - '0' + environ: *env-def + rc: 0 + out: '' + err: '' + - id: puppet_agent_certname + input: + certname: potatobox + output: + changed: false + mocks: + run_command: + - command: [/testbin/puppet, config, print, agent_disabled_lockfile] + environ: *env-def + rc: 0 + out: blah, anything + err: '' + - command: + - /testbin/timeout + - -s + - '9' + - 30m + - /testbin/puppet + - agent + - --onetime + - --no-daemonize + - --no-usecacheonfailure + - --no-splay + - --detailed-exitcodes + - --verbose + - --color + - '0' + - --certname=potatobox + environ: *env-def + rc: 0 + out: '' 
+ err: '' + - id: puppet_agent_tags_abc + input: + tags: [a, b, c] + output: + changed: false + mocks: + run_command: + - command: [/testbin/puppet, config, print, agent_disabled_lockfile] + environ: *env-def + rc: 0 + out: blah, anything + err: '' + - command: + - /testbin/timeout + - -s + - '9' + - 30m + - /testbin/puppet + - agent + - --onetime + - --no-daemonize + - --no-usecacheonfailure + - --no-splay + - --detailed-exitcodes + - --verbose + - --color + - '0' + - --tags + - a,b,c + environ: *env-def + rc: 0 + out: '' + err: '' + - id: puppet_agent_skip_tags_def + input: + skip_tags: [d, e, f] + output: + changed: false + mocks: + run_command: + - command: [/testbin/puppet, config, print, agent_disabled_lockfile] + environ: *env-def + rc: 0 + out: blah, anything + err: '' + - command: + - /testbin/timeout + - -s + - '9' + - 30m + - /testbin/puppet + - agent + - --onetime + - --no-daemonize + - --no-usecacheonfailure + - --no-splay + - --detailed-exitcodes + - --verbose + - --color + - '0' + - --skip_tags + - d,e,f + environ: *env-def + rc: 0 + out: '' + err: '' + - id: puppet_agent_noop_false + input: + noop: false + output: + changed: false + mocks: + run_command: + - command: [/testbin/puppet, config, print, agent_disabled_lockfile] + environ: *env-def + rc: 0 + out: blah, anything + err: '' + - command: + - /testbin/timeout + - -s + - '9' + - 30m + - /testbin/puppet + - agent + - --onetime + - --no-daemonize + - --no-usecacheonfailure + - --no-splay + - --detailed-exitcodes + - --verbose + - --color + - '0' + - --no-noop + environ: *env-def + rc: 0 + out: '' + err: '' + - id: puppet_agent_noop_true + input: + noop: true + output: + changed: false + mocks: + run_command: + - command: [/testbin/puppet, config, print, agent_disabled_lockfile] + environ: *env-def + rc: 0 + out: blah, anything + err: '' + - command: + - /testbin/timeout + - -s + - '9' + - 30m + - /testbin/puppet + - agent + - --onetime + - --no-daemonize + - --no-usecacheonfailure + - --no-splay 
+ - --detailed-exitcodes + - --verbose + - --color + - '0' + - --noop + environ: *env-def + rc: 0 + out: '' + err: '' + - id: puppet_agent_waitforlock + input: + waitforlock: 30 + output: + changed: false + mocks: + run_command: + - command: [/testbin/puppet, config, print, agent_disabled_lockfile] + environ: *env-def + rc: 0 + out: blah, anything + err: '' + - command: + - /testbin/timeout + - -s + - '9' + - 30m + - /testbin/puppet + - agent + - --onetime + - --no-daemonize + - --no-usecacheonfailure + - --no-splay + - --detailed-exitcodes + - --verbose + - --color + - '0' + - --waitforlock + - '30' + environ: *env-def + rc: 0 + out: '' + err: '' diff --git a/tests/unit/plugins/modules/test_redhat_subscription.py b/tests/unit/plugins/modules/test_redhat_subscription.py index 7be3740d26..bbdbbdab7d 100644 --- a/tests/unit/plugins/modules/test_redhat_subscription.py +++ b/tests/unit/plugins/modules/test_redhat_subscription.py @@ -199,11 +199,6 @@ TEST_CASES = [ {'check_rc': False}, (0, 'system identity: b26df632-25ed-4452-8f89-0308bfd167cb', '') ), - ( - ['/testbin/subscription-manager', 'remove', '--all'], - {'check_rc': True}, - (0, '', '') - ), ( ['/testbin/subscription-manager', 'unregister'], {'check_rc': True}, diff --git a/tests/unit/plugins/modules/test_snap.py b/tests/unit/plugins/modules/test_snap.py index 5ed366d724..97e3e8700e 100644 --- a/tests/unit/plugins/modules/test_snap.py +++ b/tests/unit/plugins/modules/test_snap.py @@ -376,6 +376,16 @@ issue_6803_kubectl_out = ( "\r\u001b[0m\u001b[?25h\u001b[Kkubectl (1.27/stable) v1.27.2 from Canonical** installed\n" ) + +default_env = {'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False} +default_version_out = """\ +snap 2.66.1+24.04 +snapd 2.66.1+24.04 +series 16 +ubuntu 24.04 +kernel 6.8.0-49-generic +""" + TEST_CASES = [ dict( id="simple case", @@ -384,30 +394,37 @@ TEST_CASES = [ flags={}, mocks=dict( run_command=[ + dict( + command=['/testbin/snap', 'version'], + environ=default_env, + 
rc=0, + out=default_version_out, + err="", + ), dict( command=['/testbin/snap', 'info', 'hello-world'], - environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False}, + environ=default_env, rc=0, out='name: hello-world\n', err="", ), dict( command=['/testbin/snap', 'list'], - environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False}, + environ=default_env, rc=0, out="", err="", ), dict( command=['/testbin/snap', 'install', 'hello-world'], - environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False}, + environ=default_env, rc=0, out="hello-world (12345/stable) v12345 from Canonical** installed\n", err="", ), dict( command=['/testbin/snap', 'list'], - environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False}, + environ=default_env, rc=0, out=( "Name Version Rev Tracking Publisher Notes" @@ -428,37 +445,44 @@ TEST_CASES = [ flags={}, mocks=dict( run_command=[ + dict( + command=['/testbin/snap', 'version'], + environ=default_env, + rc=0, + out=default_version_out, + err="", + ), dict( command=['/testbin/snap', 'info', 'microk8s', 'kubectl'], - environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False}, + environ=default_env, rc=0, out='name: microk8s\n---\nname: kubectl\n', err="", ), dict( command=['/testbin/snap', 'list'], - environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False}, + environ=default_env, rc=0, out=issue_6803_status_out, err="", ), dict( command=['/testbin/snap', 'install', '--classic', 'microk8s'], - environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False}, + environ=default_env, rc=0, out=issue_6803_microk8s_out, err="", ), dict( command=['/testbin/snap', 'install', '--classic', 'kubectl'], - environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False}, + environ=default_env, rc=0, out=issue_6803_kubectl_out, err="", ), dict( command=['/testbin/snap', 'list'], - 
environ={'environ_update': {'LANGUAGE': 'C', 'LC_ALL': 'C'}, 'check_rc': False}, + environ=default_env, rc=0, out=( "Name Version Rev Tracking Publisher Notes" diff --git a/tests/unit/plugins/modules/test_wdc_redfish_command.py b/tests/unit/plugins/modules/test_wdc_redfish_command.py index 0775ac73dd..9f0104042c 100644 --- a/tests/unit/plugins/modules/test_wdc_redfish_command.py +++ b/tests/unit/plugins/modules/test_wdc_redfish_command.py @@ -896,15 +896,14 @@ class TestWdcRedfishCommand(unittest.TestCase): bundle_tarfile = tarfile.open(os.path.join(self.tempdir, tar_name), "w") package_filename = "oobm-{0}.pkg".format(mock_firmware_version) package_filename_path = os.path.join(self.tempdir, package_filename) - package_file = open(package_filename_path, "w") - package_file.close() + with open(package_filename_path, "w"): + pass bundle_tarfile.add(os.path.join(self.tempdir, package_filename), arcname=package_filename) bin_filename = "firmware.bin" bin_filename_path = os.path.join(self.tempdir, bin_filename) - bin_file = open(bin_filename_path, "wb") - byte_to_write = b'\x80' if is_multi_tenant else b'\xFF' - bin_file.write(byte_to_write * 12) - bin_file.close() + with open(bin_filename_path, "wb") as bin_file: + byte_to_write = b'\x80' if is_multi_tenant else b'\xFF' + bin_file.write(byte_to_write * 12) for filename in [package_filename, bin_filename]: bundle_tarfile.add(os.path.join(self.tempdir, filename), arcname=filename) bundle_tarfile.close() diff --git a/tests/unit/plugins/modules/test_xfconf.yaml b/tests/unit/plugins/modules/test_xfconf.yaml index f306bfdfa0..2ba274fdfb 100644 --- a/tests/unit/plugins/modules/test_xfconf.yaml +++ b/tests/unit/plugins/modules/test_xfconf.yaml @@ -14,224 +14,224 @@ anchors: Please report bugs to . 
test_cases: -- id: test_missing_input - input: {} - output: - failed: true - msg: "missing required arguments: channel, property" -- id: test_property_set_property - input: - channel: xfwm4 - property: /general/inactive_opacity - state: present - value_type: int - value: 90 - output: - changed: true - previous_value: '100' - type: int - value: '90' - version: "4.18.1" - mocks: - run_command: - - command: [/testbin/xfconf-query, --version] - environ: *env-def - rc: 0 - out: *version-output - err: "" - - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/inactive_opacity] - environ: *env-def - rc: 0 - out: "100\n" - err: "" - - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/inactive_opacity, --create, --type, int, --set, '90'] - environ: *env-def - rc: 0 - out: "" - err: "" -- id: test_property_set_property_same_value - input: - channel: xfwm4 - property: /general/inactive_opacity - state: present - value_type: int - value: 90 - output: - changed: false - previous_value: '90' - type: int - value: '90' - version: "4.18.1" - mocks: - run_command: - - command: [/testbin/xfconf-query, --version] - environ: *env-def - rc: 0 - out: *version-output - err: "" - - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/inactive_opacity] - environ: *env-def - rc: 0 - out: "90\n" - err: "" - - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/inactive_opacity, --create, --type, int, --set, '90'] - environ: *env-def - rc: 0 - out: "" - err: "" -- id: test_property_set_property_bool_false - input: - channel: xfce4-session - property: /general/SaveOnExit - state: present - value_type: bool - value: false - output: - changed: true - previous_value: 'true' - type: bool - value: 'False' - version: "4.18.1" - mocks: - run_command: - - command: [/testbin/xfconf-query, --version] - environ: *env-def - rc: 0 - out: *version-output - err: "" - - command: [/testbin/xfconf-query, --channel, xfce4-session, 
--property, /general/SaveOnExit] - environ: *env-def - rc: 0 - out: "true\n" - err: "" - - command: [/testbin/xfconf-query, --channel, xfce4-session, --property, /general/SaveOnExit, --create, --type, bool, --set, 'false'] - environ: *env-def - rc: 0 - out: "false\n" - err: "" -- id: test_property_set_array - input: - channel: xfwm4 - property: /general/workspace_names - state: present - value_type: string - value: [A, B, C] - output: - changed: true - previous_value: [Main, Work, Tmp] - type: [string, string, string] - value: [A, B, C] - version: "4.18.1" - mocks: - run_command: - - command: [/testbin/xfconf-query, --version] - environ: *env-def - rc: 0 - out: *version-output - err: "" - - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/workspace_names] - environ: *env-def - rc: 0 - out: "Value is an array with 3 items:\n\nMain\nWork\nTmp\n" - err: "" - - command: - - /testbin/xfconf-query - - --channel - - xfwm4 - - --property - - /general/workspace_names - - --create - - --force-array - - --type - - string - - --set - - A - - --type - - string - - --set - - B - - --type - - string - - --set - - C - environ: *env-def - rc: 0 - out: "" - err: "" -- id: test_property_set_array_to_same_value - input: - channel: xfwm4 - property: /general/workspace_names - state: present - value_type: string - value: [A, B, C] - output: - changed: false - previous_value: [A, B, C] - type: [string, string, string] - value: [A, B, C] - version: "4.18.1" - mocks: - run_command: - - command: [/testbin/xfconf-query, --version] - environ: *env-def - rc: 0 - out: *version-output - err: "" - - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/workspace_names] - environ: *env-def - rc: 0 - out: "Value is an array with 3 items:\n\nA\nB\nC\n" - err: "" - - command: - - /testbin/xfconf-query - - --channel - - xfwm4 - - --property - - /general/workspace_names - - --create - - --force-array - - --type - - string - - --set - - A - - --type - - string - 
- --set - - B - - --type - - string - - --set - - C - environ: *env-def - rc: 0 - out: "" - err: "" -- id: test_property_reset_value - input: - channel: xfwm4 - property: /general/workspace_names - state: absent - output: - changed: true - previous_value: [A, B, C] - type: - value: - version: "4.18.1" - mocks: - run_command: - - command: [/testbin/xfconf-query, --version] - environ: *env-def - rc: 0 - out: *version-output - err: "" - - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/workspace_names] - environ: *env-def - rc: 0 - out: "Value is an array with 3 items:\n\nA\nB\nC\n" - err: "" - - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/workspace_names, --reset] - environ: *env-def - rc: 0 - out: "" - err: "" + - id: test_missing_input + input: {} + output: + failed: true + msg: 'missing required arguments: channel, property' + - id: test_property_set_property + input: + channel: xfwm4 + property: /general/inactive_opacity + state: present + value_type: int + value: 90 + output: + changed: true + previous_value: '100' + type: int + value: '90' + version: 4.18.1 + mocks: + run_command: + - command: [/testbin/xfconf-query, --version] + environ: *env-def + rc: 0 + out: *version-output + err: '' + - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/inactive_opacity] + environ: *env-def + rc: 0 + out: "100\n" + err: '' + - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/inactive_opacity, --create, --type, int, --set, '90'] + environ: *env-def + rc: 0 + out: '' + err: '' + - id: test_property_set_property_same_value + input: + channel: xfwm4 + property: /general/inactive_opacity + state: present + value_type: int + value: 90 + output: + changed: false + previous_value: '90' + type: int + value: '90' + version: 4.18.1 + mocks: + run_command: + - command: [/testbin/xfconf-query, --version] + environ: *env-def + rc: 0 + out: *version-output + err: '' + - command: 
[/testbin/xfconf-query, --channel, xfwm4, --property, /general/inactive_opacity] + environ: *env-def + rc: 0 + out: "90\n" + err: '' + - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/inactive_opacity, --create, --type, int, --set, '90'] + environ: *env-def + rc: 0 + out: '' + err: '' + - id: test_property_set_property_bool_false + input: + channel: xfce4-session + property: /general/SaveOnExit + state: present + value_type: bool + value: false + output: + changed: true + previous_value: 'true' + type: bool + value: 'False' + version: 4.18.1 + mocks: + run_command: + - command: [/testbin/xfconf-query, --version] + environ: *env-def + rc: 0 + out: *version-output + err: '' + - command: [/testbin/xfconf-query, --channel, xfce4-session, --property, /general/SaveOnExit] + environ: *env-def + rc: 0 + out: "true\n" + err: '' + - command: [/testbin/xfconf-query, --channel, xfce4-session, --property, /general/SaveOnExit, --create, --type, bool, --set, 'false'] + environ: *env-def + rc: 0 + out: "false\n" + err: '' + - id: test_property_set_array + input: + channel: xfwm4 + property: /general/workspace_names + state: present + value_type: string + value: [A, B, C] + output: + changed: true + previous_value: [Main, Work, Tmp] + type: [string, string, string] + value: [A, B, C] + version: 4.18.1 + mocks: + run_command: + - command: [/testbin/xfconf-query, --version] + environ: *env-def + rc: 0 + out: *version-output + err: '' + - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/workspace_names] + environ: *env-def + rc: 0 + out: "Value is an array with 3 items:\n\nMain\nWork\nTmp\n" + err: '' + - command: + - /testbin/xfconf-query + - --channel + - xfwm4 + - --property + - /general/workspace_names + - --create + - --force-array + - --type + - string + - --set + - A + - --type + - string + - --set + - B + - --type + - string + - --set + - C + environ: *env-def + rc: 0 + out: '' + err: '' + - id: 
test_property_set_array_to_same_value + input: + channel: xfwm4 + property: /general/workspace_names + state: present + value_type: string + value: [A, B, C] + output: + changed: false + previous_value: [A, B, C] + type: [string, string, string] + value: [A, B, C] + version: 4.18.1 + mocks: + run_command: + - command: [/testbin/xfconf-query, --version] + environ: *env-def + rc: 0 + out: *version-output + err: '' + - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/workspace_names] + environ: *env-def + rc: 0 + out: "Value is an array with 3 items:\n\nA\nB\nC\n" + err: '' + - command: + - /testbin/xfconf-query + - --channel + - xfwm4 + - --property + - /general/workspace_names + - --create + - --force-array + - --type + - string + - --set + - A + - --type + - string + - --set + - B + - --type + - string + - --set + - C + environ: *env-def + rc: 0 + out: '' + err: '' + - id: test_property_reset_value + input: + channel: xfwm4 + property: /general/workspace_names + state: absent + output: + changed: true + previous_value: [A, B, C] + type: + value: + version: 4.18.1 + mocks: + run_command: + - command: [/testbin/xfconf-query, --version] + environ: *env-def + rc: 0 + out: *version-output + err: '' + - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/workspace_names] + environ: *env-def + rc: 0 + out: "Value is an array with 3 items:\n\nA\nB\nC\n" + err: '' + - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/workspace_names, --reset] + environ: *env-def + rc: 0 + out: '' + err: '' diff --git a/tests/unit/plugins/modules/test_xfconf_info.yaml b/tests/unit/plugins/modules/test_xfconf_info.yaml index 8e7ae667c4..d4d0deb39f 100644 --- a/tests/unit/plugins/modules/test_xfconf_info.yaml +++ b/tests/unit/plugins/modules/test_xfconf_info.yaml @@ -14,114 +14,114 @@ anchors: Please report bugs to . 
test_cases: -- id: test_simple_property_get - input: - channel: xfwm4 - property: /general/inactive_opacity - output: - value: '100' - is_array: false - version: "4.18.1" - mocks: - run_command: - - command: [/testbin/xfconf-query, --version] - environ: *env-def - rc: 0 - out: *version-output - err: "" - - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/inactive_opacity] - environ: *env-def - rc: 0 - out: "100\n" - err: "" -- id: test_simple_property_get_nonexistent - input: - channel: xfwm4 - property: /general/i_dont_exist - output: - version: "4.18.1" - mocks: - run_command: - - command: [/testbin/xfconf-query, --version] - environ: *env-def - rc: 0 - out: *version-output - err: "" - - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/i_dont_exist] - environ: *env-def - rc: 1 - out: "" - err: 'Property "/general/i_dont_exist" does not exist on channel "xfwm4".\n' -- id: test_property_no_channel - input: - property: /general/i_dont_exist - output: - failed: true - msg: "missing parameter(s) required by 'property': channel" -- id: test_property_get_array - input: - channel: xfwm4 - property: /general/workspace_names - output: - is_array: true - value_array: [Main, Work, Tmp] - version: "4.18.1" - mocks: - run_command: - - command: [/testbin/xfconf-query, --version] - environ: *env-def - rc: 0 - out: *version-output - err: "" - - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/workspace_names] - environ: *env-def - rc: 0 - out: "Value is an array with 3 items:\n\nMain\nWork\nTmp\n" - err: "" -- id: get_channels - input: {} - output: - channels: [a, b, c] - version: "4.18.1" - mocks: - run_command: - - command: [/testbin/xfconf-query, --version] - environ: *env-def - rc: 0 - out: *version-output - err: "" - - command: [/testbin/xfconf-query, --list] - environ: *env-def - rc: 0 - out: "Channels:\n a\n b\n c\n" - err: "" -- id: get_properties - input: - channel: xfwm4 - output: - properties: - - 
/general/wrap_cycle - - /general/wrap_layout - - /general/wrap_resistance - - /general/wrap_windows - - /general/wrap_workspaces - - /general/zoom_desktop - version: "4.18.1" - mocks: - run_command: - - command: [/testbin/xfconf-query, --version] - environ: *env-def - rc: 0 - out: *version-output - err: "" - - command: [/testbin/xfconf-query, --list, --channel, xfwm4] - environ: *env-def - rc: 0 - out: | - /general/wrap_cycle - /general/wrap_layout - /general/wrap_resistance - /general/wrap_windows - /general/wrap_workspaces - /general/zoom_desktop - err: "" + - id: test_simple_property_get + input: + channel: xfwm4 + property: /general/inactive_opacity + output: + value: '100' + is_array: false + version: 4.18.1 + mocks: + run_command: + - command: [/testbin/xfconf-query, --version] + environ: *env-def + rc: 0 + out: *version-output + err: '' + - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/inactive_opacity] + environ: *env-def + rc: 0 + out: "100\n" + err: '' + - id: test_simple_property_get_nonexistent + input: + channel: xfwm4 + property: /general/i_dont_exist + output: + version: 4.18.1 + mocks: + run_command: + - command: [/testbin/xfconf-query, --version] + environ: *env-def + rc: 0 + out: *version-output + err: '' + - command: [/testbin/xfconf-query, --channel, xfwm4, --property, /general/i_dont_exist] + environ: *env-def + rc: 1 + out: '' + err: Property "/general/i_dont_exist" does not exist on channel "xfwm4".\n + - id: test_property_no_channel + input: + property: /general/i_dont_exist + output: + failed: true + msg: "missing parameter(s) required by 'property': channel" + - id: test_property_get_array + input: + channel: xfwm4 + property: /general/workspace_names + output: + is_array: true + value_array: [Main, Work, Tmp] + version: 4.18.1 + mocks: + run_command: + - command: [/testbin/xfconf-query, --version] + environ: *env-def + rc: 0 + out: *version-output + err: '' + - command: [/testbin/xfconf-query, --channel, xfwm4, 
--property, /general/workspace_names]
+          environ: *env-def
+          rc: 0
+          out: "Value is an array with 3 items:\n\nMain\nWork\nTmp\n"
+          err: ''
+  - id: get_channels
+    input: {}
+    output:
+      channels: [a, b, c]
+      version: 4.18.1
+    mocks:
+      run_command:
+        - command: [/testbin/xfconf-query, --version]
+          environ: *env-def
+          rc: 0
+          out: *version-output
+          err: ''
+        - command: [/testbin/xfconf-query, --list]
+          environ: *env-def
+          rc: 0
+          out: "Channels:\n a\n b\n c\n"
+          err: ''
+  - id: get_properties
+    input:
+      channel: xfwm4
+    output:
+      properties:
+        - /general/wrap_cycle
+        - /general/wrap_layout
+        - /general/wrap_resistance
+        - /general/wrap_windows
+        - /general/wrap_workspaces
+        - /general/zoom_desktop
+      version: 4.18.1
+    mocks:
+      run_command:
+        - command: [/testbin/xfconf-query, --version]
+          environ: *env-def
+          rc: 0
+          out: *version-output
+          err: ''
+        - command: [/testbin/xfconf-query, --list, --channel, xfwm4]
+          environ: *env-def
+          rc: 0
+          out: |
+            /general/wrap_cycle
+            /general/wrap_layout
+            /general/wrap_resistance
+            /general/wrap_windows
+            /general/wrap_workspaces
+            /general/zoom_desktop
+          err: ''
diff --git a/tests/unit/requirements.txt b/tests/unit/requirements.txt
index cfc8493912..fb24975d7b 100644
--- a/tests/unit/requirements.txt
+++ b/tests/unit/requirements.txt
@@ -51,9 +51,15 @@ passlib[argon2]
 proxmoxer < 2.0.0 ; python_version >= '2.7' and python_version <= '3.6'
 proxmoxer ; python_version > '3.6'
 
+# requirements for the proxmox_pct_remote connection plugin
+paramiko >= 3.0.0 ; python_version >= '3.6'
+
 #requirements for nomad_token modules
 python-nomad < 2.0.0 ; python_version <= '3.6'
 python-nomad >= 2.0.0 ; python_version >= '3.7'
 
 # requirement for jenkins_build, jenkins_node, jenkins_plugin modules
-python-jenkins >= 0.4.12
\ No newline at end of file
+python-jenkins >= 0.4.12
+
+# requirement for json_patch, json_patch_recipe and json_diff plugins
+jsonpatch
\ No newline at end of file