mirror of https://github.com/ansible-collections/community.general.git, synced 2025-06-27 10:40:22 -07:00
Remove proxmox content (#10110)
Some checks failed
EOL CI / EOL Sanity (Ⓐ2.16) (push) Has been cancelled
EOL CI / EOL Units (Ⓐ2.16+py2.7) (push) Has been cancelled
EOL CI / EOL Units (Ⓐ2.16+py3.11) (push) Has been cancelled
EOL CI / EOL Units (Ⓐ2.16+py3.6) (push) Has been cancelled
EOL CI / EOL I (Ⓐ2.16+alpine3+py:azp/posix/1/) (push) Has been cancelled
EOL CI / EOL I (Ⓐ2.16+alpine3+py:azp/posix/2/) (push) Has been cancelled
EOL CI / EOL I (Ⓐ2.16+alpine3+py:azp/posix/3/) (push) Has been cancelled
EOL CI / EOL I (Ⓐ2.16+fedora38+py:azp/posix/1/) (push) Has been cancelled
EOL CI / EOL I (Ⓐ2.16+fedora38+py:azp/posix/2/) (push) Has been cancelled
EOL CI / EOL I (Ⓐ2.16+fedora38+py:azp/posix/3/) (push) Has been cancelled
EOL CI / EOL I (Ⓐ2.16+opensuse15+py:azp/posix/1/) (push) Has been cancelled
EOL CI / EOL I (Ⓐ2.16+opensuse15+py:azp/posix/2/) (push) Has been cancelled
EOL CI / EOL I (Ⓐ2.16+opensuse15+py:azp/posix/3/) (push) Has been cancelled
nox / Run extra sanity tests (push) Has been cancelled
Remove proxmox content.
parent f63fdceb23
commit f2b7bdf293
76 changed files with 124 additions and 14629 deletions
|
@@ -1,374 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2019, Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
import \
|
||||
ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
|
||||
from ansible_collections.community.general.plugins.modules import proxmox_backup
|
||||
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import (
|
||||
AnsibleExitJson, AnsibleFailJson, set_module_args, ModuleTestCase)
|
||||
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import patch
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
|
||||
proxmoxer = pytest.importorskip('proxmoxer')
|
||||
|
||||
|
||||
MINIMAL_PERMISSIONS = {
|
||||
'/sdn/zones': {'Datastore.AllocateSpace': 1, 'Datastore.Audit': 1},
|
||||
'/nodes': {'Datastore.AllocateSpace': 1, 'Datastore.Audit': 1},
|
||||
'/sdn': {'Datastore.AllocateSpace': 1, 'Datastore.Audit': 1},
|
||||
'/vms': {'VM.Audit': 1,
|
||||
'Sys.Audit': 1,
|
||||
'Mapping.Audit': 1,
|
||||
'VM.Backup': 1,
|
||||
'Datastore.Audit': 1,
|
||||
'SDN.Audit': 1,
|
||||
'Pool.Audit': 1},
|
||||
'/': {'Datastore.Audit': 1, 'Datastore.AllocateSpace': 1},
|
||||
'/storage/local-zfs': {'Datastore.AllocateSpace': 1,
|
||||
'Datastore.Audit': 1},
|
||||
'/storage': {'Datastore.AllocateSpace': 1, 'Datastore.Audit': 1},
|
||||
'/access': {'Datastore.AllocateSpace': 1, 'Datastore.Audit': 1},
|
||||
'/vms/101': {'VM.Backup': 1,
|
||||
'Mapping.Audit': 1,
|
||||
'Datastore.AllocateSpace': 0,
|
||||
'Sys.Audit': 1,
|
||||
'VM.Audit': 1,
|
||||
'SDN.Audit': 1,
|
||||
'Pool.Audit': 1,
|
||||
'Datastore.Audit': 1},
|
||||
'/vms/100': {'VM.Backup': 1,
|
||||
'Mapping.Audit': 1,
|
||||
'Datastore.AllocateSpace': 0,
|
||||
'Sys.Audit': 1,
|
||||
'VM.Audit': 1,
|
||||
'SDN.Audit': 1,
|
||||
'Pool.Audit': 1,
|
||||
'Datastore.Audit': 1},
|
||||
'/pool': {'Datastore.Audit': 1, 'Datastore.AllocateSpace': 1}, }
|
||||
|
||||
STORAGE = [{'type': 'pbs',
|
||||
'username': 'test@pbs',
|
||||
'datastore': 'Backup-Pool',
|
||||
'server': '10.0.0.1',
|
||||
'shared': 1,
|
||||
'fingerprint': '94:fd:ac:e7:d5:36:0e:11:5b:23:05:40:d2:a4:e1:8a:c1:52:41:01:07:28:c0:4d:c5:ee:df:7f:7c:03:ab:41',
|
||||
'prune-backups': 'keep-all=1',
|
||||
'storage': 'backup',
|
||||
'content': 'backup',
|
||||
'digest': 'ca46a68d7699de061c139d714892682ea7c9d681'},
|
||||
{'nodes': 'node1,node2,node3',
|
||||
'sparse': 1,
|
||||
'type': 'zfspool',
|
||||
'content': 'rootdir,images',
|
||||
'digest': 'ca46a68d7699de061c139d714892682ea7c9d681',
|
||||
'pool': 'rpool/data',
|
||||
'storage': 'local-zfs'}]
|
||||
|
||||
|
||||
VMS = [{"diskwrite": 0,
|
||||
"vmid": 100,
|
||||
"node": "node1",
|
||||
"id": "lxc/100",
|
||||
"maxdisk": 10000,
|
||||
"template": 0,
|
||||
"disk": 10000,
|
||||
"uptime": 10000,
|
||||
"maxmem": 10000,
|
||||
"maxcpu": 1,
|
||||
"netin": 10000,
|
||||
"type": "lxc",
|
||||
"netout": 10000,
|
||||
"mem": 10000,
|
||||
"diskread": 10000,
|
||||
"cpu": 0.01,
|
||||
"name": "test-lxc",
|
||||
"status": "running"},
|
||||
{"diskwrite": 0,
|
||||
"vmid": 101,
|
||||
"node": "node2",
|
||||
"id": "kvm/101",
|
||||
"maxdisk": 10000,
|
||||
"template": 0,
|
||||
"disk": 10000,
|
||||
"uptime": 10000,
|
||||
"maxmem": 10000,
|
||||
"maxcpu": 1,
|
||||
"netin": 10000,
|
||||
"type": "lxc",
|
||||
"netout": 10000,
|
||||
"mem": 10000,
|
||||
"diskread": 10000,
|
||||
"cpu": 0.01,
|
||||
"name": "test-kvm",
|
||||
"status": "running"}
|
||||
]
|
||||
|
||||
NODES = [{'level': '',
|
||||
'type': 'node',
|
||||
'node': 'node1',
|
||||
'status': 'online',
|
||||
'id': 'node/node1',
|
||||
'cgroup-mode': 2},
|
||||
{'status': 'online',
|
||||
'id': 'node/node2',
|
||||
'cgroup-mode': 2,
|
||||
'level': '',
|
||||
'node': 'node2',
|
||||
'type': 'node'},
|
||||
{'status': 'online',
|
||||
'id': 'node/node3',
|
||||
'cgroup-mode': 2,
|
||||
'level': '',
|
||||
'node': 'node3',
|
||||
'type': 'node'},
|
||||
]
|
||||
|
||||
TASK_API_RETURN = {
|
||||
"node1": {
|
||||
'starttime': 1732606253,
|
||||
'status': 'stopped',
|
||||
'type': 'vzdump',
|
||||
'pstart': 517463911,
|
||||
'upid': 'UPID:node1:003F8C63:1E7FB79C:67449780:vzdump:100:root@pam:',
|
||||
'id': '100',
|
||||
'node': 'hypervisor',
|
||||
'pid': 541669,
|
||||
'user': 'test@pve',
|
||||
'exitstatus': 'OK'},
|
||||
"node2": {
|
||||
'starttime': 1732606253,
|
||||
'status': 'stopped',
|
||||
'type': 'vzdump',
|
||||
'pstart': 517463911,
|
||||
'upid': 'UPID:node2:000029DD:1599528B:6108F068:vzdump:101:root@pam:',
|
||||
'id': '101',
|
||||
'node': 'hypervisor',
|
||||
'pid': 541669,
|
||||
'user': 'test@pve',
|
||||
'exitstatus': 'OK'},
|
||||
}
|
||||
|
||||
|
||||
VZDUMP_API_RETURN = {
|
||||
"node1": "UPID:node1:003F8C63:1E7FB79C:67449780:vzdump:100:root@pam:",
|
||||
"node2": "UPID:node2:000029DD:1599528B:6108F068:vzdump:101:root@pam:",
|
||||
"node3": "OK",
|
||||
}
|
||||
|
||||
|
||||
TASKLOG_API_RETURN = {"node1": [{'n': 1,
|
||||
't': "INFO: starting new backup job: vzdump 100 --mode snapshot --node node1 "
|
||||
"--notes-template '{{guestname}}' --storage backup --notification-mode auto"},
|
||||
{'t': 'INFO: Starting Backup of VM 100 (lxc)',
|
||||
'n': 2},
|
||||
{'n': 23, 't': 'INFO: adding notes to backup'},
|
||||
{'n': 24,
|
||||
't': 'INFO: Finished Backup of VM 100 (00:00:03)'},
|
||||
{'n': 25,
|
||||
't': 'INFO: Backup finished at 2024-11-25 16:28:03'},
|
||||
{'t': 'INFO: Backup job finished successfully',
|
||||
'n': 26},
|
||||
{'n': 27, 't': 'TASK OK'}],
|
||||
"node2": [{'n': 1,
|
||||
't': "INFO: starting new backup job: vzdump 101 --mode snapshot --node node2 "
|
||||
"--notes-template '{{guestname}}' --storage backup --notification-mode auto"},
|
||||
{'t': 'INFO: Starting Backup of VM 101 (kvm)',
|
||||
'n': 2},
|
||||
{'n': 24,
|
||||
't': 'INFO: Finished Backup of VM 100 (00:00:03)'},
|
||||
{'n': 25,
|
||||
't': 'INFO: Backup finished at 2024-11-25 16:28:03'},
|
||||
{'t': 'INFO: Backup job finished successfully',
|
||||
'n': 26},
|
||||
{'n': 27, 't': 'TASK OK'}],
|
||||
}
|
||||
|
||||
|
||||
def return_valid_resources(resource_type, *args, **kwargs):
|
||||
if resource_type == "vm":
|
||||
return VMS
|
||||
if resource_type == "node":
|
||||
return NODES
|
||||
|
||||
|
||||
def return_vzdump_api(node, *args, **kwargs):
|
||||
if node in ("node1", "node2", "node3"):
|
||||
return VZDUMP_API_RETURN[node]
|
||||
|
||||
|
||||
def return_logs_api(node, *args, **kwargs):
|
||||
if node in ("node1", "node2"):
|
||||
return TASKLOG_API_RETURN[node]
|
||||
|
||||
|
||||
def return_task_status_api(node, *args, **kwargs):
|
||||
if node in ("node1", "node2"):
|
||||
return TASK_API_RETURN[node]
|
||||
|
||||
|
||||
class TestProxmoxBackup(ModuleTestCase):
|
||||
def setUp(self):
|
||||
super(TestProxmoxBackup, self).setUp()
|
||||
proxmox_utils.HAS_PROXMOXER = True
|
||||
self.module = proxmox_backup
|
||||
self.connect_mock = patch(
|
||||
"ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect",
|
||||
).start()
|
||||
self.mock_get_permissions = patch.object(
|
||||
proxmox_backup.ProxmoxBackupAnsible, "_get_permissions").start()
|
||||
self.mock_get_storages = patch.object(proxmox_utils.ProxmoxAnsible,
|
||||
"get_storages").start()
|
||||
self.mock_get_resources = patch.object(
|
||||
proxmox_backup.ProxmoxBackupAnsible, "_get_resources").start()
|
||||
self.mock_get_tasklog = patch.object(
|
||||
proxmox_backup.ProxmoxBackupAnsible, "_get_tasklog").start()
|
||||
self.mock_post_vzdump = patch.object(
|
||||
proxmox_backup.ProxmoxBackupAnsible, "_post_vzdump").start()
|
||||
self.mock_get_taskok = patch.object(
|
||||
proxmox_backup.ProxmoxBackupAnsible, "_get_taskok").start()
|
||||
self.mock_get_permissions.return_value = MINIMAL_PERMISSIONS
|
||||
self.mock_get_storages.return_value = STORAGE
|
||||
self.mock_get_resources.side_effect = return_valid_resources
|
||||
self.mock_get_taskok.side_effect = return_task_status_api
|
||||
self.mock_get_tasklog.side_effect = return_logs_api
|
||||
self.mock_post_vzdump.side_effect = return_vzdump_api
|
||||
|
||||
def tearDown(self):
|
||||
self.connect_mock.stop()
|
||||
self.mock_get_permissions.stop()
|
||||
self.mock_get_storages.stop()
|
||||
self.mock_get_resources.stop()
|
||||
super(TestProxmoxBackup, self).tearDown()
|
||||
|
||||
def test_proxmox_backup_without_argument(self):
|
||||
with set_module_args({}):
|
||||
with pytest.raises(AnsibleFailJson):
|
||||
proxmox_backup.main()
|
||||
|
||||
def test_create_backup_check_mode(self):
|
||||
with set_module_args(
|
||||
{
|
||||
"api_user": "root@pam",
|
||||
"api_password": "secret",
|
||||
"api_host": "127.0.0.1",
|
||||
"mode": "all",
|
||||
"storage": "backup",
|
||||
"_ansible_check_mode": True,
|
||||
}
|
||||
):
|
||||
with pytest.raises(AnsibleExitJson) as exc_info:
|
||||
proxmox_backup.main()
|
||||
|
||||
result = exc_info.value.args[0]
|
||||
|
||||
assert result["changed"] is True
|
||||
assert result["msg"] == "Backups would be created"
|
||||
assert len(result["backups"]) == 0
|
||||
assert self.mock_get_taskok.call_count == 0
|
||||
assert self.mock_get_tasklog.call_count == 0
|
||||
assert self.mock_post_vzdump.call_count == 0
|
||||
|
||||
def test_create_backup_all_mode(self):
|
||||
with set_module_args({
|
||||
"api_user": "root@pam",
|
||||
"api_password": "secret",
|
||||
"api_host": "127.0.0.1",
|
||||
"mode": "all",
|
||||
"storage": "backup",
|
||||
}):
|
||||
with pytest.raises(AnsibleExitJson) as exc_info:
|
||||
proxmox_backup.main()
|
||||
|
||||
result = exc_info.value.args[0]
|
||||
assert result["changed"] is True
|
||||
assert result["msg"] == "Backup tasks created"
|
||||
for backup_result in result["backups"]:
|
||||
assert backup_result["upid"] in {
|
||||
VZDUMP_API_RETURN[key] for key in VZDUMP_API_RETURN}
|
||||
assert self.mock_get_taskok.call_count == 0
|
||||
assert self.mock_post_vzdump.call_count == 3
|
||||
|
||||
def test_create_backup_include_mode_with_wait(self):
|
||||
with set_module_args({
|
||||
"api_user": "root@pam",
|
||||
"api_password": "secret",
|
||||
"api_host": "127.0.0.1",
|
||||
"mode": "include",
|
||||
"node": "node1",
|
||||
"storage": "backup",
|
||||
"vmids": [100],
|
||||
"wait": True
|
||||
}):
|
||||
with pytest.raises(AnsibleExitJson) as exc_info:
|
||||
proxmox_backup.main()
|
||||
|
||||
result = exc_info.value.args[0]
|
||||
assert result["changed"] is True
|
||||
assert result["msg"] == "Backups succeeded"
|
||||
for backup_result in result["backups"]:
|
||||
assert backup_result["upid"] in {
|
||||
VZDUMP_API_RETURN[key] for key in VZDUMP_API_RETURN}
|
||||
assert self.mock_get_taskok.call_count == 1
|
||||
assert self.mock_post_vzdump.call_count == 1
|
||||
|
||||
def test_fail_insufficient_permissions(self):
|
||||
with set_module_args({
|
||||
"api_user": "root@pam",
|
||||
"api_password": "secret",
|
||||
"api_host": "127.0.0.1",
|
||||
"mode": "include",
|
||||
"storage": "backup",
|
||||
"performance_tweaks": "max-workers=2",
|
||||
"vmids": [100],
|
||||
"wait": True
|
||||
}):
|
||||
with pytest.raises(AnsibleFailJson) as exc_info:
|
||||
proxmox_backup.main()
|
||||
|
||||
result = exc_info.value.args[0]
|
||||
assert result["msg"] == "Insufficient permission: Performance_tweaks and bandwidth require 'Sys.Modify' permission for '/'"
|
||||
assert self.mock_get_taskok.call_count == 0
|
||||
assert self.mock_post_vzdump.call_count == 0
|
||||
|
||||
def test_fail_missing_node(self):
|
||||
with set_module_args({
|
||||
"api_user": "root@pam",
|
||||
"api_password": "secret",
|
||||
"api_host": "127.0.0.1",
|
||||
"mode": "include",
|
||||
"storage": "backup",
|
||||
"node": "nonexistingnode",
|
||||
"vmids": [100],
|
||||
"wait": True
|
||||
}):
|
||||
with pytest.raises(AnsibleFailJson) as exc_info:
|
||||
proxmox_backup.main()
|
||||
|
||||
result = exc_info.value.args[0]
|
||||
assert result["msg"] == "Node nonexistingnode was specified, but does not exist on the cluster"
|
||||
assert self.mock_get_taskok.call_count == 0
|
||||
assert self.mock_post_vzdump.call_count == 0
|
||||
|
||||
def test_fail_missing_storage(self):
|
||||
with set_module_args({
|
||||
"api_user": "root@pam",
|
||||
"api_password": "secret",
|
||||
"api_host": "127.0.0.1",
|
||||
"mode": "include",
|
||||
"storage": "nonexistingstorage",
|
||||
"vmids": [100],
|
||||
"wait": True
|
||||
}):
|
||||
with pytest.raises(AnsibleFailJson) as exc_info:
|
||||
proxmox_backup.main()
|
||||
|
||||
result = exc_info.value.args[0]
|
||||
assert result["msg"] == "Storage nonexistingstorage does not exist in the cluster"
|
||||
assert self.mock_get_taskok.call_count == 0
|
||||
assert self.mock_post_vzdump.call_count == 0
|
|
@@ -1,275 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2024 Marzieh Raoufnezhad <raoufnezhad at gmail.com>
|
||||
# Copyright (c) 2024 Maryam Mayabi <mayabi.ahm at gmail.com>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
|
||||
proxmoxer = pytest.importorskip("proxmoxer")
|
||||
|
||||
from ansible_collections.community.general.plugins.modules import proxmox_backup_info
|
||||
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import patch
|
||||
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import (
|
||||
AnsibleExitJson,
|
||||
AnsibleFailJson,
|
||||
ModuleTestCase,
|
||||
set_module_args,
|
||||
)
|
||||
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
|
||||
|
||||
RESOURCE_LIST = [
|
||||
{
|
||||
"uptime": 0,
|
||||
"diskwrite": 0,
|
||||
"name": "test01",
|
||||
"maxcpu": 0,
|
||||
"node": "NODE1",
|
||||
"mem": 0,
|
||||
"netout": 0,
|
||||
"netin": 0,
|
||||
"maxmem": 0,
|
||||
"diskread": 0,
|
||||
"disk": 0,
|
||||
"maxdisk": 0,
|
||||
"status": "running",
|
||||
"cpu": 0,
|
||||
"id": "qemu/100",
|
||||
"template": 0,
|
||||
"vmid": 100,
|
||||
"type": "qemu"
|
||||
},
|
||||
{
|
||||
"uptime": 0,
|
||||
"diskwrite": 0,
|
||||
"name": "test02",
|
||||
"maxcpu": 0,
|
||||
"node": "NODE1",
|
||||
"mem": 0,
|
||||
"netout": 0,
|
||||
"netin": 0,
|
||||
"maxmem": 0,
|
||||
"diskread": 0,
|
||||
"disk": 0,
|
||||
"maxdisk": 0,
|
||||
"status": "running",
|
||||
"cpu": 0,
|
||||
"id": "qemu/101",
|
||||
"template": 0,
|
||||
"vmid": 101,
|
||||
"type": "qemu"
|
||||
},
|
||||
{
|
||||
"uptime": 0,
|
||||
"diskwrite": 0,
|
||||
"name": "test03",
|
||||
"maxcpu": 0,
|
||||
"node": "NODE2",
|
||||
"mem": 0,
|
||||
"netout": 0,
|
||||
"netin": 0,
|
||||
"maxmem": 0,
|
||||
"diskread": 0,
|
||||
"disk": 0,
|
||||
"maxdisk": 0,
|
||||
"status": "running",
|
||||
"cpu": 0,
|
||||
"id": "qemu/102",
|
||||
"template": 0,
|
||||
"vmid": 102,
|
||||
"type": "qemu"
|
||||
}
|
||||
]
|
||||
BACKUP_JOBS = [
|
||||
{
|
||||
"type": "vzdump",
|
||||
"id": "backup-83831498-c631",
|
||||
"storage": "local",
|
||||
"vmid": "100",
|
||||
"enabled": 1,
|
||||
"next-run": 1735138800,
|
||||
"mailnotification": "always",
|
||||
"schedule": "06,18:30",
|
||||
"mode": "snapshot",
|
||||
"notes-template": "guestname"
|
||||
},
|
||||
{
|
||||
"schedule": "sat 15:00",
|
||||
"notes-template": "guestname",
|
||||
"mode": "snapshot",
|
||||
"mailnotification": "always",
|
||||
"next-run": 1735385400,
|
||||
"type": "vzdump",
|
||||
"enabled": 1,
|
||||
"vmid": "100,101,102",
|
||||
"storage": "local",
|
||||
"id": "backup-70025700-2302",
|
||||
}
|
||||
]
|
||||
|
||||
EXPECTED_BACKUP_OUTPUT = [
|
||||
{
|
||||
"bktype": "vzdump",
|
||||
"enabled": 1,
|
||||
"id": "backup-83831498-c631",
|
||||
"mode": "snapshot",
|
||||
"next-run": "2024-12-25 15:00:00",
|
||||
"schedule": "06,18:30",
|
||||
"storage": "local",
|
||||
"vm_name": "test01",
|
||||
"vmid": "100"
|
||||
},
|
||||
{
|
||||
"bktype": "vzdump",
|
||||
"enabled": 1,
|
||||
"id": "backup-70025700-2302",
|
||||
"mode": "snapshot",
|
||||
"next-run": "2024-12-28 11:30:00",
|
||||
"schedule": "sat 15:00",
|
||||
"storage": "local",
|
||||
"vm_name": "test01",
|
||||
"vmid": "100"
|
||||
},
|
||||
{
|
||||
"bktype": "vzdump",
|
||||
"enabled": 1,
|
||||
"id": "backup-70025700-2302",
|
||||
"mode": "snapshot",
|
||||
"next-run": "2024-12-28 11:30:00",
|
||||
"schedule": "sat 15:00",
|
||||
"storage": "local",
|
||||
"vm_name": "test02",
|
||||
"vmid": "101"
|
||||
},
|
||||
{
|
||||
"bktype": "vzdump",
|
||||
"enabled": 1,
|
||||
"id": "backup-70025700-2302",
|
||||
"mode": "snapshot",
|
||||
"next-run": "2024-12-28 11:30:00",
|
||||
"schedule": "sat 15:00",
|
||||
"storage": "local",
|
||||
"vm_name": "test03",
|
||||
"vmid": "102"
|
||||
}
|
||||
]
|
||||
EXPECTED_BACKUP_JOBS_OUTPUT = [
|
||||
{
|
||||
"enabled": 1,
|
||||
"id": "backup-83831498-c631",
|
||||
"mailnotification": "always",
|
||||
"mode": "snapshot",
|
||||
"next-run": 1735138800,
|
||||
"notes-template": "guestname",
|
||||
"schedule": "06,18:30",
|
||||
"storage": "local",
|
||||
"type": "vzdump",
|
||||
"vmid": "100"
|
||||
},
|
||||
{
|
||||
"enabled": 1,
|
||||
"id": "backup-70025700-2302",
|
||||
"mailnotification": "always",
|
||||
"mode": "snapshot",
|
||||
"next-run": 1735385400,
|
||||
"notes-template": "guestname",
|
||||
"schedule": "sat 15:00",
|
||||
"storage": "local",
|
||||
"type": "vzdump",
|
||||
"vmid": "100,101,102"
|
||||
}
|
||||
]
|
||||
|
||||
|
||||
class TestProxmoxBackupInfoModule(ModuleTestCase):
|
||||
def setUp(self):
|
||||
super(TestProxmoxBackupInfoModule, self).setUp()
|
||||
proxmox_utils.HAS_PROXMOXER = True
|
||||
self.module = proxmox_backup_info
|
||||
self.connect_mock = patch(
|
||||
"ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect",
|
||||
).start()
|
||||
self.connect_mock.return_value.cluster.resources.get.return_value = (
|
||||
RESOURCE_LIST
|
||||
)
|
||||
self.connect_mock.return_value.cluster.backup.get.return_value = (
|
||||
BACKUP_JOBS
|
||||
)
|
||||
|
||||
def tearDown(self):
|
||||
self.connect_mock.stop()
|
||||
super(TestProxmoxBackupInfoModule, self).tearDown()
|
||||
|
||||
def test_module_fail_when_required_args_missing(self):
|
||||
with pytest.raises(AnsibleFailJson) as exc_info:
|
||||
with set_module_args({}):
|
||||
self.module.main()
|
||||
|
||||
result = exc_info.value.args[0]
|
||||
assert result["msg"] == "missing required arguments: api_host, api_user"
|
||||
|
||||
def test_get_all_backups_information(self):
|
||||
with pytest.raises(AnsibleExitJson) as exc_info:
|
||||
with set_module_args({
|
||||
'api_host': 'proxmoxhost',
|
||||
'api_user': 'root@pam',
|
||||
'api_password': 'supersecret'
|
||||
}):
|
||||
self.module.main()
|
||||
|
||||
result = exc_info.value.args[0]
|
||||
assert result["backup_info"] == EXPECTED_BACKUP_OUTPUT
|
||||
|
||||
def test_get_specific_backup_information_by_vmname(self):
|
||||
with pytest.raises(AnsibleExitJson) as exc_info:
|
||||
vmname = 'test01'
|
||||
expected_output = [
|
||||
backup for backup in EXPECTED_BACKUP_OUTPUT if backup["vm_name"] == vmname
|
||||
]
|
||||
with set_module_args({
|
||||
'api_host': 'proxmoxhost',
|
||||
'api_user': 'root@pam',
|
||||
'api_password': 'supersecret',
|
||||
'vm_name': vmname
|
||||
}):
|
||||
self.module.main()
|
||||
|
||||
result = exc_info.value.args[0]
|
||||
assert result["backup_info"] == expected_output
|
||||
assert len(result["backup_info"]) == 2
|
||||
|
||||
def test_get_specific_backup_information_by_vmid(self):
|
||||
with pytest.raises(AnsibleExitJson) as exc_info:
|
||||
vmid = "101"
|
||||
expected_output = [
|
||||
backup for backup in EXPECTED_BACKUP_OUTPUT if backup["vmid"] == vmid
|
||||
]
|
||||
with set_module_args({
|
||||
'api_host': 'proxmoxhost',
|
||||
'api_user': 'root@pam',
|
||||
'api_password': 'supersecret',
|
||||
'vm_id': vmid
|
||||
}):
|
||||
self.module.main()
|
||||
result = exc_info.value.args[0]
|
||||
assert result["backup_info"] == expected_output
|
||||
assert len(result["backup_info"]) == 1
|
||||
|
||||
def test_get_specific_backup_information_by_backupjobs(self):
|
||||
with pytest.raises(AnsibleExitJson) as exc_info:
|
||||
backupjobs = True
|
||||
with set_module_args({
|
||||
'api_host': 'proxmoxhost',
|
||||
'api_user': 'root@pam',
|
||||
'api_password': 'supersecret',
|
||||
'backup_jobs': backupjobs
|
||||
}):
|
||||
self.module.main()
|
||||
|
||||
result = exc_info.value.args[0]
|
||||
assert result["backup_info"] == EXPECTED_BACKUP_JOBS_OUTPUT
|
|
@@ -1,168 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2021, Ansible Project
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
proxmoxer = pytest.importorskip("proxmoxer")
|
||||
mandatory_py_version = pytest.mark.skipif(
|
||||
sys.version_info < (2, 7),
|
||||
reason="The proxmoxer dependency requires python2.7 or higher",
|
||||
)
|
||||
|
||||
from ansible_collections.community.general.plugins.modules import proxmox_kvm
|
||||
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import (
|
||||
patch,
|
||||
DEFAULT,
|
||||
)
|
||||
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import (
|
||||
AnsibleExitJson,
|
||||
AnsibleFailJson,
|
||||
ModuleTestCase,
|
||||
set_module_args,
|
||||
)
|
||||
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
|
||||
|
||||
|
||||
class TestProxmoxKvmModule(ModuleTestCase):
|
||||
def setUp(self):
|
||||
super(TestProxmoxKvmModule, self).setUp()
|
||||
proxmox_utils.HAS_PROXMOXER = True
|
||||
self.module = proxmox_kvm
|
||||
self.connect_mock = patch(
|
||||
"ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect"
|
||||
).start()
|
||||
self.get_node_mock = patch.object(
|
||||
proxmox_utils.ProxmoxAnsible, "get_node"
|
||||
).start()
|
||||
self.get_vm_mock = patch.object(proxmox_utils.ProxmoxAnsible, "get_vm").start()
|
||||
self.create_vm_mock = patch.object(
|
||||
proxmox_kvm.ProxmoxKvmAnsible, "create_vm"
|
||||
).start()
|
||||
|
||||
def tearDown(self):
|
||||
self.create_vm_mock.stop()
|
||||
self.get_vm_mock.stop()
|
||||
self.get_node_mock.stop()
|
||||
self.connect_mock.stop()
|
||||
super(TestProxmoxKvmModule, self).tearDown()
|
||||
|
||||
def test_module_fail_when_required_args_missing(self):
|
||||
with self.assertRaises(AnsibleFailJson):
|
||||
with set_module_args({}):
|
||||
self.module.main()
|
||||
|
||||
def test_module_exits_unchanged_when_provided_vmid_exists(self):
|
||||
with set_module_args(
|
||||
{
|
||||
"api_host": "host",
|
||||
"api_user": "user",
|
||||
"api_password": "password",
|
||||
"vmid": "100",
|
||||
"node": "pve",
|
||||
}
|
||||
):
|
||||
self.get_vm_mock.return_value = [{"vmid": "100"}]
|
||||
with pytest.raises(AnsibleExitJson) as exc_info:
|
||||
self.module.main()
|
||||
|
||||
assert self.get_vm_mock.call_count == 1
|
||||
result = exc_info.value.args[0]
|
||||
assert result["changed"] is False
|
||||
assert result["msg"] == "VM with vmid <100> already exists"
|
||||
|
||||
def test_vm_created_when_vmid_not_exist_but_name_already_exist(self):
|
||||
with set_module_args(
|
||||
{
|
||||
"api_host": "host",
|
||||
"api_user": "user",
|
||||
"api_password": "password",
|
||||
"vmid": "100",
|
||||
"name": "existing.vm.local",
|
||||
"node": "pve",
|
||||
}
|
||||
):
|
||||
self.get_vm_mock.return_value = None
|
||||
with pytest.raises(AnsibleExitJson) as exc_info:
|
||||
self.module.main()
|
||||
|
||||
assert self.get_vm_mock.call_count == 1
|
||||
assert self.get_node_mock.call_count == 1
|
||||
result = exc_info.value.args[0]
|
||||
assert result["changed"] is True
|
||||
assert result["msg"] == "VM existing.vm.local with vmid 100 deployed"
|
||||
|
||||
def test_vm_not_created_when_name_already_exist_and_vmid_not_set(self):
|
||||
with set_module_args(
|
||||
{
|
||||
"api_host": "host",
|
||||
"api_user": "user",
|
||||
"api_password": "password",
|
||||
"name": "existing.vm.local",
|
||||
"node": "pve",
|
||||
}
|
||||
):
|
||||
with patch.object(proxmox_utils.ProxmoxAnsible, "get_vmid") as get_vmid_mock:
|
||||
get_vmid_mock.return_value = {
|
||||
"vmid": 100,
|
||||
"name": "existing.vm.local",
|
||||
}
|
||||
with pytest.raises(AnsibleExitJson) as exc_info:
|
||||
self.module.main()
|
||||
|
||||
assert get_vmid_mock.call_count == 1
|
||||
result = exc_info.value.args[0]
|
||||
assert result["changed"] is False
|
||||
|
||||
def test_vm_created_when_name_doesnt_exist_and_vmid_not_set(self):
|
||||
with set_module_args(
|
||||
{
|
||||
"api_host": "host",
|
||||
"api_user": "user",
|
||||
"api_password": "password",
|
||||
"name": "existing.vm.local",
|
||||
"node": "pve",
|
||||
}
|
||||
):
|
||||
self.get_vm_mock.return_value = None
|
||||
with patch.multiple(
|
||||
proxmox_utils.ProxmoxAnsible, get_vmid=DEFAULT, get_nextvmid=DEFAULT
|
||||
) as utils_mock:
|
||||
utils_mock["get_vmid"].return_value = None
|
||||
utils_mock["get_nextvmid"].return_value = 101
|
||||
with pytest.raises(AnsibleExitJson) as exc_info:
|
||||
self.module.main()
|
||||
|
||||
assert utils_mock["get_vmid"].call_count == 1
|
||||
assert utils_mock["get_nextvmid"].call_count == 1
|
||||
result = exc_info.value.args[0]
|
||||
assert result["changed"] is True
|
||||
assert result["msg"] == "VM existing.vm.local with vmid 101 deployed"
|
||||
|
||||
def test_parse_mac(self):
|
||||
assert (
|
||||
proxmox_kvm.parse_mac("virtio=00:11:22:AA:BB:CC,bridge=vmbr0,firewall=1")
|
||||
== "00:11:22:AA:BB:CC"
|
||||
)
|
||||
|
||||
def test_parse_dev(self):
|
||||
assert (
|
||||
proxmox_kvm.parse_dev("local-lvm:vm-1000-disk-0,format=qcow2")
|
||||
== "local-lvm:vm-1000-disk-0"
|
||||
)
|
||||
assert (
|
||||
proxmox_kvm.parse_dev("local-lvm:vm-101-disk-1,size=8G")
|
||||
== "local-lvm:vm-101-disk-1"
|
||||
)
|
||||
assert (
|
||||
proxmox_kvm.parse_dev("local-zfs:vm-1001-disk-0")
|
||||
== "local-zfs:vm-1001-disk-0"
|
||||
)
|
|
@@ -1,131 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json
import sys

import pytest

proxmoxer = pytest.importorskip('proxmoxer')
mandatory_py_version = pytest.mark.skipif(
    sys.version_info < (2, 7),
    reason='The proxmoxer dependency requires python2.7 or higher'
)

from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import MagicMock, patch
from ansible_collections.community.general.plugins.modules import proxmox_snap
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import set_module_args


def get_resources(type):
    return [{"diskwrite": 0,
             "vmid": 100,
             "node": "localhost",
             "id": "lxc/100",
             "maxdisk": 10000,
             "template": 0,
             "disk": 10000,
             "uptime": 10000,
             "maxmem": 10000,
             "maxcpu": 1,
             "netin": 10000,
             "type": "lxc",
             "netout": 10000,
             "mem": 10000,
             "diskread": 10000,
             "cpu": 0.01,
             "name": "test-lxc",
             "status": "running"}]


def fake_api(mocker):
    r = mocker.MagicMock()
    r.cluster.resources.get = MagicMock(side_effect=get_resources)
    return r


def test_proxmox_snap_without_argument(capfd):
    with set_module_args({}):
        with pytest.raises(SystemExit) as results:
            proxmox_snap.main()

    out, err = capfd.readouterr()
    assert not err
    assert json.loads(out)['failed']


@patch('ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect')
def test_create_snapshot_check_mode(connect_mock, capfd, mocker):
    with set_module_args({
        "hostname": "test-lxc",
        "api_user": "root@pam",
        "api_password": "secret",
        "api_host": "127.0.0.1",
        "state": "present",
        "snapname": "test",
        "timeout": "1",
        "force": True,
        "_ansible_check_mode": True
    }):
        proxmox_utils.HAS_PROXMOXER = True
        connect_mock.side_effect = lambda: fake_api(mocker)
        with pytest.raises(SystemExit) as results:
            proxmox_snap.main()

    out, err = capfd.readouterr()
    assert not err
    assert not json.loads(out)['changed']


@patch('ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect')
def test_remove_snapshot_check_mode(connect_mock, capfd, mocker):
    with set_module_args({
        "hostname": "test-lxc",
        "api_user": "root@pam",
        "api_password": "secret",
        "api_host": "127.0.0.1",
        "state": "absent",
        "snapname": "test",
        "timeout": "1",
        "force": True,
        "_ansible_check_mode": True
    }):
        proxmox_utils.HAS_PROXMOXER = True
        connect_mock.side_effect = lambda: fake_api(mocker)
        with pytest.raises(SystemExit) as results:
            proxmox_snap.main()

    out, err = capfd.readouterr()
    assert not err
    assert not json.loads(out)['changed']


@patch('ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect')
def test_rollback_snapshot_check_mode(connect_mock, capfd, mocker):
    with set_module_args({
        "hostname": "test-lxc",
        "api_user": "root@pam",
        "api_password": "secret",
        "api_host": "127.0.0.1",
        "state": "rollback",
        "snapname": "test",
        "timeout": "1",
        "force": True,
        "_ansible_check_mode": True
    }):
        proxmox_utils.HAS_PROXMOXER = True
        connect_mock.side_effect = lambda: fake_api(mocker)
        with pytest.raises(SystemExit) as results:
            proxmox_snap.main()

    out, err = capfd.readouterr()
    assert not err
    output = json.loads(out)
    assert not output['changed']
    assert output['msg'] == "Snapshot test does not exist"

@@ -1,90 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2023, Julian Vanden Broeck <julian.vandenbroeck at dalibo.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import absolute_import, division, print_function

__metaclass__ = type

import pytest

proxmoxer = pytest.importorskip("proxmoxer")

from ansible_collections.community.general.plugins.modules import proxmox_storage_contents_info
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import patch
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import (
    AnsibleExitJson,
    AnsibleFailJson,
    ModuleTestCase,
    set_module_args,
)
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils

NODE1 = "pve"
RAW_LIST_OUTPUT = [
    {
        "content": "backup",
        "ctime": 1702528474,
        "format": "pbs-vm",
        "size": 273804166061,
        "subtype": "qemu",
        "vmid": 931,
        "volid": "datastore:backup/vm/931/2023-12-14T04:34:34Z",
    },
    {
        "content": "backup",
        "ctime": 1702582560,
        "format": "pbs-vm",
        "size": 273804166059,
        "subtype": "qemu",
        "vmid": 931,
        "volid": "datastore:backup/vm/931/2023-12-14T19:36:00Z",
    },
]


def get_module_args(node, storage, content="all", vmid=None):
    return {
        "api_host": "host",
        "api_user": "user",
        "api_password": "password",
        "node": node,
        "storage": storage,
        "content": content,
        "vmid": vmid,
    }


class TestProxmoxStorageContentsInfo(ModuleTestCase):
    def setUp(self):
        super(TestProxmoxStorageContentsInfo, self).setUp()
        proxmox_utils.HAS_PROXMOXER = True
        self.module = proxmox_storage_contents_info
        self.connect_mock = patch(
            "ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect",
        ).start()
        self.connect_mock.return_value.nodes.return_value.storage.return_value.content.return_value.get.return_value = (
            RAW_LIST_OUTPUT
        )
        self.connect_mock.return_value.nodes.get.return_value = [{"node": NODE1}]

    def tearDown(self):
        self.connect_mock.stop()
        super(TestProxmoxStorageContentsInfo, self).tearDown()

    def test_module_fail_when_required_args_missing(self):
        with pytest.raises(AnsibleFailJson) as exc_info:
            with set_module_args({}):
                self.module.main()

    def test_storage_contents_info(self):
        with pytest.raises(AnsibleExitJson) as exc_info:
            with set_module_args(get_module_args(node=NODE1, storage="datastore")):
                expected_output = {}
                self.module.main()

        result = exc_info.value.args[0]
        assert not result["changed"]
        assert result["proxmox_storage_content"] == RAW_LIST_OUTPUT

@@ -1,206 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2021, Andreas Botzner (@paginabianca) <andreas at botzner dot com>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
#
|
||||
# Proxmox Tasks module unit tests.
|
||||
# The API responses used in these tests were recorded from PVE version 6.4-8
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import json
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
proxmoxer = pytest.importorskip('proxmoxer')
|
||||
mandatory_py_version = pytest.mark.skipif(
|
||||
sys.version_info < (2, 7),
|
||||
reason='The proxmoxer dependency requires python2.7 or higher'
|
||||
)
|
||||
|
||||
from ansible_collections.community.general.plugins.modules import proxmox_tasks_info
|
||||
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
|
||||
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import patch
|
||||
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import set_module_args
|
||||
|
||||
NODE = 'node01'
|
||||
TASK_UPID = 'UPID:iaclab-01-01:000029DD:1599528B:6108F068:srvreload:networking:root@pam:'
|
||||
TASKS = [
|
||||
{
|
||||
"endtime": 1629092710,
|
||||
"id": "networking",
|
||||
"node": "iaclab-01-01",
|
||||
"pid": 3539,
|
||||
"pstart": 474062216,
|
||||
"starttime": 1629092709,
|
||||
"status": "OK",
|
||||
"type": "srvreload",
|
||||
"upid": "UPID:iaclab-01-01:00000DD3:1C419D88:6119FB65:srvreload:networking:root@pam:",
|
||||
"user": "root@pam"
|
||||
},
|
||||
{
|
||||
"endtime": 1627975785,
|
||||
"id": "networking",
|
||||
"node": "iaclab-01-01",
|
||||
"pid": 10717,
|
||||
"pstart": 362369675,
|
||||
"starttime": 1627975784,
|
||||
"status": "command 'ifreload -a' failed: exit code 1",
|
||||
"type": "srvreload",
|
||||
"upid": "UPID:iaclab-01-01:000029DD:1599528B:6108F068:srvreload:networking:root@pam:",
|
||||
"user": "root@pam"
|
||||
},
|
||||
{
|
||||
"endtime": 1627975503,
|
||||
"id": "networking",
|
||||
"node": "iaclab-01-01",
|
||||
"pid": 6778,
|
||||
"pstart": 362341540,
|
||||
"starttime": 1627975503,
|
||||
"status": "OK",
|
||||
"type": "srvreload",
|
||||
"upid": "UPID:iaclab-01-01:00001A7A:1598E4A4:6108EF4F:srvreload:networking:root@pam:",
|
||||
"user": "root@pam"
|
||||
}
|
||||
]
|
||||
EXPECTED_TASKS = [
|
||||
{
|
||||
"endtime": 1629092710,
|
||||
"id": "networking",
|
||||
"node": "iaclab-01-01",
|
||||
"pid": 3539,
|
||||
"pstart": 474062216,
|
||||
"starttime": 1629092709,
|
||||
"status": "OK",
|
||||
"type": "srvreload",
|
||||
"upid": "UPID:iaclab-01-01:00000DD3:1C419D88:6119FB65:srvreload:networking:root@pam:",
|
||||
"user": "root@pam",
|
||||
"failed": False
|
||||
},
|
||||
{
|
||||
"endtime": 1627975785,
|
||||
"id": "networking",
|
||||
"node": "iaclab-01-01",
|
||||
"pid": 10717,
|
||||
"pstart": 362369675,
|
||||
"starttime": 1627975784,
|
||||
"status": "command 'ifreload -a' failed: exit code 1",
|
||||
"type": "srvreload",
|
||||
"upid": "UPID:iaclab-01-01:000029DD:1599528B:6108F068:srvreload:networking:root@pam:",
|
||||
"user": "root@pam",
|
||||
"failed": True
|
||||
},
|
||||
{
|
||||
"endtime": 1627975503,
|
||||
"id": "networking",
|
||||
"node": "iaclab-01-01",
|
||||
"pid": 6778,
|
||||
"pstart": 362341540,
|
||||
"starttime": 1627975503,
|
||||
"status": "OK",
|
||||
"type": "srvreload",
|
||||
"upid": "UPID:iaclab-01-01:00001A7A:1598E4A4:6108EF4F:srvreload:networking:root@pam:",
|
||||
"user": "root@pam",
|
||||
"failed": False
|
||||
}
|
||||
]
|
||||
|
||||
EXPECTED_SINGLE_TASK = [
|
||||
{
|
||||
"endtime": 1627975785,
|
||||
"id": "networking",
|
||||
"node": "iaclab-01-01",
|
||||
"pid": 10717,
|
||||
"pstart": 362369675,
|
||||
"starttime": 1627975784,
|
||||
"status": "command 'ifreload -a' failed: exit code 1",
|
||||
"type": "srvreload",
|
||||
"upid": "UPID:iaclab-01-01:000029DD:1599528B:6108F068:srvreload:networking:root@pam:",
|
||||
"user": "root@pam",
|
||||
"failed": True
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect')
|
||||
def test_without_required_parameters(connect_mock, capfd, mocker):
|
||||
with set_module_args({}):
|
||||
with pytest.raises(SystemExit):
|
||||
proxmox_tasks_info.main()
|
||||
out, err = capfd.readouterr()
|
||||
assert not err
|
||||
assert json.loads(out)['failed']
|
||||
|
||||
|
||||
def mock_api_tasks_response(mocker):
|
||||
m = mocker.MagicMock()
|
||||
g = mocker.MagicMock()
|
||||
m.nodes = mocker.MagicMock(return_value=g)
|
||||
g.tasks.get = mocker.MagicMock(return_value=TASKS)
|
||||
return m
|
||||
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect')
|
||||
def test_get_tasks(connect_mock, capfd, mocker):
|
||||
with set_module_args({
|
||||
'api_host': 'proxmoxhost',
|
||||
'api_user': 'root@pam',
|
||||
'api_password': 'supersecret',
|
||||
'node': NODE
|
||||
}):
|
||||
connect_mock.side_effect = lambda: mock_api_tasks_response(mocker)
|
||||
proxmox_utils.HAS_PROXMOXER = True
|
||||
|
||||
with pytest.raises(SystemExit):
|
||||
proxmox_tasks_info.main()
|
||||
out, err = capfd.readouterr()
|
||||
assert not err
|
||||
assert len(json.loads(out)['proxmox_tasks']) != 0
|
||||
assert not json.loads(out)['changed']
|
||||
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect')
|
||||
def test_get_single_task(connect_mock, capfd, mocker):
|
||||
with set_module_args({
|
||||
'api_host': 'proxmoxhost',
|
||||
'api_user': 'root@pam',
|
||||
'api_password': 'supersecret',
|
||||
'node': NODE,
|
||||
'task': TASK_UPID
|
||||
}):
|
||||
connect_mock.side_effect = lambda: mock_api_tasks_response(mocker)
|
||||
proxmox_utils.HAS_PROXMOXER = True
|
||||
|
||||
with pytest.raises(SystemExit):
|
||||
proxmox_tasks_info.main()
|
||||
out, err = capfd.readouterr()
|
||||
assert not err
|
||||
assert len(json.loads(out)['proxmox_tasks']) == 1
|
||||
assert json.loads(out)
|
||||
assert not json.loads(out)['changed']
|
||||
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect')
|
||||
def test_get_non_existent_task(connect_mock, capfd, mocker):
|
||||
with set_module_args({
|
||||
'api_host': 'proxmoxhost',
|
||||
'api_user': 'root@pam',
|
||||
'api_password': 'supersecret',
|
||||
'node': NODE,
|
||||
'task': 'UPID:nonexistent'
|
||||
}):
|
||||
connect_mock.side_effect = lambda: mock_api_tasks_response(mocker)
|
||||
proxmox_utils.HAS_PROXMOXER = True
|
||||
|
||||
with pytest.raises(SystemExit):
|
||||
proxmox_tasks_info.main()
|
||||
out, err = capfd.readouterr()
|
||||
assert not err
|
||||
assert json.loads(out)['failed']
|
||||
assert 'proxmox_tasks' not in json.loads(out)
|
||||
assert not json.loads(out)['changed']
|
||||
assert json.loads(
|
||||
out)['msg'] == 'Task: UPID:nonexistent does not exist on node: node01.'
|
|
@@ -1,66 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2023, Sergei Antipov <greendayonfire at gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later

from __future__ import absolute_import, division, print_function

__metaclass__ = type

import os
import sys

import pytest

proxmoxer = pytest.importorskip('proxmoxer')
mandatory_py_version = pytest.mark.skipif(
    sys.version_info < (2, 7),
    reason='The proxmoxer dependency requires python2.7 or higher'
)

from ansible_collections.community.general.plugins.modules import proxmox_template
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import patch, Mock
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import (
    AnsibleFailJson,
    ModuleTestCase,
    set_module_args,
)
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils


class TestProxmoxTemplateModule(ModuleTestCase):
    def setUp(self):
        super(TestProxmoxTemplateModule, self).setUp()
        proxmox_utils.HAS_PROXMOXER = True
        self.module = proxmox_template
        self.connect_mock = patch(
            "ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect"
        )
        self.connect_mock.start()

    def tearDown(self):
        self.connect_mock.stop()
        super(TestProxmoxTemplateModule, self).tearDown()

    @patch("os.stat")
    @patch.multiple(os.path, exists=Mock(return_value=True), isfile=Mock(return_value=True))
    def test_module_fail_when_toolbelt_not_installed_and_file_size_is_big(self, mock_stat):
        self.module.HAS_REQUESTS_TOOLBELT = False
        mock_stat.return_value.st_size = 268435460
        with set_module_args(
            {
                "api_host": "host",
                "api_user": "user",
                "api_password": "password",
                "node": "pve",
                "src": "/tmp/mock.iso",
                "content_type": "iso"
            }
        ):
            with pytest.raises(AnsibleFailJson) as exc_info:
                self.module.main()

        result = exc_info.value.args[0]
        assert result["failed"] is True
        assert result["msg"] == "'requests_toolbelt' module is required to upload files larger than 256MB"

@@ -1,714 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (c) 2023, Sergei Antipov <greendayonfire at gmail.com>
|
||||
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
# SPDX-License-Identifier: GPL-3.0-or-later
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__metaclass__ = type
|
||||
|
||||
import sys
|
||||
|
||||
import pytest
|
||||
|
||||
proxmoxer = pytest.importorskip("proxmoxer")
|
||||
mandatory_py_version = pytest.mark.skipif(
|
||||
sys.version_info < (2, 7),
|
||||
reason="The proxmoxer dependency requires python2.7 or higher",
|
||||
)
|
||||
|
||||
from ansible_collections.community.general.plugins.modules import proxmox_vm_info
|
||||
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import patch
|
||||
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import (
|
||||
AnsibleExitJson,
|
||||
AnsibleFailJson,
|
||||
ModuleTestCase,
|
||||
set_module_args,
|
||||
)
|
||||
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
|
||||
|
||||
NODE1 = "pve"
|
||||
NODE2 = "pve2"
|
||||
RAW_CLUSTER_OUTPUT = [
|
||||
{
|
||||
"cpu": 0.174069059487628,
|
||||
"disk": 0,
|
||||
"diskread": 6656,
|
||||
"diskwrite": 0,
|
||||
"id": "qemu/100",
|
||||
"maxcpu": 1,
|
||||
"maxdisk": 34359738368,
|
||||
"maxmem": 4294967296,
|
||||
"mem": 35304543,
|
||||
"name": "pxe.home.arpa",
|
||||
"netin": 416956,
|
||||
"netout": 17330,
|
||||
"node": NODE1,
|
||||
"status": "running",
|
||||
"template": 0,
|
||||
"type": "qemu",
|
||||
"uptime": 669,
|
||||
"vmid": 100,
|
||||
},
|
||||
{
|
||||
"cpu": 0,
|
||||
"disk": 0,
|
||||
"diskread": 0,
|
||||
"diskwrite": 0,
|
||||
"id": "qemu/101",
|
||||
"maxcpu": 1,
|
||||
"maxdisk": 0,
|
||||
"maxmem": 536870912,
|
||||
"mem": 0,
|
||||
"name": "test1",
|
||||
"netin": 0,
|
||||
"netout": 0,
|
||||
"node": NODE2,
|
||||
"pool": "pool1",
|
||||
"status": "stopped",
|
||||
"template": 0,
|
||||
"type": "qemu",
|
||||
"uptime": 0,
|
||||
"vmid": 101,
|
||||
},
|
||||
{
|
||||
"cpu": 0,
|
||||
"disk": 352190464,
|
||||
"diskread": 0,
|
||||
"diskwrite": 0,
|
||||
"id": "lxc/102",
|
||||
"maxcpu": 2,
|
||||
"maxdisk": 10737418240,
|
||||
"maxmem": 536870912,
|
||||
"mem": 28192768,
|
||||
"name": "test-lxc.home.arpa",
|
||||
"netin": 102757,
|
||||
"netout": 446,
|
||||
"node": NODE1,
|
||||
"status": "running",
|
||||
"template": 0,
|
||||
"type": "lxc",
|
||||
"uptime": 161,
|
||||
"vmid": 102,
|
||||
},
|
||||
{
|
||||
"cpu": 0,
|
||||
"disk": 0,
|
||||
"diskread": 0,
|
||||
"diskwrite": 0,
|
||||
"id": "lxc/103",
|
||||
"maxcpu": 2,
|
||||
"maxdisk": 10737418240,
|
||||
"maxmem": 536870912,
|
||||
"mem": 0,
|
||||
"name": "test1-lxc.home.arpa",
|
||||
"netin": 0,
|
||||
"netout": 0,
|
||||
"node": NODE2,
|
||||
"pool": "pool1",
|
||||
"status": "stopped",
|
||||
"template": 0,
|
||||
"type": "lxc",
|
||||
"uptime": 0,
|
||||
"vmid": 103,
|
||||
},
|
||||
{
|
||||
"cpu": 0,
|
||||
"disk": 0,
|
||||
"diskread": 0,
|
||||
"diskwrite": 0,
|
||||
"id": "lxc/104",
|
||||
"maxcpu": 2,
|
||||
"maxdisk": 10737418240,
|
||||
"maxmem": 536870912,
|
||||
"mem": 0,
|
||||
"name": "test-lxc.home.arpa",
|
||||
"netin": 0,
|
||||
"netout": 0,
|
||||
"node": NODE2,
|
||||
"pool": "pool1",
|
||||
"status": "stopped",
|
||||
"template": 0,
|
||||
"type": "lxc",
|
||||
"uptime": 0,
|
||||
"vmid": 104,
|
||||
},
|
||||
{
|
||||
"cpu": 0,
|
||||
"disk": 0,
|
||||
"diskread": 0,
|
||||
"diskwrite": 0,
|
||||
"id": "lxc/105",
|
||||
"maxcpu": 2,
|
||||
"maxdisk": 10737418240,
|
||||
"maxmem": 536870912,
|
||||
"mem": 0,
|
||||
"name": "",
|
||||
"netin": 0,
|
||||
"netout": 0,
|
||||
"node": NODE2,
|
||||
"pool": "pool1",
|
||||
"status": "stopped",
|
||||
"template": 0,
|
||||
"type": "lxc",
|
||||
"uptime": 0,
|
||||
"vmid": 105,
|
||||
},
|
||||
]
|
||||
RAW_LXC_OUTPUT = [
|
||||
{
|
||||
"cpu": 0,
|
||||
"cpus": 2,
|
||||
"disk": 0,
|
||||
"diskread": 0,
|
||||
"diskwrite": 0,
|
||||
"maxdisk": 10737418240,
|
||||
"maxmem": 536870912,
|
||||
"maxswap": 536870912,
|
||||
"mem": 0,
|
||||
"name": "test1-lxc.home.arpa",
|
||||
"netin": 0,
|
||||
"netout": 0,
|
||||
"status": "stopped",
|
||||
"swap": 0,
|
||||
"type": "lxc",
|
||||
"uptime": 0,
|
||||
"vmid": "103",
|
||||
},
|
||||
{
|
||||
"cpu": 0,
|
||||
"cpus": 2,
|
||||
"disk": 352190464,
|
||||
"diskread": 0,
|
||||
"diskwrite": 0,
|
||||
"maxdisk": 10737418240,
|
||||
"maxmem": 536870912,
|
||||
"maxswap": 536870912,
|
||||
"mem": 28192768,
|
||||
"name": "test-lxc.home.arpa",
|
||||
"netin": 102757,
|
||||
"netout": 446,
|
||||
"pid": 4076752,
|
||||
"status": "running",
|
||||
"swap": 0,
|
||||
"type": "lxc",
|
||||
"uptime": 161,
|
||||
"vmid": "102",
|
||||
},
|
||||
{
|
||||
"cpu": 0,
|
||||
"cpus": 2,
|
||||
"disk": 0,
|
||||
"diskread": 0,
|
||||
"diskwrite": 0,
|
||||
"maxdisk": 10737418240,
|
||||
"maxmem": 536870912,
|
||||
"maxswap": 536870912,
|
||||
"mem": 0,
|
||||
"name": "test-lxc.home.arpa",
|
||||
"netin": 0,
|
||||
"netout": 0,
|
||||
"status": "stopped",
|
||||
"swap": 0,
|
||||
"type": "lxc",
|
||||
"uptime": 0,
|
||||
"vmid": "104",
|
||||
},
|
||||
{
|
||||
"cpu": 0,
|
||||
"cpus": 2,
|
||||
"disk": 0,
|
||||
"diskread": 0,
|
||||
"diskwrite": 0,
|
||||
"maxdisk": 10737418240,
|
||||
"maxmem": 536870912,
|
||||
"maxswap": 536870912,
|
||||
"mem": 0,
|
||||
"name": "",
|
||||
"netin": 0,
|
||||
"netout": 0,
|
||||
"status": "stopped",
|
||||
"swap": 0,
|
||||
"type": "lxc",
|
||||
"uptime": 0,
|
||||
"vmid": "105",
|
||||
},
|
||||
]
|
||||
RAW_QEMU_OUTPUT = [
|
||||
{
|
||||
"cpu": 0,
|
||||
"cpus": 1,
|
||||
"disk": 0,
|
||||
"diskread": 0,
|
||||
"diskwrite": 0,
|
||||
"maxdisk": 0,
|
||||
"maxmem": 536870912,
|
||||
"mem": 0,
|
||||
"name": "test1",
|
||||
"netin": 0,
|
||||
"netout": 0,
|
||||
"status": "stopped",
|
||||
"uptime": 0,
|
||||
"vmid": 101,
|
||||
},
|
||||
{
|
||||
"cpu": 0.174069059487628,
|
||||
"cpus": 1,
|
||||
"disk": 0,
|
||||
"diskread": 6656,
|
||||
"diskwrite": 0,
|
||||
"maxdisk": 34359738368,
|
||||
"maxmem": 4294967296,
|
||||
"mem": 35304543,
|
||||
"name": "pxe.home.arpa",
|
||||
"netin": 416956,
|
||||
"netout": 17330,
|
||||
"pid": 4076688,
|
||||
"status": "running",
|
||||
"uptime": 669,
|
||||
"vmid": 100,
|
||||
},
|
||||
]
|
||||
EXPECTED_VMS_OUTPUT = [
|
||||
{
|
||||
"cpu": 0.174069059487628,
|
||||
"cpus": 1,
|
||||
"disk": 0,
|
||||
"diskread": 6656,
|
||||
"diskwrite": 0,
|
||||
"id": "qemu/100",
|
||||
"maxcpu": 1,
|
||||
"maxdisk": 34359738368,
|
||||
"maxmem": 4294967296,
|
||||
"mem": 35304543,
|
||||
"name": "pxe.home.arpa",
|
||||
"netin": 416956,
|
||||
"netout": 17330,
|
||||
"node": NODE1,
|
||||
"pid": 4076688,
|
||||
"status": "running",
|
||||
"template": False,
|
||||
"type": "qemu",
|
||||
"uptime": 669,
|
||||
"vmid": 100,
|
||||
},
|
||||
{
|
||||
"cpu": 0,
|
||||
"cpus": 1,
|
||||
"disk": 0,
|
||||
"diskread": 0,
|
||||
"diskwrite": 0,
|
||||
"id": "qemu/101",
|
||||
"maxcpu": 1,
|
||||
"maxdisk": 0,
|
||||
"maxmem": 536870912,
|
||||
"mem": 0,
|
||||
"name": "test1",
|
||||
"netin": 0,
|
||||
"netout": 0,
|
||||
"node": NODE2,
|
||||
"pool": "pool1",
|
||||
"status": "stopped",
|
||||
"template": False,
|
||||
"type": "qemu",
|
||||
"uptime": 0,
|
||||
"vmid": 101,
|
||||
},
|
||||
{
|
||||
"cpu": 0,
|
||||
"cpus": 2,
|
||||
"disk": 352190464,
|
||||
"diskread": 0,
|
||||
"diskwrite": 0,
|
||||
"id": "lxc/102",
|
||||
"maxcpu": 2,
|
||||
"maxdisk": 10737418240,
|
||||
"maxmem": 536870912,
|
||||
"maxswap": 536870912,
|
||||
"mem": 28192768,
|
||||
"name": "test-lxc.home.arpa",
|
||||
"netin": 102757,
|
||||
"netout": 446,
|
||||
"node": NODE1,
|
||||
"pid": 4076752,
|
||||
"status": "running",
|
||||
"swap": 0,
|
||||
"template": False,
|
||||
"type": "lxc",
|
||||
"uptime": 161,
|
||||
"vmid": 102,
|
||||
},
|
||||
{
|
||||
"cpu": 0,
|
||||
"cpus": 2,
|
||||
"disk": 0,
|
||||
"diskread": 0,
|
||||
"diskwrite": 0,
|
||||
"id": "lxc/103",
|
||||
"maxcpu": 2,
|
||||
"maxdisk": 10737418240,
|
||||
"maxmem": 536870912,
|
||||
"maxswap": 536870912,
|
||||
"mem": 0,
|
||||
"name": "test1-lxc.home.arpa",
|
||||
"netin": 0,
|
||||
"netout": 0,
|
||||
"node": NODE2,
|
||||
"pool": "pool1",
|
||||
"status": "stopped",
|
||||
"swap": 0,
|
||||
"template": False,
|
||||
"type": "lxc",
|
||||
"uptime": 0,
|
||||
"vmid": 103,
|
||||
},
|
||||
{
|
||||
"cpu": 0,
|
||||
"cpus": 2,
|
||||
"disk": 0,
|
||||
"diskread": 0,
|
||||
"diskwrite": 0,
|
||||
"id": "lxc/104",
|
||||
"maxcpu": 2,
|
||||
"maxdisk": 10737418240,
|
||||
"maxmem": 536870912,
|
||||
"maxswap": 536870912,
|
||||
"mem": 0,
|
||||
"name": "test-lxc.home.arpa",
|
||||
"netin": 0,
|
||||
"netout": 0,
|
||||
"node": NODE2,
|
||||
"pool": "pool1",
|
||||
"status": "stopped",
|
||||
"swap": 0,
|
||||
"template": False,
|
||||
"type": "lxc",
|
||||
"uptime": 0,
|
||||
"vmid": 104,
|
||||
},
|
||||
{
|
||||
"cpu": 0,
|
||||
"cpus": 2,
|
||||
"disk": 0,
|
||||
"diskread": 0,
|
||||
"diskwrite": 0,
|
||||
"id": "lxc/105",
|
||||
"maxcpu": 2,
|
||||
"maxdisk": 10737418240,
|
||||
"maxmem": 536870912,
|
||||
"maxswap": 536870912,
|
||||
"mem": 0,
|
||||
"name": "",
|
||||
"netin": 0,
|
||||
"netout": 0,
|
||||
"node": NODE2,
|
||||
"pool": "pool1",
|
||||
"status": "stopped",
|
||||
"swap": 0,
|
||||
"template": False,
|
||||
"type": "lxc",
|
||||
"uptime": 0,
|
||||
"vmid": 105,
|
||||
},
|
||||
]
|
||||
|
||||
|
||||
def get_module_args(type="all", node=None, vmid=None, name=None, config="none"):
|
||||
return {
|
||||
"api_host": "host",
|
||||
"api_user": "user",
|
||||
"api_password": "password",
|
||||
"node": node,
|
||||
"type": type,
|
||||
"vmid": vmid,
|
||||
"name": name,
|
||||
"config": config,
|
||||
}
|
||||
|
||||
|
||||
class TestProxmoxVmInfoModule(ModuleTestCase):
    def setUp(self):
        super(TestProxmoxVmInfoModule, self).setUp()
        proxmox_utils.HAS_PROXMOXER = True
        self.module = proxmox_vm_info
        self.connect_mock = patch(
            "ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect",
        ).start()
        self.connect_mock.return_value.nodes.return_value.lxc.return_value.get.return_value = (
            RAW_LXC_OUTPUT
        )
        self.connect_mock.return_value.nodes.return_value.qemu.return_value.get.return_value = (
            RAW_QEMU_OUTPUT
        )
        self.connect_mock.return_value.cluster.return_value.resources.return_value.get.return_value = (
            RAW_CLUSTER_OUTPUT
        )
        self.connect_mock.return_value.nodes.get.return_value = [{"node": NODE1}]

    def tearDown(self):
        self.connect_mock.stop()
        super(TestProxmoxVmInfoModule, self).tearDown()

    def test_module_fail_when_required_args_missing(self):
        with pytest.raises(AnsibleFailJson) as exc_info:
            with set_module_args({}):
                self.module.main()

        result = exc_info.value.args[0]
        assert result["msg"] == "missing required arguments: api_host, api_user"

    def test_get_lxc_vms_information(self):
        with pytest.raises(AnsibleExitJson) as exc_info:
            with set_module_args(get_module_args(type="lxc")):
                expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["type"] == "lxc"]
                self.module.main()

        result = exc_info.value.args[0]
        assert result["changed"] is False
        assert result["proxmox_vms"] == expected_output

    def test_get_qemu_vms_information(self):
        with pytest.raises(AnsibleExitJson) as exc_info:
            with set_module_args(get_module_args(type="qemu")):
                expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["type"] == "qemu"]
                self.module.main()

        result = exc_info.value.args[0]
        assert result["proxmox_vms"] == expected_output

    def test_get_all_vms_information(self):
        with pytest.raises(AnsibleExitJson) as exc_info:
            with set_module_args(get_module_args()):
                self.module.main()

        result = exc_info.value.args[0]
        assert result["proxmox_vms"] == EXPECTED_VMS_OUTPUT

    def test_vmid_is_converted_to_int(self):
        with pytest.raises(AnsibleExitJson) as exc_info:
            with set_module_args(get_module_args(type="lxc")):
                self.module.main()

        result = exc_info.value.args[0]
        assert isinstance(result["proxmox_vms"][0]["vmid"], int)

    def test_get_specific_lxc_vm_information(self):
        with pytest.raises(AnsibleExitJson) as exc_info:
            vmid = 102
            expected_output = [
                vm
                for vm in EXPECTED_VMS_OUTPUT
                if vm["vmid"] == vmid and vm["type"] == "lxc"
            ]
            with set_module_args(get_module_args(type="lxc", vmid=vmid)):
                self.module.main()

        result = exc_info.value.args[0]
        assert result["proxmox_vms"] == expected_output
        assert len(result["proxmox_vms"]) == 1

    def test_get_specific_qemu_vm_information(self):
        with pytest.raises(AnsibleExitJson) as exc_info:
            vmid = 100
            expected_output = [
                vm
                for vm in EXPECTED_VMS_OUTPUT
                if vm["vmid"] == vmid and vm["type"] == "qemu"
            ]
            with set_module_args(get_module_args(type="qemu", vmid=vmid)):
                self.module.main()

        result = exc_info.value.args[0]
        assert result["proxmox_vms"] == expected_output
        assert len(result["proxmox_vms"]) == 1

    def test_get_specific_vm_information(self):
        with pytest.raises(AnsibleExitJson) as exc_info:
            vmid = 100
            expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["vmid"] == vmid]
            with set_module_args(get_module_args(type="all", vmid=vmid)):
                self.module.main()

        result = exc_info.value.args[0]
        assert result["proxmox_vms"] == expected_output
        assert len(result["proxmox_vms"]) == 1

    def test_get_specific_vm_information_by_using_name(self):
        name = "test1-lxc.home.arpa"
        self.connect_mock.return_value.cluster.resources.get.return_value = [
            {"name": name, "vmid": "103"}
        ]

        with pytest.raises(AnsibleExitJson) as exc_info:
            expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["name"] == name]
            with set_module_args(get_module_args(type="all", name=name)):
                self.module.main()

        result = exc_info.value.args[0]
        assert result["proxmox_vms"] == expected_output
        assert len(result["proxmox_vms"]) == 1

    def test_get_multiple_vms_with_the_same_name(self):
        name = "test-lxc.home.arpa"
        self.connect_mock.return_value.cluster.resources.get.return_value = [
            {"name": name, "vmid": "102"},
            {"name": name, "vmid": "104"},
        ]

        with pytest.raises(AnsibleExitJson) as exc_info:
            expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["name"] == name]
            with set_module_args(get_module_args(type="all", name=name)):
                self.module.main()

        result = exc_info.value.args[0]
        assert result["proxmox_vms"] == expected_output
        assert len(result["proxmox_vms"]) == 2

    def test_get_vm_with_an_empty_name(self):
        name = ""
        self.connect_mock.return_value.cluster.resources.get.return_value = [
            {"name": name, "vmid": "105"},
        ]

        with pytest.raises(AnsibleExitJson) as exc_info:
            expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["name"] == name]
            with set_module_args(get_module_args(type="all", name=name)):
                self.module.main()

        result = exc_info.value.args[0]
        assert result["proxmox_vms"] == expected_output
        assert len(result["proxmox_vms"]) == 1

    def test_get_all_lxc_vms_from_specific_node(self):
        with pytest.raises(AnsibleExitJson) as exc_info:
            expected_output = [
                vm
                for vm in EXPECTED_VMS_OUTPUT
                if vm["node"] == NODE1 and vm["type"] == "lxc"
            ]
            with set_module_args(get_module_args(type="lxc", node=NODE1)):
                self.module.main()

        result = exc_info.value.args[0]
        assert result["proxmox_vms"] == expected_output
        assert len(result["proxmox_vms"]) == 1

    def test_get_all_qemu_vms_from_specific_node(self):
        with pytest.raises(AnsibleExitJson) as exc_info:
            expected_output = [
                vm
                for vm in EXPECTED_VMS_OUTPUT
                if vm["node"] == NODE1 and vm["type"] == "qemu"
            ]
            with set_module_args(get_module_args(type="qemu", node=NODE1)):
                self.module.main()

        result = exc_info.value.args[0]
        assert result["proxmox_vms"] == expected_output
        assert len(result["proxmox_vms"]) == 1

    def test_get_all_vms_from_specific_node(self):
        with pytest.raises(AnsibleExitJson) as exc_info:
            expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["node"] == NODE1]
            with set_module_args(get_module_args(node=NODE1)):
                self.module.main()

        result = exc_info.value.args[0]
        assert result["proxmox_vms"] == expected_output
        assert len(result["proxmox_vms"]) == 2

    def test_module_returns_empty_list_when_vm_does_not_exist(self):
        with pytest.raises(AnsibleExitJson) as exc_info:
            vmid = 200
            with set_module_args(get_module_args(type="all", vmid=vmid)):
                self.module.main()

        result = exc_info.value.args[0]
        assert result["proxmox_vms"] == []

    def test_module_fail_when_qemu_request_fails(self):
        self.connect_mock.return_value.nodes.return_value.qemu.return_value.get.side_effect = IOError(
            "Some mocked connection error."
        )
        with pytest.raises(AnsibleFailJson) as exc_info:
            with set_module_args(get_module_args(type="qemu")):
                self.module.main()

        result = exc_info.value.args[0]
        assert "Failed to retrieve QEMU VMs information:" in result["msg"]

    def test_module_fail_when_lxc_request_fails(self):
        self.connect_mock.return_value.nodes.return_value.lxc.return_value.get.side_effect = IOError(
            "Some mocked connection error."
        )
        with pytest.raises(AnsibleFailJson) as exc_info:
            with set_module_args(get_module_args(type="lxc")):
                self.module.main()

        result = exc_info.value.args[0]
        assert "Failed to retrieve LXC VMs information:" in result["msg"]

    def test_module_fail_when_cluster_resources_request_fails(self):
        self.connect_mock.return_value.cluster.return_value.resources.return_value.get.side_effect = IOError(
            "Some mocked connection error."
        )
        with pytest.raises(AnsibleFailJson) as exc_info:
            with set_module_args(get_module_args()):
                self.module.main()

        result = exc_info.value.args[0]
        assert (
            "Failed to retrieve VMs information from cluster resources:"
            in result["msg"]
        )

    def test_module_fail_when_node_does_not_exist(self):
        with pytest.raises(AnsibleFailJson) as exc_info:
            with set_module_args(get_module_args(type="all", node="NODE3")):
                self.module.main()

        result = exc_info.value.args[0]
        assert result["msg"] == "Node NODE3 doesn't exist in PVE cluster"

    def test_call_to_get_vmid_is_not_used_when_vmid_provided(self):
        with patch(
            "ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible.get_vmid"
        ) as get_vmid_mock:
            with pytest.raises(AnsibleExitJson):
                vmid = 100
                with set_module_args(
                    get_module_args(type="all", vmid=vmid, name="something")
                ):
                    self.module.main()

        assert get_vmid_mock.call_count == 0

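    # With config="current" the module is expected to also fetch the guest's
    # configuration and attach it to the matching entry in proxmox_vms.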
    def test_config_returned_when_specified_qemu_vm_with_config(self):
        config_vm_value = {
            'scsi0': 'local-lvm:vm-101-disk-0,iothread=1,size=32G',
            'net0': 'virtio=4E:79:9F:A8:EE:E4,bridge=vmbr0,firewall=1',
            'scsihw': 'virtio-scsi-single',
            'cores': 1,
            'name': 'test1',
            'ostype': 'l26',
            'boot': 'order=scsi0;ide2;net0',
            'memory': 2048,
            'sockets': 1,
        }
        (self.connect_mock.return_value.nodes.return_value.qemu.return_value.
         config.return_value.get.return_value) = config_vm_value

        with pytest.raises(AnsibleExitJson) as exc_info:
            vmid = 101
            with set_module_args(get_module_args(
                type="qemu",
                vmid=vmid,
                config="current",
            )):
                expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["vmid"] == vmid]
                expected_output[0]["config"] = config_vm_value
                self.module.main()

        result = exc_info.value.args[0]
        assert result["proxmox_vms"] == expected_output