Remove proxmox content (#10110)

Remove proxmox content.
Felix Fontein 2025-06-08 16:18:16 +02:00 committed by GitHub
commit f2b7bdf293
76 changed files with 124 additions and 14629 deletions


@@ -1,585 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2024 Nils Stein (@mietzen) <github.nstein@mailbox.org>
# Copyright (c) 2024 Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import (annotations, absolute_import, division, print_function)
__metaclass__ = type
import os
import pytest
from ansible_collections.community.general.plugins.connection.proxmox_pct_remote import authenticity_msg, MyAddPolicy
from ansible_collections.community.general.plugins.module_utils._filelock import FileLock, LockTimeout
from ansible.errors import AnsibleError, AnsibleAuthenticationFailure, AnsibleConnectionFailure
from ansible.module_utils.common.text.converters import to_bytes
from ansible.module_utils.compat.paramiko import paramiko
from ansible.playbook.play_context import PlayContext
from ansible.plugins.loader import connection_loader
from io import StringIO
from pathlib import Path
from unittest.mock import patch, MagicMock, mock_open
@pytest.fixture
def connection():
play_context = PlayContext()
in_stream = StringIO()
conn = connection_loader.get('community.general.proxmox_pct_remote', play_context, in_stream)
conn.set_option('remote_addr', '192.168.1.100')
conn.set_option('remote_user', 'root')
conn.set_option('password', 'password')
return conn
def test_connection_options(connection):
""" Test that connection options are properly set """
assert connection.get_option('remote_addr') == '192.168.1.100'
assert connection.get_option('remote_user') == 'root'
assert connection.get_option('password') == 'password'
def test_authenticity_msg():
""" Test authenticity message formatting """
msg = authenticity_msg('test.host', 'ssh-rsa', 'AA:BB:CC:DD')
assert 'test.host' in msg
assert 'ssh-rsa' in msg
assert 'AA:BB:CC:DD' in msg
def test_missing_host_key(connection):
""" Test MyAddPolicy missing_host_key method """
client = MagicMock()
key = MagicMock()
key.get_fingerprint.return_value = b'fingerprint'
key.get_name.return_value = 'ssh-rsa'
policy = MyAddPolicy(connection)
connection.set_option('host_key_auto_add', True)
policy.missing_host_key(client, 'test.host', key)
assert hasattr(key, '_added_by_ansible_this_time')
connection.set_option('host_key_auto_add', False)
connection.set_option('host_key_checking', False)
policy.missing_host_key(client, 'test.host', key)
connection.set_option('host_key_checking', True)
connection.set_option('host_key_auto_add', False)
connection.set_option('use_persistent_connections', False)
with patch('ansible.utils.display.Display.prompt_until', return_value='yes'):
policy.missing_host_key(client, 'test.host', key)
with patch('ansible.utils.display.Display.prompt_until', return_value='no'):
with pytest.raises(AnsibleError, match='host connection rejected by user'):
policy.missing_host_key(client, 'test.host', key)
def test_set_log_channel(connection):
""" Test setting log channel """
connection._set_log_channel('test_channel')
assert connection._log_channel == 'test_channel'
def test_parse_proxy_command(connection):
""" Test proxy command parsing """
connection.set_option('proxy_command', 'ssh -W %h:%p proxy.example.com')
connection.set_option('remote_addr', 'target.example.com')
connection.set_option('remote_user', 'testuser')
result = connection._parse_proxy_command(port=2222)
assert 'sock' in result
assert isinstance(result['sock'], paramiko.ProxyCommand)
@patch('paramiko.SSHClient')
def test_connect_with_rsa_sha2_disabled(mock_ssh, connection):
""" Test connection with RSA SHA2 algorithms disabled """
connection.set_option('use_rsa_sha2_algorithms', False)
mock_client = MagicMock()
mock_ssh.return_value = mock_client
connection._connect()
call_kwargs = mock_client.connect.call_args[1]
assert 'disabled_algorithms' in call_kwargs
assert 'pubkeys' in call_kwargs['disabled_algorithms']
@patch('paramiko.SSHClient')
def test_connect_with_bad_host_key(mock_ssh, connection):
""" Test connection with bad host key """
mock_client = MagicMock()
mock_ssh.return_value = mock_client
mock_client.connect.side_effect = paramiko.ssh_exception.BadHostKeyException(
'hostname', MagicMock(), MagicMock())
with pytest.raises(AnsibleConnectionFailure, match='host key mismatch'):
connection._connect()
@patch('paramiko.SSHClient')
def test_connect_with_invalid_host_key(mock_ssh, connection):
""" Test connection with bad host key """
connection.set_option('host_key_checking', True)
mock_client = MagicMock()
mock_ssh.return_value = mock_client
mock_client.load_system_host_keys.side_effect = paramiko.hostkeys.InvalidHostKey(
"Bad Line!", Exception('Something crashed!'))
with pytest.raises(AnsibleConnectionFailure, match="Invalid host key: Bad Line!"):
connection._connect()
@patch('paramiko.SSHClient')
def test_connect_success(mock_ssh, connection):
""" Test successful SSH connection establishment """
mock_client = MagicMock()
mock_ssh.return_value = mock_client
connection._connect()
assert mock_client.connect.called
assert connection._connected
@patch('paramiko.SSHClient')
def test_connect_authentication_failure(mock_ssh, connection):
""" Test SSH connection with authentication failure """
mock_client = MagicMock()
mock_ssh.return_value = mock_client
mock_client.connect.side_effect = paramiko.ssh_exception.AuthenticationException('Auth failed')
with pytest.raises(AnsibleAuthenticationFailure):
connection._connect()
def test_any_keys_added(connection):
""" Test checking for added host keys """
connection.ssh = MagicMock()
connection.ssh._host_keys = {
'host1': {
'ssh-rsa': MagicMock(_added_by_ansible_this_time=True),
'ssh-ed25519': MagicMock(_added_by_ansible_this_time=False)
}
}
assert connection._any_keys_added() is True
connection.ssh._host_keys = {
'host1': {
'ssh-rsa': MagicMock(_added_by_ansible_this_time=False)
}
}
assert connection._any_keys_added() is False
@patch('os.path.exists')
@patch('os.stat')
@patch('tempfile.NamedTemporaryFile')
def test_save_ssh_host_keys(mock_tempfile, mock_stat, mock_exists, connection):
""" Test saving SSH host keys """
mock_exists.return_value = True
mock_stat.return_value = MagicMock(st_mode=0o644, st_uid=1000, st_gid=1000)
mock_tempfile.return_value.__enter__.return_value.name = '/tmp/test_keys'
connection.ssh = MagicMock()
connection.ssh._host_keys = {
'host1': {
'ssh-rsa': MagicMock(
get_base64=lambda: 'KEY1',
_added_by_ansible_this_time=True
)
}
}
mock_open_obj = mock_open()
with patch('builtins.open', mock_open_obj):
connection._save_ssh_host_keys('/tmp/test_keys')
mock_open_obj().write.assert_called_with('host1 ssh-rsa KEY1\n')
def test_build_pct_command(connection):
""" Test PCT command building with different users """
connection.set_option('vmid', '100')
cmd = connection._build_pct_command('/bin/sh -c "ls -la"')
assert cmd == '/usr/sbin/pct exec 100 -- /bin/sh -c "ls -la"'
connection.set_option('remote_user', 'user')
connection.set_option('proxmox_become_method', 'sudo')
cmd = connection._build_pct_command('/bin/sh -c "ls -la"')
assert cmd == 'sudo /usr/sbin/pct exec 100 -- /bin/sh -c "ls -la"'
@patch('paramiko.SSHClient')
def test_exec_command_success(mock_ssh, connection):
""" Test successful command execution """
mock_client = MagicMock()
mock_ssh.return_value = mock_client
mock_channel = MagicMock()
mock_transport = MagicMock()
mock_client.get_transport.return_value = mock_transport
mock_transport.open_session.return_value = mock_channel
mock_channel.recv_exit_status.return_value = 0
mock_channel.makefile.return_value = [to_bytes('stdout')]
mock_channel.makefile_stderr.return_value = [to_bytes("")]
connection._connected = True
connection.ssh = mock_client
returncode, stdout, stderr = connection.exec_command('ls -la')
mock_transport.open_session.assert_called_once()
mock_channel.get_pty.assert_called_once()
mock_transport.set_keepalive.assert_called_once_with(5)
@patch('paramiko.SSHClient')
def test_exec_command_pct_not_found(mock_ssh, connection):
""" Test command execution when PCT is not found """
mock_client = MagicMock()
mock_ssh.return_value = mock_client
mock_channel = MagicMock()
mock_transport = MagicMock()
mock_client.get_transport.return_value = mock_transport
mock_transport.open_session.return_value = mock_channel
mock_channel.recv_exit_status.return_value = 1
mock_channel.makefile.return_value = [to_bytes("")]
mock_channel.makefile_stderr.return_value = [to_bytes('pct: not found')]
connection._connected = True
connection.ssh = mock_client
with pytest.raises(AnsibleError, match='pct not found in path of host'):
connection.exec_command('ls -la')
@patch('paramiko.SSHClient')
def test_exec_command_session_open_failure(mock_ssh, connection):
""" Test exec_command when session opening fails """
mock_client = MagicMock()
mock_transport = MagicMock()
mock_transport.open_session.side_effect = Exception('Failed to open session')
mock_client.get_transport.return_value = mock_transport
connection._connected = True
connection.ssh = mock_client
with pytest.raises(AnsibleConnectionFailure, match='Failed to open session'):
connection.exec_command('test command')
@patch('paramiko.SSHClient')
def test_exec_command_with_privilege_escalation(mock_ssh, connection):
""" Test exec_command with privilege escalation """
mock_client = MagicMock()
mock_channel = MagicMock()
mock_transport = MagicMock()
mock_client.get_transport.return_value = mock_transport
mock_transport.open_session.return_value = mock_channel
connection._connected = True
connection.ssh = mock_client
connection.become = MagicMock()
connection.become.expect_prompt.return_value = True
connection.become.check_success.return_value = False
connection.become.check_password_prompt.return_value = True
connection.become.get_option.return_value = 'sudo_password'
mock_channel.recv.return_value = b'[sudo] password:'
mock_channel.recv_exit_status.return_value = 0
mock_channel.makefile.return_value = [b""]
mock_channel.makefile_stderr.return_value = [b""]
returncode, stdout, stderr = connection.exec_command('sudo test command')
mock_channel.sendall.assert_called_once_with(b'sudo_password\n')
def test_put_file(connection):
""" Test putting a file to the remote system """
connection.exec_command = MagicMock()
connection.exec_command.return_value = (0, b"", b"")
with patch('builtins.open', create=True) as mock_open:
mock_open.return_value.__enter__.return_value.read.return_value = b'test content'
connection.put_file('/local/path', '/remote/path')
connection.exec_command.assert_called_once_with("/bin/sh -c 'cat > /remote/path'", in_data=b'test content', sudoable=False)
@patch('paramiko.SSHClient')
def test_put_file_general_error(mock_ssh, connection):
""" Test put_file with general error """
mock_client = MagicMock()
mock_ssh.return_value = mock_client
mock_channel = MagicMock()
mock_transport = MagicMock()
mock_client.get_transport.return_value = mock_transport
mock_transport.open_session.return_value = mock_channel
mock_channel.recv_exit_status.return_value = 1
mock_channel.makefile.return_value = [to_bytes("")]
mock_channel.makefile_stderr.return_value = [to_bytes('Some error')]
connection._connected = True
connection.ssh = mock_client
with pytest.raises(AnsibleError, match='error occurred while putting file from /remote/path to /local/path'):
connection.put_file('/remote/path', '/local/path')
@patch('paramiko.SSHClient')
def test_put_file_cat_not_found(mock_ssh, connection):
""" Test command execution when cat is not found """
mock_client = MagicMock()
mock_ssh.return_value = mock_client
mock_channel = MagicMock()
mock_transport = MagicMock()
mock_client.get_transport.return_value = mock_transport
mock_transport.open_session.return_value = mock_channel
mock_channel.recv_exit_status.return_value = 1
mock_channel.makefile.return_value = [to_bytes("")]
mock_channel.makefile_stderr.return_value = [to_bytes('cat: not found')]
connection._connected = True
connection.ssh = mock_client
with pytest.raises(AnsibleError, match='cat not found in path of container:'):
connection.fetch_file('/remote/path', '/local/path')
def test_fetch_file(connection):
""" Test fetching a file from the remote system """
connection.exec_command = MagicMock()
connection.exec_command.return_value = (0, b'test content', b"")
with patch('builtins.open', create=True) as mock_open:
connection.fetch_file('/remote/path', '/local/path')
connection.exec_command.assert_called_once_with("/bin/sh -c 'cat /remote/path'", sudoable=False)
mock_open.assert_called_with('/local/path', 'wb')
@patch('paramiko.SSHClient')
def test_fetch_file_general_error(mock_ssh, connection):
""" Test fetch_file with general error """
mock_client = MagicMock()
mock_ssh.return_value = mock_client
mock_channel = MagicMock()
mock_transport = MagicMock()
mock_client.get_transport.return_value = mock_transport
mock_transport.open_session.return_value = mock_channel
mock_channel.recv_exit_status.return_value = 1
mock_channel.makefile.return_value = [to_bytes("")]
mock_channel.makefile_stderr.return_value = [to_bytes('Some error')]
connection._connected = True
connection.ssh = mock_client
with pytest.raises(AnsibleError, match='error occurred while fetching file from /remote/path to /local/path'):
connection.fetch_file('/remote/path', '/local/path')
@patch('paramiko.SSHClient')
def test_fetch_file_cat_not_found(mock_ssh, connection):
""" Test command execution when cat is not found """
mock_client = MagicMock()
mock_ssh.return_value = mock_client
mock_channel = MagicMock()
mock_transport = MagicMock()
mock_client.get_transport.return_value = mock_transport
mock_transport.open_session.return_value = mock_channel
mock_channel.recv_exit_status.return_value = 1
mock_channel.makefile.return_value = [to_bytes("")]
mock_channel.makefile_stderr.return_value = [to_bytes('cat: not found')]
connection._connected = True
connection.ssh = mock_client
with pytest.raises(AnsibleError, match='cat not found in path of container:'):
connection.fetch_file('/remote/path', '/local/path')
def test_close(connection):
""" Test connection close """
mock_ssh = MagicMock()
connection.ssh = mock_ssh
connection._connected = True
connection.close()
assert mock_ssh.close.called, 'ssh.close was not called'
assert not connection._connected, 'self._connected is still True'
def test_close_with_lock_file(connection):
""" Test close method with lock file creation """
connection._any_keys_added = MagicMock(return_value=True)
connection._connected = True
connection.keyfile = '/tmp/pct-remote-known_hosts-test'
connection.set_option('host_key_checking', True)
connection.set_option('lock_file_timeout', 5)
connection.set_option('record_host_keys', True)
connection.ssh = MagicMock()
lock_file_path = os.path.join(os.path.dirname(connection.keyfile),
f'ansible-{os.path.basename(connection.keyfile)}.lock')
try:
connection.close()
assert os.path.exists(lock_file_path), 'Lock file was not created'
lock_stat = os.stat(lock_file_path)
assert lock_stat.st_mode & 0o777 == 0o600, 'Incorrect lock file permissions'
finally:
Path(lock_file_path).unlink(missing_ok=True)
@patch('pathlib.Path.unlink')
@patch('os.path.exists')
def test_close_lock_file_time_out_error_handling(mock_exists, mock_unlink, connection):
""" Test close method with lock file timeout error """
connection._any_keys_added = MagicMock(return_value=True)
connection._connected = True
connection._save_ssh_host_keys = MagicMock()
connection.keyfile = '/tmp/pct-remote-known_hosts-test'
connection.set_option('host_key_checking', True)
connection.set_option('lock_file_timeout', 5)
connection.set_option('record_host_keys', True)
connection.ssh = MagicMock()
mock_exists.return_value = False
matcher = f'writing lock file for {connection.keyfile} ran in to the timeout of {connection.get_option("lock_file_timeout")}s'
with pytest.raises(AnsibleError, match=matcher):
with patch('os.getuid', return_value=1000), \
patch('os.getgid', return_value=1000), \
patch('os.chmod'), patch('os.chown'), \
patch('os.rename'), \
patch.object(FileLock, 'lock_file', side_effect=LockTimeout()):
connection.close()
@patch('ansible_collections.community.general.plugins.module_utils._filelock.FileLock.lock_file')
@patch('tempfile.NamedTemporaryFile')
@patch('os.chmod')
@patch('os.chown')
@patch('os.rename')
@patch('os.path.exists')
def test_tempfile_creation_and_move(mock_exists, mock_rename, mock_chown, mock_chmod, mock_tempfile, mock_lock_file, connection):
""" Test tempfile creation and move during close """
connection._any_keys_added = MagicMock(return_value=True)
connection._connected = True
connection._save_ssh_host_keys = MagicMock()
connection.keyfile = '/tmp/pct-remote-known_hosts-test'
connection.set_option('host_key_checking', True)
connection.set_option('lock_file_timeout', 5)
connection.set_option('record_host_keys', True)
connection.ssh = MagicMock()
mock_exists.return_value = False
mock_lock_file_instance = MagicMock()
mock_lock_file.return_value = mock_lock_file_instance
mock_lock_file_instance.__enter__.return_value = None
mock_tempfile_instance = MagicMock()
mock_tempfile_instance.name = '/tmp/mock_tempfile'
mock_tempfile.return_value.__enter__.return_value = mock_tempfile_instance
mode = 0o644
uid = 1000
gid = 1000
key_dir = os.path.dirname(connection.keyfile)
with patch('os.getuid', return_value=uid), patch('os.getgid', return_value=gid):
connection.close()
connection._save_ssh_host_keys.assert_called_once_with('/tmp/mock_tempfile')
mock_chmod.assert_called_once_with('/tmp/mock_tempfile', mode)
mock_chown.assert_called_once_with('/tmp/mock_tempfile', uid, gid)
mock_rename.assert_called_once_with('/tmp/mock_tempfile', connection.keyfile)
mock_tempfile.assert_called_once_with(dir=key_dir, delete=False)
@patch('pathlib.Path.unlink')
@patch('tempfile.NamedTemporaryFile')
@patch('ansible_collections.community.general.plugins.module_utils._filelock.FileLock.lock_file')
@patch('os.path.exists')
def test_close_tempfile_error_handling(mock_exists, mock_lock_file, mock_tempfile, mock_unlink, connection):
""" Test tempfile creation error """
connection._any_keys_added = MagicMock(return_value=True)
connection._connected = True
connection._save_ssh_host_keys = MagicMock()
connection.keyfile = '/tmp/pct-remote-known_hosts-test'
connection.set_option('host_key_checking', True)
connection.set_option('lock_file_timeout', 5)
connection.set_option('record_host_keys', True)
connection.ssh = MagicMock()
mock_exists.return_value = False
mock_lock_file_instance = MagicMock()
mock_lock_file.return_value = mock_lock_file_instance
mock_lock_file_instance.__enter__.return_value = None
mock_tempfile_instance = MagicMock()
mock_tempfile_instance.name = '/tmp/mock_tempfile'
mock_tempfile.return_value.__enter__.return_value = mock_tempfile_instance
with pytest.raises(AnsibleError, match='error occurred while writing SSH host keys!'):
with patch.object(os, 'chmod', side_effect=Exception()):
connection.close()
mock_unlink.assert_called_with(missing_ok=True)
@patch('ansible_collections.community.general.plugins.module_utils._filelock.FileLock.lock_file')
@patch('os.path.exists')
def test_close_with_invalid_host_key(mock_exists, mock_lock_file, connection):
""" Test load_system_host_keys on close with InvalidHostKey error """
connection._any_keys_added = MagicMock(return_value=True)
connection._connected = True
connection._save_ssh_host_keys = MagicMock()
connection.keyfile = '/tmp/pct-remote-known_hosts-test'
connection.set_option('host_key_checking', True)
connection.set_option('lock_file_timeout', 5)
connection.set_option('record_host_keys', True)
connection.ssh = MagicMock()
connection.ssh.load_system_host_keys.side_effect = paramiko.hostkeys.InvalidHostKey(
"Bad Line!", Exception('Something crashed!'))
mock_exists.return_value = False
mock_lock_file_instance = MagicMock()
mock_lock_file.return_value = mock_lock_file_instance
mock_lock_file_instance.__enter__.return_value = None
with pytest.raises(AnsibleConnectionFailure, match="Invalid host key: Bad Line!"):
connection.close()
def test_reset(connection):
""" Test connection reset """
connection._connected = True
connection.close = MagicMock()
connection._connect = MagicMock()
connection.reset()
connection.close.assert_called_once()
connection._connect.assert_called_once()
connection._connected = False
connection.reset()
assert connection.close.call_count == 1


@@ -1,786 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright (c) 2020, Jeffrey van Pelt <jeff@vanpelt.one>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
#
# The API responses used in these tests were recorded from PVE version 6.2.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
from ansible.inventory.data import InventoryData
from ansible_collections.community.general.plugins.inventory.proxmox import InventoryModule
@pytest.fixture(scope="module")
def inventory():
r = InventoryModule()
r.inventory = InventoryData()
return r
def test_verify_file(tmp_path, inventory):
file = tmp_path / "foobar.proxmox.yml"
file.touch()
assert inventory.verify_file(str(file)) is True
def test_verify_file_bad_config(inventory):
assert inventory.verify_file('foobar.proxmox.yml') is False
def get_auth():
return True
# NOTE: when updating/adding replies to this function,
# be sure to add only the _contents_ of the 'data' dict in the API reply
def get_json(url, ignore_errors=None):
if url == "https://localhost:8006/api2/json/nodes":
# _get_nodes
return [{"type": "node",
"cpu": 0.01,
"maxdisk": 500,
"mem": 500,
"node": "testnode",
"id": "node/testnode",
"maxcpu": 1,
"status": "online",
"ssl_fingerprint": "xx",
"disk": 1000,
"maxmem": 1000,
"uptime": 10000,
"level": ""},
{"type": "node",
"node": "testnode2",
"id": "node/testnode2",
"status": "offline",
"ssl_fingerprint": "yy"}]
elif url == "https://localhost:8006/api2/json/pools":
# _get_pools
return [{"poolid": "test"}]
elif url == "https://localhost:8006/api2/json/nodes/testnode/lxc":
# _get_lxc_per_node
return [{"cpus": 1,
"name": "test-lxc",
"cpu": 0.01,
"diskwrite": 0,
"lock": "",
"maxmem": 1000,
"template": "",
"diskread": 0,
"mem": 1000,
"swap": 0,
"type": "lxc",
"maxswap": 0,
"maxdisk": "1000",
"netout": 1000,
"pid": "1000",
"netin": 1000,
"status": "running",
"vmid": "100",
"disk": "1000",
"uptime": 1000}]
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu":
# _get_qemu_per_node
return [{"name": "test-qemu",
"cpus": 1,
"mem": 1000,
"template": "",
"diskread": 0,
"cpu": 0.01,
"maxmem": 1000,
"diskwrite": 0,
"netout": 1000,
"pid": "1001",
"netin": 1000,
"maxdisk": 1000,
"vmid": "101",
"uptime": 1000,
"disk": 0,
"status": "running"},
{"name": "test-qemu-windows",
"cpus": 1,
"mem": 1000,
"template": "",
"diskread": 0,
"cpu": 0.01,
"maxmem": 1000,
"diskwrite": 0,
"netout": 1000,
"pid": "1001",
"netin": 1000,
"maxdisk": 1000,
"vmid": "102",
"uptime": 1000,
"disk": 0,
"status": "running"},
{"name": "test-qemu-multi-nic",
"cpus": 1,
"mem": 1000,
"template": "",
"diskread": 0,
"cpu": 0.01,
"maxmem": 1000,
"diskwrite": 0,
"netout": 1000,
"pid": "1001",
"netin": 1000,
"maxdisk": 1000,
"vmid": "103",
"uptime": 1000,
"disk": 0,
"status": "running"},
{"name": "test-qemu-template",
"cpus": 1,
"mem": 0,
"template": 1,
"diskread": 0,
"cpu": 0,
"maxmem": 1000,
"diskwrite": 0,
"netout": 0,
"pid": "1001",
"netin": 0,
"maxdisk": 1000,
"vmid": "9001",
"uptime": 0,
"disk": 0,
"status": "stopped"}]
elif url == "https://localhost:8006/api2/json/pools/test":
# _get_members_per_pool
return {"members": [{"uptime": 1000,
"template": 0,
"id": "qemu/101",
"mem": 1000,
"status": "running",
"cpu": 0.01,
"maxmem": 1000,
"diskwrite": 1000,
"name": "test-qemu",
"netout": 1000,
"netin": 1000,
"vmid": 101,
"node": "testnode",
"maxcpu": 1,
"type": "qemu",
"maxdisk": 1000,
"disk": 0,
"diskread": 1000}]}
elif url == "https://localhost:8006/api2/json/nodes/testnode/network":
# _get_node_ip
return [{"families": ["inet"],
"priority": 3,
"active": 1,
"cidr": "10.1.1.2/24",
"iface": "eth0",
"method": "static",
"exists": 1,
"type": "eth",
"netmask": "24",
"gateway": "10.1.1.1",
"address": "10.1.1.2",
"method6": "manual",
"autostart": 1},
{"method6": "manual",
"autostart": 1,
"type": "OVSPort",
"exists": 1,
"method": "manual",
"iface": "eth1",
"ovs_bridge": "vmbr0",
"active": 1,
"families": ["inet"],
"priority": 5,
"ovs_type": "OVSPort"},
{"type": "OVSBridge",
"method": "manual",
"iface": "vmbr0",
"families": ["inet"],
"priority": 4,
"ovs_ports": "eth1",
"ovs_type": "OVSBridge",
"method6": "manual",
"autostart": 1,
"active": 1}]
elif url == "https://localhost:8006/api2/json/nodes/testnode/lxc/100/config":
# _get_vm_config (lxc)
return {
"console": 1,
"rootfs": "local-lvm:vm-100-disk-0,size=4G",
"cmode": "tty",
"description": "A testnode",
"cores": 1,
"hostname": "test-lxc",
"arch": "amd64",
"tty": 2,
"swap": 0,
"cpulimit": "0",
"net0": "name=eth0,bridge=vmbr0,gw=10.1.1.1,hwaddr=FF:FF:FF:FF:FF:FF,ip=10.1.1.3/24,type=veth",
"ostype": "ubuntu",
"digest": "123456789abcdef0123456789abcdef01234567890",
"protection": 0,
"memory": 1000,
"onboot": 0,
"cpuunits": 1024,
"tags": "one, two, three",
}
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/101/config":
# _get_vm_config (qemu)
return {
"tags": "one, two, three",
"cores": 1,
"ide2": "none,media=cdrom",
"memory": 1000,
"kvm": 1,
"digest": "0123456789abcdef0123456789abcdef0123456789",
"description": "A test qemu",
"sockets": 1,
"onboot": 1,
"vmgenid": "ffffffff-ffff-ffff-ffff-ffffffffffff",
"numa": 0,
"bootdisk": "scsi0",
"cpu": "host",
"name": "test-qemu",
"ostype": "l26",
"hotplug": "network,disk,usb",
"scsi0": "local-lvm:vm-101-disk-0,size=8G",
"net0": "virtio=ff:ff:ff:ff:ff:ff,bridge=vmbr0,firewall=1",
"agent": "1,fstrim_cloned_disks=1",
"bios": "seabios",
"ide0": "local-lvm:vm-101-cloudinit,media=cdrom,size=4M",
"boot": "cdn",
"scsihw": "virtio-scsi-pci",
"smbios1": "uuid=ffffffff-ffff-ffff-ffff-ffffffffffff"
}
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/102/config":
# _get_vm_config (qemu)
return {
"numa": 0,
"digest": "460add1531a7068d2ae62d54f67e8fb9493dece9",
"ide2": "none,media=cdrom",
"bootdisk": "sata0",
"name": "test-qemu-windows",
"balloon": 0,
"cpulimit": "4",
"agent": "1",
"cores": 6,
"sata0": "storage:vm-102-disk-0,size=100G",
"memory": 10240,
"smbios1": "uuid=127301fc-0122-48d5-8fc5-c04fa78d8146",
"scsihw": "virtio-scsi-pci",
"sockets": 1,
"ostype": "win8",
"net0": "virtio=ff:ff:ff:ff:ff:ff,bridge=vmbr0",
"onboot": 1
}
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/103/config":
# _get_vm_config (qemu)
return {
'scsi1': 'storage:vm-103-disk-3,size=30G',
'sockets': 1,
'memory': 8192,
'ostype': 'l26',
'scsihw': 'virtio-scsi-pci',
"net0": "virtio=ff:ff:ff:ff:ff:ff,bridge=vmbr0",
"net1": "virtio=ff:ff:ff:ff:ff:ff,bridge=vmbr1",
'bootdisk': 'scsi0',
'scsi0': 'storage:vm-103-disk-0,size=10G',
'name': 'test-qemu-multi-nic',
'cores': 4,
'digest': '51b7599f869b9a3f564804a0aed290f3de803292',
'smbios1': 'uuid=863b31c3-42ca-4a92-aed7-4111f342f70a',
'agent': '1,type=virtio',
'ide2': 'none,media=cdrom',
'balloon': 0,
'numa': 0,
'scsi2': 'storage:vm-103-disk-2,size=10G',
'serial0': 'socket',
'vmgenid': 'ddfb79b2-b484-4d66-88e7-6e76f2d1be77',
'onboot': 1,
'tablet': 0
}
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/101/agent/network-get-interfaces":
# _get_agent_network_interfaces
return {"result": [
{
"hardware-address": "00:00:00:00:00:00",
"ip-addresses": [
{
"prefix": 8,
"ip-address-type": "ipv4",
"ip-address": "127.0.0.1"
},
{
"ip-address-type": "ipv6",
"ip-address": "::1",
"prefix": 128
}],
"statistics": {
"rx-errs": 0,
"rx-bytes": 163244,
"rx-packets": 1623,
"rx-dropped": 0,
"tx-dropped": 0,
"tx-packets": 1623,
"tx-bytes": 163244,
"tx-errs": 0},
"name": "lo"},
{
"statistics": {
"rx-packets": 4025,
"rx-dropped": 12,
"rx-bytes": 324105,
"rx-errs": 0,
"tx-errs": 0,
"tx-bytes": 368860,
"tx-packets": 3479,
"tx-dropped": 0},
"name": "eth0",
"ip-addresses": [
{
"prefix": 24,
"ip-address-type": "ipv4",
"ip-address": "10.1.2.3"
},
{
"prefix": 64,
"ip-address": "fd8c:4687:e88d:1be3:5b70:7b88:c79c:293",
"ip-address-type": "ipv6"
}],
"hardware-address": "ff:ff:ff:ff:ff:ff"
},
{
"hardware-address": "ff:ff:ff:ff:ff:ff",
"ip-addresses": [
{
"prefix": 16,
"ip-address": "10.10.2.3",
"ip-address-type": "ipv4"
}],
"name": "docker0",
"statistics": {
"rx-bytes": 0,
"rx-errs": 0,
"rx-dropped": 0,
"rx-packets": 0,
"tx-packets": 0,
"tx-dropped": 0,
"tx-errs": 0,
"tx-bytes": 0
}}]}
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/102/agent/network-get-interfaces":
# _get_agent_network_interfaces
return {"result": {'error': {'desc': 'this feature or command is not currently supported', 'class': 'Unsupported'}}}
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/103/agent/network-get-interfaces":
# _get_agent_network_interfaces
return {
"result": [
{
"statistics": {
"tx-errs": 0,
"rx-errs": 0,
"rx-dropped": 0,
"tx-bytes": 48132932372,
"tx-dropped": 0,
"rx-bytes": 48132932372,
"tx-packets": 178578980,
"rx-packets": 178578980
},
"hardware-address": "ff:ff:ff:ff:ff:ff",
"ip-addresses": [
{
"ip-address-type": "ipv4",
"prefix": 8,
"ip-address": "127.0.0.1"
}
],
"name": "lo"
},
{
"name": "eth0",
"ip-addresses": [
{
"ip-address-type": "ipv4",
"prefix": 24,
"ip-address": "172.16.0.143"
}
],
"statistics": {
"rx-errs": 0,
"tx-errs": 0,
"rx-packets": 660028,
"tx-packets": 304599,
"tx-dropped": 0,
"rx-bytes": 1846743499,
"tx-bytes": 1287844926,
"rx-dropped": 0
},
"hardware-address": "ff:ff:ff:ff:ff:ff"
},
{
"name": "eth1",
"hardware-address": "ff:ff:ff:ff:ff:ff",
"statistics": {
"rx-bytes": 235717091946,
"tx-dropped": 0,
"rx-dropped": 0,
"tx-bytes": 123411636251,
"rx-packets": 540431277,
"tx-packets": 468411864,
"rx-errs": 0,
"tx-errs": 0
},
"ip-addresses": [
{
"ip-address": "10.0.0.133",
"prefix": 24,
"ip-address-type": "ipv4"
}
]
},
{
"name": "docker0",
"ip-addresses": [
{
"ip-address": "172.17.0.1",
"prefix": 16,
"ip-address-type": "ipv4"
}
],
"hardware-address": "ff:ff:ff:ff:ff:ff",
"statistics": {
"rx-errs": 0,
"tx-errs": 0,
"rx-packets": 0,
"tx-packets": 0,
"tx-dropped": 0,
"rx-bytes": 0,
"rx-dropped": 0,
"tx-bytes": 0
}
},
{
"hardware-address": "ff:ff:ff:ff:ff:ff",
"name": "datapath"
},
{
"name": "weave",
"ip-addresses": [
{
"ip-address": "10.42.0.1",
"ip-address-type": "ipv4",
"prefix": 16
}
],
"hardware-address": "ff:ff:ff:ff:ff:ff",
"statistics": {
"rx-bytes": 127289123306,
"tx-dropped": 0,
"rx-dropped": 0,
"tx-bytes": 43827573343,
"rx-packets": 132750542,
"tx-packets": 74218762,
"rx-errs": 0,
"tx-errs": 0
}
},
{
"name": "vethwe-datapath",
"hardware-address": "ff:ff:ff:ff:ff:ff"
},
{
"name": "vethwe-bridge",
"hardware-address": "ff:ff:ff:ff:ff:ff"
},
{
"hardware-address": "ff:ff:ff:ff:ff:ff",
"name": "vxlan-6784"
},
{
"name": "vethwepl0dfe1fe",
"hardware-address": "ff:ff:ff:ff:ff:ff"
},
{
"name": "vethweplf1e7715",
"hardware-address": "ff:ff:ff:ff:ff:ff"
},
{
"hardware-address": "ff:ff:ff:ff:ff:ff",
"name": "vethwepl9d244a1"
},
{
"hardware-address": "ff:ff:ff:ff:ff:ff",
"name": "vethwepl2ca477b"
},
{
"name": "nomacorip",
}
]
}
elif url == "https://localhost:8006/api2/json/nodes/testnode/lxc/100/status/current":
# _get_vm_status (lxc)
return {
"swap": 0,
"name": "test-lxc",
"diskread": 0,
"vmid": 100,
"diskwrite": 0,
"pid": 9000,
"mem": 89980928,
"netin": 1950776396424,
"disk": 4998168576,
"cpu": 0.00163430613110039,
"type": "lxc",
"uptime": 6793736,
"maxmem": 1073741824,
"status": "running",
"cpus": "1",
"ha": {
"group": 'null',
"state": "started",
"managed": 1
},
"maxdisk": 3348329267200,
"netout": 1947793356037,
"maxswap": 1073741824
}
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/101/status/current":
# _get_vm_status (qemu)
return {
"status": "stopped",
"uptime": 0,
"maxmem": 5364514816,
"maxdisk": 34359738368,
"netout": 0,
"cpus": 2,
"ha": {
"managed": 0
},
"diskread": 0,
"vmid": 101,
"diskwrite": 0,
"name": "test-qemu",
"cpu": 0,
"disk": 0,
"netin": 0,
"mem": 0,
"qmpstatus": "stopped"
}
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/102/status/current":
# _get_vm_status (qemu)
return {
"status": "stopped",
"uptime": 0,
"maxmem": 5364514816,
"maxdisk": 34359738368,
"netout": 0,
"cpus": 2,
"ha": {
"managed": 0
},
"diskread": 0,
"vmid": 102,
"diskwrite": 0,
"name": "test-qemu-windows",
"cpu": 0,
"disk": 0,
"netin": 0,
"mem": 0,
"qmpstatus": "prelaunch"
}
elif url == "https://localhost:8006/api2/json/nodes/testnode/qemu/103/status/current":
# _get_vm_status (qemu)
return {
"status": "stopped",
"uptime": 0,
"maxmem": 5364514816,
"maxdisk": 34359738368,
"netout": 0,
"cpus": 2,
"ha": {
"managed": 0
},
"diskread": 0,
"vmid": 103,
"diskwrite": 0,
"name": "test-qemu-multi-nic",
"cpu": 0,
"disk": 0,
"netin": 0,
"mem": 0,
"qmpstatus": "paused"
}
def get_vm_snapshots(node, properties, vmtype, vmid, name):
return [
{"description": "",
"name": "clean",
"snaptime": 1000,
"vmstate": 0
},
{"name": "current",
"digest": "1234689abcdf",
"running": 0,
"description": "You are here!",
"parent": "clean"
}]
def get_option(opts):
def fn(option):
default = opts.get('default', False)
return opts.get(option, default)
return fn
def test_populate(inventory, mocker):
# module settings
inventory.proxmox_user = 'root@pam'
inventory.proxmox_password = 'password'
inventory.proxmox_url = 'https://localhost:8006'
inventory.group_prefix = 'proxmox_'
inventory.facts_prefix = 'proxmox_'
inventory.strict = False
inventory.exclude_nodes = False
opts = {
'group_prefix': 'proxmox_',
'facts_prefix': 'proxmox_',
'want_facts': True,
'want_proxmox_nodes_ansible_host': True,
'qemu_extended_statuses': True,
'exclude_nodes': False
}
# bypass authentication and API fetch calls
inventory._get_auth = mocker.MagicMock(side_effect=get_auth)
inventory._get_json = mocker.MagicMock(side_effect=get_json)
inventory._get_vm_snapshots = mocker.MagicMock(side_effect=get_vm_snapshots)
inventory.get_option = mocker.MagicMock(side_effect=get_option(opts))
inventory._can_add_host = mocker.MagicMock(return_value=True)
inventory._populate()
# get different hosts
host_qemu = inventory.inventory.get_host('test-qemu')
host_qemu_windows = inventory.inventory.get_host('test-qemu-windows')
host_qemu_multi_nic = inventory.inventory.get_host('test-qemu-multi-nic')
host_qemu_template = inventory.inventory.get_host('test-qemu-template')
host_lxc = inventory.inventory.get_host('test-lxc')
# check if qemu-test is in the proxmox_pool_test group
assert 'proxmox_pool_test' in inventory.inventory.groups
group_qemu = inventory.inventory.groups['proxmox_pool_test']
assert group_qemu.hosts == [host_qemu]
# check if qemu-test has eth0 interface in agent_interfaces fact
assert 'eth0' in [d['name'] for d in host_qemu.get_vars()['proxmox_agent_interfaces']]
# check if qemu-multi-nic has multiple network interfaces
for iface_name in ['eth0', 'eth1', 'weave']:
assert iface_name in [d['name'] for d in host_qemu_multi_nic.get_vars()['proxmox_agent_interfaces']]
# check if interface with no mac-address or ip-address defaults correctly
assert [iface for iface in host_qemu_multi_nic.get_vars()['proxmox_agent_interfaces']
if iface['name'] == 'nomacorip'
and iface['mac-address'] == ''
and iface['ip-addresses'] == []
]
# check to make sure qemu-windows doesn't have proxmox_agent_interfaces
assert "proxmox_agent_interfaces" not in host_qemu_windows.get_vars()
# check if lxc-test has been discovered correctly
group_lxc = inventory.inventory.groups['proxmox_all_lxc']
assert group_lxc.hosts == [host_lxc]
# check if qemu template is not present
assert host_qemu_template is None
# check that offline node is in inventory
assert inventory.inventory.get_host('testnode2')
# make sure that ['prelaunch', 'paused'] are in the group list
for group in ['paused', 'prelaunch']:
assert ('%sall_%s' % (inventory.group_prefix, group)) in inventory.inventory.groups
# check if qemu-windows is in the prelaunch group
group_prelaunch = inventory.inventory.groups['proxmox_all_prelaunch']
assert group_prelaunch.hosts == [host_qemu_windows]
# check if qemu-multi-nic is in the paused group
group_paused = inventory.inventory.groups['proxmox_all_paused']
assert group_paused.hosts == [host_qemu_multi_nic]
def test_populate_missing_qemu_extended_groups(inventory, mocker):
# module settings
inventory.proxmox_user = 'root@pam'
inventory.proxmox_password = 'password'
inventory.proxmox_url = 'https://localhost:8006'
inventory.group_prefix = 'proxmox_'
inventory.facts_prefix = 'proxmox_'
inventory.strict = False
inventory.exclude_nodes = False
opts = {
'group_prefix': 'proxmox_',
'facts_prefix': 'proxmox_',
'want_facts': True,
'want_proxmox_nodes_ansible_host': True,
'qemu_extended_statuses': False,
'exclude_nodes': False
}
# bypass authentication and API fetch calls
inventory._get_auth = mocker.MagicMock(side_effect=get_auth)
inventory._get_json = mocker.MagicMock(side_effect=get_json)
inventory._get_vm_snapshots = mocker.MagicMock(side_effect=get_vm_snapshots)
inventory.get_option = mocker.MagicMock(side_effect=get_option(opts))
inventory._can_add_host = mocker.MagicMock(return_value=True)
inventory._populate()
# make sure that ['prelaunch', 'paused'] are not in the group list
for group in ['paused', 'prelaunch']:
assert ('%sall_%s' % (inventory.group_prefix, group)) not in inventory.inventory.groups
def test_populate_exclude_nodes(inventory, mocker):
# module settings
inventory.proxmox_user = 'root@pam'
inventory.proxmox_password = 'password'
inventory.proxmox_url = 'https://localhost:8006'
inventory.group_prefix = 'proxmox_'
inventory.facts_prefix = 'proxmox_'
inventory.strict = False
inventory.exclude_nodes = True
opts = {
'group_prefix': 'proxmox_',
'facts_prefix': 'proxmox_',
'want_facts': True,
'want_proxmox_nodes_ansible_host': True,
'qemu_extended_statuses': False,
'exclude_nodes': True
}
# bypass authentication and API fetch calls
inventory._get_auth = mocker.MagicMock(side_effect=get_auth)
inventory._get_json = mocker.MagicMock(side_effect=get_json)
inventory._get_vm_snapshots = mocker.MagicMock(side_effect=get_vm_snapshots)
inventory.get_option = mocker.MagicMock(side_effect=get_option(opts))
inventory._can_add_host = mocker.MagicMock(return_value=True)
inventory._populate()
# make sure that nodes are not in the inventory
for node in ['testnode', 'testnode2']:
assert node not in inventory.inventory.hosts
# make sure that nodes group is absent
assert ('%s_nodes' % (inventory.group_prefix)) not in inventory.inventory.groups
# make sure that nodes are not in the "ungrouped" group
for node in ['testnode', 'testnode2']:
assert node not in inventory.inventory.get_groups_dict()["ungrouped"]


@@ -1,374 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import (absolute_import, division, print_function)
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
from ansible_collections.community.general.plugins.modules import proxmox_backup
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import (
AnsibleExitJson, AnsibleFailJson, set_module_args, ModuleTestCase)
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import patch
__metaclass__ = type
import pytest
proxmoxer = pytest.importorskip('proxmoxer')
MINIMAL_PERMISSIONS = {
'/sdn/zones': {'Datastore.AllocateSpace': 1, 'Datastore.Audit': 1},
'/nodes': {'Datastore.AllocateSpace': 1, 'Datastore.Audit': 1},
'/sdn': {'Datastore.AllocateSpace': 1, 'Datastore.Audit': 1},
'/vms': {'VM.Audit': 1,
'Sys.Audit': 1,
'Mapping.Audit': 1,
'VM.Backup': 1,
'Datastore.Audit': 1,
'SDN.Audit': 1,
'Pool.Audit': 1},
'/': {'Datastore.Audit': 1, 'Datastore.AllocateSpace': 1},
'/storage/local-zfs': {'Datastore.AllocateSpace': 1,
'Datastore.Audit': 1},
'/storage': {'Datastore.AllocateSpace': 1, 'Datastore.Audit': 1},
'/access': {'Datastore.AllocateSpace': 1, 'Datastore.Audit': 1},
'/vms/101': {'VM.Backup': 1,
'Mapping.Audit': 1,
'Datastore.AllocateSpace': 0,
'Sys.Audit': 1,
'VM.Audit': 1,
'SDN.Audit': 1,
'Pool.Audit': 1,
'Datastore.Audit': 1},
'/vms/100': {'VM.Backup': 1,
'Mapping.Audit': 1,
'Datastore.AllocateSpace': 0,
'Sys.Audit': 1,
'VM.Audit': 1,
'SDN.Audit': 1,
'Pool.Audit': 1,
'Datastore.Audit': 1},
'/pool': {'Datastore.Audit': 1, 'Datastore.AllocateSpace': 1}, }
STORAGE = [{'type': 'pbs',
'username': 'test@pbs',
'datastore': 'Backup-Pool',
'server': '10.0.0.1',
'shared': 1,
'fingerprint': '94:fd:ac:e7:d5:36:0e:11:5b:23:05:40:d2:a4:e1:8a:c1:52:41:01:07:28:c0:4d:c5:ee:df:7f:7c:03:ab:41',
'prune-backups': 'keep-all=1',
'storage': 'backup',
'content': 'backup',
'digest': 'ca46a68d7699de061c139d714892682ea7c9d681'},
{'nodes': 'node1,node2,node3',
'sparse': 1,
'type': 'zfspool',
'content': 'rootdir,images',
'digest': 'ca46a68d7699de061c139d714892682ea7c9d681',
'pool': 'rpool/data',
'storage': 'local-zfs'}]
VMS = [{"diskwrite": 0,
"vmid": 100,
"node": "node1",
"id": "lxc/100",
"maxdisk": 10000,
"template": 0,
"disk": 10000,
"uptime": 10000,
"maxmem": 10000,
"maxcpu": 1,
"netin": 10000,
"type": "lxc",
"netout": 10000,
"mem": 10000,
"diskread": 10000,
"cpu": 0.01,
"name": "test-lxc",
"status": "running"},
{"diskwrite": 0,
"vmid": 101,
"node": "node2",
"id": "kvm/101",
"maxdisk": 10000,
"template": 0,
"disk": 10000,
"uptime": 10000,
"maxmem": 10000,
"maxcpu": 1,
"netin": 10000,
"type": "lxc",
"netout": 10000,
"mem": 10000,
"diskread": 10000,
"cpu": 0.01,
"name": "test-kvm",
"status": "running"}
]
NODES = [{'level': '',
'type': 'node',
'node': 'node1',
'status': 'online',
'id': 'node/node1',
'cgroup-mode': 2},
{'status': 'online',
'id': 'node/node2',
'cgroup-mode': 2,
'level': '',
'node': 'node2',
'type': 'node'},
{'status': 'online',
'id': 'node/node3',
'cgroup-mode': 2,
'level': '',
'node': 'node3',
'type': 'node'},
]
TASK_API_RETURN = {
"node1": {
'starttime': 1732606253,
'status': 'stopped',
'type': 'vzdump',
'pstart': 517463911,
'upid': 'UPID:node1:003F8C63:1E7FB79C:67449780:vzdump:100:root@pam:',
'id': '100',
'node': 'hypervisor',
'pid': 541669,
'user': 'test@pve',
'exitstatus': 'OK'},
"node2": {
'starttime': 1732606253,
'status': 'stopped',
'type': 'vzdump',
'pstart': 517463911,
'upid': 'UPID:node2:000029DD:1599528B:6108F068:vzdump:101:root@pam:',
'id': '101',
'node': 'hypervisor',
'pid': 541669,
'user': 'test@pve',
'exitstatus': 'OK'},
}
VZDUMP_API_RETURN = {
"node1": "UPID:node1:003F8C63:1E7FB79C:67449780:vzdump:100:root@pam:",
"node2": "UPID:node2:000029DD:1599528B:6108F068:vzdump:101:root@pam:",
"node3": "OK",
}
TASKLOG_API_RETURN = {"node1": [{'n': 1,
't': "INFO: starting new backup job: vzdump 100 --mode snapshot --node node1 "
"--notes-template '{{guestname}}' --storage backup --notification-mode auto"},
{'t': 'INFO: Starting Backup of VM 100 (lxc)',
'n': 2},
{'n': 23, 't': 'INFO: adding notes to backup'},
{'n': 24,
't': 'INFO: Finished Backup of VM 100 (00:00:03)'},
{'n': 25,
't': 'INFO: Backup finished at 2024-11-25 16:28:03'},
{'t': 'INFO: Backup job finished successfully',
'n': 26},
{'n': 27, 't': 'TASK OK'}],
"node2": [{'n': 1,
't': "INFO: starting new backup job: vzdump 101 --mode snapshot --node node2 "
"--notes-template '{{guestname}}' --storage backup --notification-mode auto"},
{'t': 'INFO: Starting Backup of VM 101 (kvm)',
'n': 2},
{'n': 24,
't': 'INFO: Finished Backup of VM 100 (00:00:03)'},
{'n': 25,
't': 'INFO: Backup finished at 2024-11-25 16:28:03'},
{'t': 'INFO: Backup job finished successfully',
'n': 26},
{'n': 27, 't': 'TASK OK'}],
}
def return_valid_resources(resource_type, *args, **kwargs):
if resource_type == "vm":
return VMS
if resource_type == "node":
return NODES
def return_vzdump_api(node, *args, **kwargs):
if node in ("node1", "node2", "node3"):
return VZDUMP_API_RETURN[node]
def return_logs_api(node, *args, **kwargs):
if node in ("node1", "node2"):
return TASKLOG_API_RETURN[node]
def return_task_status_api(node, *args, **kwargs):
if node in ("node1", "node2"):
return TASK_API_RETURN[node]
class TestProxmoxBackup(ModuleTestCase):
def setUp(self):
super(TestProxmoxBackup, self).setUp()
proxmox_utils.HAS_PROXMOXER = True
self.module = proxmox_backup
self.connect_mock = patch(
"ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect",
).start()
self.mock_get_permissions = patch.object(
proxmox_backup.ProxmoxBackupAnsible, "_get_permissions").start()
self.mock_get_storages = patch.object(proxmox_utils.ProxmoxAnsible,
"get_storages").start()
self.mock_get_resources = patch.object(
proxmox_backup.ProxmoxBackupAnsible, "_get_resources").start()
self.mock_get_tasklog = patch.object(
proxmox_backup.ProxmoxBackupAnsible, "_get_tasklog").start()
self.mock_post_vzdump = patch.object(
proxmox_backup.ProxmoxBackupAnsible, "_post_vzdump").start()
self.mock_get_taskok = patch.object(
proxmox_backup.ProxmoxBackupAnsible, "_get_taskok").start()
self.mock_get_permissions.return_value = MINIMAL_PERMISSIONS
self.mock_get_storages.return_value = STORAGE
self.mock_get_resources.side_effect = return_valid_resources
self.mock_get_taskok.side_effect = return_task_status_api
self.mock_get_tasklog.side_effect = return_logs_api
self.mock_post_vzdump.side_effect = return_vzdump_api
def tearDown(self):
self.connect_mock.stop()
self.mock_get_permissions.stop()
self.mock_get_storages.stop()
self.mock_get_resources.stop()
super(TestProxmoxBackup, self).tearDown()
def test_proxmox_backup_without_argument(self):
with set_module_args({}):
with pytest.raises(AnsibleFailJson):
proxmox_backup.main()
def test_create_backup_check_mode(self):
with set_module_args(
{
"api_user": "root@pam",
"api_password": "secret",
"api_host": "127.0.0.1",
"mode": "all",
"storage": "backup",
"_ansible_check_mode": True,
}
):
with pytest.raises(AnsibleExitJson) as exc_info:
proxmox_backup.main()
result = exc_info.value.args[0]
assert result["changed"] is True
assert result["msg"] == "Backups would be created"
assert len(result["backups"]) == 0
assert self.mock_get_taskok.call_count == 0
assert self.mock_get_tasklog.call_count == 0
assert self.mock_post_vzdump.call_count == 0
def test_create_backup_all_mode(self):
with set_module_args({
"api_user": "root@pam",
"api_password": "secret",
"api_host": "127.0.0.1",
"mode": "all",
"storage": "backup",
}):
with pytest.raises(AnsibleExitJson) as exc_info:
proxmox_backup.main()
result = exc_info.value.args[0]
assert result["changed"] is True
assert result["msg"] == "Backup tasks created"
for backup_result in result["backups"]:
assert backup_result["upid"] in {
VZDUMP_API_RETURN[key] for key in VZDUMP_API_RETURN}
assert self.mock_get_taskok.call_count == 0
assert self.mock_post_vzdump.call_count == 3
def test_create_backup_include_mode_with_wait(self):
with set_module_args({
"api_user": "root@pam",
"api_password": "secret",
"api_host": "127.0.0.1",
"mode": "include",
"node": "node1",
"storage": "backup",
"vmids": [100],
"wait": True
}):
with pytest.raises(AnsibleExitJson) as exc_info:
proxmox_backup.main()
result = exc_info.value.args[0]
assert result["changed"] is True
assert result["msg"] == "Backups succeeded"
for backup_result in result["backups"]:
assert backup_result["upid"] in {
VZDUMP_API_RETURN[key] for key in VZDUMP_API_RETURN}
assert self.mock_get_taskok.call_count == 1
assert self.mock_post_vzdump.call_count == 1
def test_fail_insufficient_permissions(self):
with set_module_args({
"api_user": "root@pam",
"api_password": "secret",
"api_host": "127.0.0.1",
"mode": "include",
"storage": "backup",
"performance_tweaks": "max-workers=2",
"vmids": [100],
"wait": True
}):
with pytest.raises(AnsibleFailJson) as exc_info:
proxmox_backup.main()
result = exc_info.value.args[0]
assert result["msg"] == "Insufficient permission: Performance_tweaks and bandwidth require 'Sys.Modify' permission for '/'"
assert self.mock_get_taskok.call_count == 0
assert self.mock_post_vzdump.call_count == 0
def test_fail_missing_node(self):
with set_module_args({
"api_user": "root@pam",
"api_password": "secret",
"api_host": "127.0.0.1",
"mode": "include",
"storage": "backup",
"node": "nonexistingnode",
"vmids": [100],
"wait": True
}):
with pytest.raises(AnsibleFailJson) as exc_info:
proxmox_backup.main()
result = exc_info.value.args[0]
assert result["msg"] == "Node nonexistingnode was specified, but does not exist on the cluster"
assert self.mock_get_taskok.call_count == 0
assert self.mock_post_vzdump.call_count == 0
def test_fail_missing_storage(self):
with set_module_args({
"api_user": "root@pam",
"api_password": "secret",
"api_host": "127.0.0.1",
"mode": "include",
"storage": "nonexistingstorage",
"vmids": [100],
"wait": True
}):
with pytest.raises(AnsibleFailJson) as exc_info:
proxmox_backup.main()
result = exc_info.value.args[0]
assert result["msg"] == "Storage nonexistingstorage does not exist in the cluster"
assert self.mock_get_taskok.call_count == 0
assert self.mock_post_vzdump.call_count == 0


@@ -1,275 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2024 Marzieh Raoufnezhad <raoufnezhad at gmail.com>
# Copyright (c) 2024 Maryam Mayabi <mayabi.ahm at gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
proxmoxer = pytest.importorskip("proxmoxer")
from ansible_collections.community.general.plugins.modules import proxmox_backup_info
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import patch
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import (
AnsibleExitJson,
AnsibleFailJson,
ModuleTestCase,
set_module_args,
)
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
RESOURCE_LIST = [
{
"uptime": 0,
"diskwrite": 0,
"name": "test01",
"maxcpu": 0,
"node": "NODE1",
"mem": 0,
"netout": 0,
"netin": 0,
"maxmem": 0,
"diskread": 0,
"disk": 0,
"maxdisk": 0,
"status": "running",
"cpu": 0,
"id": "qemu/100",
"template": 0,
"vmid": 100,
"type": "qemu"
},
{
"uptime": 0,
"diskwrite": 0,
"name": "test02",
"maxcpu": 0,
"node": "NODE1",
"mem": 0,
"netout": 0,
"netin": 0,
"maxmem": 0,
"diskread": 0,
"disk": 0,
"maxdisk": 0,
"status": "running",
"cpu": 0,
"id": "qemu/101",
"template": 0,
"vmid": 101,
"type": "qemu"
},
{
"uptime": 0,
"diskwrite": 0,
"name": "test03",
"maxcpu": 0,
"node": "NODE2",
"mem": 0,
"netout": 0,
"netin": 0,
"maxmem": 0,
"diskread": 0,
"disk": 0,
"maxdisk": 0,
"status": "running",
"cpu": 0,
"id": "qemu/102",
"template": 0,
"vmid": 102,
"type": "qemu"
}
]
BACKUP_JOBS = [
{
"type": "vzdump",
"id": "backup-83831498-c631",
"storage": "local",
"vmid": "100",
"enabled": 1,
"next-run": 1735138800,
"mailnotification": "always",
"schedule": "06,18:30",
"mode": "snapshot",
"notes-template": "guestname"
},
{
"schedule": "sat 15:00",
"notes-template": "guestname",
"mode": "snapshot",
"mailnotification": "always",
"next-run": 1735385400,
"type": "vzdump",
"enabled": 1,
"vmid": "100,101,102",
"storage": "local",
"id": "backup-70025700-2302",
}
]
EXPECTED_BACKUP_OUTPUT = [
{
"bktype": "vzdump",
"enabled": 1,
"id": "backup-83831498-c631",
"mode": "snapshot",
"next-run": "2024-12-25 15:00:00",
"schedule": "06,18:30",
"storage": "local",
"vm_name": "test01",
"vmid": "100"
},
{
"bktype": "vzdump",
"enabled": 1,
"id": "backup-70025700-2302",
"mode": "snapshot",
"next-run": "2024-12-28 11:30:00",
"schedule": "sat 15:00",
"storage": "local",
"vm_name": "test01",
"vmid": "100"
},
{
"bktype": "vzdump",
"enabled": 1,
"id": "backup-70025700-2302",
"mode": "snapshot",
"next-run": "2024-12-28 11:30:00",
"schedule": "sat 15:00",
"storage": "local",
"vm_name": "test02",
"vmid": "101"
},
{
"bktype": "vzdump",
"enabled": 1,
"id": "backup-70025700-2302",
"mode": "snapshot",
"next-run": "2024-12-28 11:30:00",
"schedule": "sat 15:00",
"storage": "local",
"vm_name": "test03",
"vmid": "102"
}
]
EXPECTED_BACKUP_JOBS_OUTPUT = [
{
"enabled": 1,
"id": "backup-83831498-c631",
"mailnotification": "always",
"mode": "snapshot",
"next-run": 1735138800,
"notes-template": "guestname",
"schedule": "06,18:30",
"storage": "local",
"type": "vzdump",
"vmid": "100"
},
{
"enabled": 1,
"id": "backup-70025700-2302",
"mailnotification": "always",
"mode": "snapshot",
"next-run": 1735385400,
"notes-template": "guestname",
"schedule": "sat 15:00",
"storage": "local",
"type": "vzdump",
"vmid": "100,101,102"
}
]
class TestProxmoxBackupInfoModule(ModuleTestCase):
def setUp(self):
super(TestProxmoxBackupInfoModule, self).setUp()
proxmox_utils.HAS_PROXMOXER = True
self.module = proxmox_backup_info
self.connect_mock = patch(
"ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect",
).start()
self.connect_mock.return_value.cluster.resources.get.return_value = (
RESOURCE_LIST
)
self.connect_mock.return_value.cluster.backup.get.return_value = (
BACKUP_JOBS
)
def tearDown(self):
self.connect_mock.stop()
super(TestProxmoxBackupInfoModule, self).tearDown()
def test_module_fail_when_required_args_missing(self):
with pytest.raises(AnsibleFailJson) as exc_info:
with set_module_args({}):
self.module.main()
result = exc_info.value.args[0]
assert result["msg"] == "missing required arguments: api_host, api_user"
def test_get_all_backups_information(self):
with pytest.raises(AnsibleExitJson) as exc_info:
with set_module_args({
'api_host': 'proxmoxhost',
'api_user': 'root@pam',
'api_password': 'supersecret'
}):
self.module.main()
result = exc_info.value.args[0]
assert result["backup_info"] == EXPECTED_BACKUP_OUTPUT
def test_get_specific_backup_information_by_vmname(self):
with pytest.raises(AnsibleExitJson) as exc_info:
vmname = 'test01'
expected_output = [
backup for backup in EXPECTED_BACKUP_OUTPUT if backup["vm_name"] == vmname
]
with set_module_args({
'api_host': 'proxmoxhost',
'api_user': 'root@pam',
'api_password': 'supersecret',
'vm_name': vmname
}):
self.module.main()
result = exc_info.value.args[0]
assert result["backup_info"] == expected_output
assert len(result["backup_info"]) == 2
def test_get_specific_backup_information_by_vmid(self):
with pytest.raises(AnsibleExitJson) as exc_info:
vmid = "101"
expected_output = [
backup for backup in EXPECTED_BACKUP_OUTPUT if backup["vmid"] == vmid
]
with set_module_args({
'api_host': 'proxmoxhost',
'api_user': 'root@pam',
'api_password': 'supersecret',
'vm_id': vmid
}):
self.module.main()
result = exc_info.value.args[0]
assert result["backup_info"] == expected_output
assert len(result["backup_info"]) == 1
def test_get_specific_backup_information_by_backupjobs(self):
with pytest.raises(AnsibleExitJson) as exc_info:
backupjobs = True
with set_module_args({
'api_host': 'proxmoxhost',
'api_user': 'root@pam',
'api_password': 'supersecret',
'backup_jobs': backupjobs
}):
self.module.main()
result = exc_info.value.args[0]
assert result["backup_info"] == EXPECTED_BACKUP_JOBS_OUTPUT


@@ -1,168 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2021, Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import sys
import pytest
proxmoxer = pytest.importorskip("proxmoxer")
mandatory_py_version = pytest.mark.skipif(
sys.version_info < (2, 7),
reason="The proxmoxer dependency requires python2.7 or higher",
)
from ansible_collections.community.general.plugins.modules import proxmox_kvm
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import (
patch,
DEFAULT,
)
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import (
AnsibleExitJson,
AnsibleFailJson,
ModuleTestCase,
set_module_args,
)
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
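# proxmox_kvm tests: the API connection and the get_node/get_vm/create_vm helpers are patched,
# so only the module's vmid/name handling logic is exercised.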
class TestProxmoxKvmModule(ModuleTestCase):
def setUp(self):
super(TestProxmoxKvmModule, self).setUp()
proxmox_utils.HAS_PROXMOXER = True
self.module = proxmox_kvm
self.connect_mock = patch(
"ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect"
).start()
self.get_node_mock = patch.object(
proxmox_utils.ProxmoxAnsible, "get_node"
).start()
self.get_vm_mock = patch.object(proxmox_utils.ProxmoxAnsible, "get_vm").start()
self.create_vm_mock = patch.object(
proxmox_kvm.ProxmoxKvmAnsible, "create_vm"
).start()
def tearDown(self):
self.create_vm_mock.stop()
self.get_vm_mock.stop()
self.get_node_mock.stop()
self.connect_mock.stop()
super(TestProxmoxKvmModule, self).tearDown()
def test_module_fail_when_required_args_missing(self):
with self.assertRaises(AnsibleFailJson):
with set_module_args({}):
self.module.main()
    def test_module_exits_unchanged_when_provided_vmid_exists(self):
with set_module_args(
{
"api_host": "host",
"api_user": "user",
"api_password": "password",
"vmid": "100",
"node": "pve",
}
):
self.get_vm_mock.return_value = [{"vmid": "100"}]
with pytest.raises(AnsibleExitJson) as exc_info:
self.module.main()
assert self.get_vm_mock.call_count == 1
result = exc_info.value.args[0]
assert result["changed"] is False
assert result["msg"] == "VM with vmid <100> already exists"
def test_vm_created_when_vmid_not_exist_but_name_already_exist(self):
with set_module_args(
{
"api_host": "host",
"api_user": "user",
"api_password": "password",
"vmid": "100",
"name": "existing.vm.local",
"node": "pve",
}
):
self.get_vm_mock.return_value = None
with pytest.raises(AnsibleExitJson) as exc_info:
self.module.main()
assert self.get_vm_mock.call_count == 1
assert self.get_node_mock.call_count == 1
result = exc_info.value.args[0]
assert result["changed"] is True
assert result["msg"] == "VM existing.vm.local with vmid 100 deployed"
def test_vm_not_created_when_name_already_exist_and_vmid_not_set(self):
with set_module_args(
{
"api_host": "host",
"api_user": "user",
"api_password": "password",
"name": "existing.vm.local",
"node": "pve",
}
):
with patch.object(proxmox_utils.ProxmoxAnsible, "get_vmid") as get_vmid_mock:
get_vmid_mock.return_value = {
"vmid": 100,
"name": "existing.vm.local",
}
with pytest.raises(AnsibleExitJson) as exc_info:
self.module.main()
assert get_vmid_mock.call_count == 1
result = exc_info.value.args[0]
assert result["changed"] is False
def test_vm_created_when_name_doesnt_exist_and_vmid_not_set(self):
with set_module_args(
{
"api_host": "host",
"api_user": "user",
"api_password": "password",
"name": "existing.vm.local",
"node": "pve",
}
):
self.get_vm_mock.return_value = None
with patch.multiple(
proxmox_utils.ProxmoxAnsible, get_vmid=DEFAULT, get_nextvmid=DEFAULT
) as utils_mock:
utils_mock["get_vmid"].return_value = None
utils_mock["get_nextvmid"].return_value = 101
with pytest.raises(AnsibleExitJson) as exc_info:
self.module.main()
assert utils_mock["get_vmid"].call_count == 1
assert utils_mock["get_nextvmid"].call_count == 1
result = exc_info.value.args[0]
assert result["changed"] is True
assert result["msg"] == "VM existing.vm.local with vmid 101 deployed"
def test_parse_mac(self):
assert (
proxmox_kvm.parse_mac("virtio=00:11:22:AA:BB:CC,bridge=vmbr0,firewall=1")
== "00:11:22:AA:BB:CC"
)
def test_parse_dev(self):
assert (
proxmox_kvm.parse_dev("local-lvm:vm-1000-disk-0,format=qcow2")
== "local-lvm:vm-1000-disk-0"
)
assert (
proxmox_kvm.parse_dev("local-lvm:vm-101-disk-1,size=8G")
== "local-lvm:vm-101-disk-1"
)
assert (
proxmox_kvm.parse_dev("local-zfs:vm-1001-disk-0")
== "local-zfs:vm-1001-disk-0"
)


@@ -1,131 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import sys
import pytest
proxmoxer = pytest.importorskip('proxmoxer')
mandatory_py_version = pytest.mark.skipif(
sys.version_info < (2, 7),
reason='The proxmoxer dependency requires python2.7 or higher'
)
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import MagicMock, patch
from ansible_collections.community.general.plugins.modules import proxmox_snap
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import set_module_args
def get_resources(type):
return [{"diskwrite": 0,
"vmid": 100,
"node": "localhost",
"id": "lxc/100",
"maxdisk": 10000,
"template": 0,
"disk": 10000,
"uptime": 10000,
"maxmem": 10000,
"maxcpu": 1,
"netin": 10000,
"type": "lxc",
"netout": 10000,
"mem": 10000,
"diskread": 10000,
"cpu": 0.01,
"name": "test-lxc",
"status": "running"}]
def fake_api(mocker):
r = mocker.MagicMock()
r.cluster.resources.get = MagicMock(side_effect=get_resources)
return r
def test_proxmox_snap_without_argument(capfd):
with set_module_args({}):
with pytest.raises(SystemExit) as results:
proxmox_snap.main()
out, err = capfd.readouterr()
assert not err
assert json.loads(out)['failed']
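# The remaining tests run with _ansible_check_mode=True against the mocked API, so they must never report a change.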
@patch('ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect')
def test_create_snapshot_check_mode(connect_mock, capfd, mocker):
with set_module_args({
"hostname": "test-lxc",
"api_user": "root@pam",
"api_password": "secret",
"api_host": "127.0.0.1",
"state": "present",
"snapname": "test",
"timeout": "1",
"force": True,
"_ansible_check_mode": True
}):
proxmox_utils.HAS_PROXMOXER = True
connect_mock.side_effect = lambda: fake_api(mocker)
with pytest.raises(SystemExit) as results:
proxmox_snap.main()
out, err = capfd.readouterr()
assert not err
assert not json.loads(out)['changed']
@patch('ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect')
def test_remove_snapshot_check_mode(connect_mock, capfd, mocker):
with set_module_args({
"hostname": "test-lxc",
"api_user": "root@pam",
"api_password": "secret",
"api_host": "127.0.0.1",
"state": "absent",
"snapname": "test",
"timeout": "1",
"force": True,
"_ansible_check_mode": True
}):
proxmox_utils.HAS_PROXMOXER = True
connect_mock.side_effect = lambda: fake_api(mocker)
with pytest.raises(SystemExit) as results:
proxmox_snap.main()
out, err = capfd.readouterr()
assert not err
assert not json.loads(out)['changed']
@patch('ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect')
def test_rollback_snapshot_check_mode(connect_mock, capfd, mocker):
with set_module_args({
"hostname": "test-lxc",
"api_user": "root@pam",
"api_password": "secret",
"api_host": "127.0.0.1",
"state": "rollback",
"snapname": "test",
"timeout": "1",
"force": True,
"_ansible_check_mode": True
}):
proxmox_utils.HAS_PROXMOXER = True
connect_mock.side_effect = lambda: fake_api(mocker)
with pytest.raises(SystemExit) as results:
proxmox_snap.main()
out, err = capfd.readouterr()
assert not err
output = json.loads(out)
assert not output['changed']
assert output['msg'] == "Snapshot test does not exist"


@@ -1,90 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2023, Julian Vanden Broeck <julian.vandenbroeck at dalibo.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
proxmoxer = pytest.importorskip("proxmoxer")
from ansible_collections.community.general.plugins.modules import proxmox_storage_contents_info
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import patch
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import (
AnsibleExitJson,
AnsibleFailJson,
ModuleTestCase,
set_module_args,
)
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
NODE1 = "pve"
RAW_LIST_OUTPUT = [
{
"content": "backup",
"ctime": 1702528474,
"format": "pbs-vm",
"size": 273804166061,
"subtype": "qemu",
"vmid": 931,
"volid": "datastore:backup/vm/931/2023-12-14T04:34:34Z",
},
{
"content": "backup",
"ctime": 1702582560,
"format": "pbs-vm",
"size": 273804166059,
"subtype": "qemu",
"vmid": 931,
"volid": "datastore:backup/vm/931/2023-12-14T19:36:00Z",
},
]
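# Helper building the module argument dict; content defaults to 'all' and vmid to None.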
def get_module_args(node, storage, content="all", vmid=None):
return {
"api_host": "host",
"api_user": "user",
"api_password": "password",
"node": node,
"storage": storage,
"content": content,
"vmid": vmid,
}
class TestProxmoxStorageContentsInfo(ModuleTestCase):
def setUp(self):
super(TestProxmoxStorageContentsInfo, self).setUp()
proxmox_utils.HAS_PROXMOXER = True
self.module = proxmox_storage_contents_info
self.connect_mock = patch(
"ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect",
).start()
self.connect_mock.return_value.nodes.return_value.storage.return_value.content.return_value.get.return_value = (
RAW_LIST_OUTPUT
)
self.connect_mock.return_value.nodes.get.return_value = [{"node": NODE1}]
def tearDown(self):
self.connect_mock.stop()
super(TestProxmoxStorageContentsInfo, self).tearDown()
def test_module_fail_when_required_args_missing(self):
with pytest.raises(AnsibleFailJson) as exc_info:
with set_module_args({}):
self.module.main()
def test_storage_contents_info(self):
with pytest.raises(AnsibleExitJson) as exc_info:
with set_module_args(get_module_args(node=NODE1, storage="datastore")):
expected_output = {}
self.module.main()
result = exc_info.value.args[0]
assert not result["changed"]
assert result["proxmox_storage_content"] == RAW_LIST_OUTPUT


@@ -1,206 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2021, Andreas Botzner (@paginabianca) <andreas at botzner dot com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
#
# Proxmox Tasks module unit tests.
# The API responses used in these tests were recorded from PVE version 6.4-8
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import sys
import pytest
proxmoxer = pytest.importorskip('proxmoxer')
mandatory_py_version = pytest.mark.skipif(
sys.version_info < (2, 7),
reason='The proxmoxer dependency requires python2.7 or higher'
)
from ansible_collections.community.general.plugins.modules import proxmox_tasks_info
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import patch
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import set_module_args
NODE = 'node01'
TASK_UPID = 'UPID:iaclab-01-01:000029DD:1599528B:6108F068:srvreload:networking:root@pam:'
TASKS = [
{
"endtime": 1629092710,
"id": "networking",
"node": "iaclab-01-01",
"pid": 3539,
"pstart": 474062216,
"starttime": 1629092709,
"status": "OK",
"type": "srvreload",
"upid": "UPID:iaclab-01-01:00000DD3:1C419D88:6119FB65:srvreload:networking:root@pam:",
"user": "root@pam"
},
{
"endtime": 1627975785,
"id": "networking",
"node": "iaclab-01-01",
"pid": 10717,
"pstart": 362369675,
"starttime": 1627975784,
"status": "command 'ifreload -a' failed: exit code 1",
"type": "srvreload",
"upid": "UPID:iaclab-01-01:000029DD:1599528B:6108F068:srvreload:networking:root@pam:",
"user": "root@pam"
},
{
"endtime": 1627975503,
"id": "networking",
"node": "iaclab-01-01",
"pid": 6778,
"pstart": 362341540,
"starttime": 1627975503,
"status": "OK",
"type": "srvreload",
"upid": "UPID:iaclab-01-01:00001A7A:1598E4A4:6108EF4F:srvreload:networking:root@pam:",
"user": "root@pam"
}
]
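# Same entries as TASKS, plus a 'failed' flag derived from each task's status string.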
EXPECTED_TASKS = [
{
"endtime": 1629092710,
"id": "networking",
"node": "iaclab-01-01",
"pid": 3539,
"pstart": 474062216,
"starttime": 1629092709,
"status": "OK",
"type": "srvreload",
"upid": "UPID:iaclab-01-01:00000DD3:1C419D88:6119FB65:srvreload:networking:root@pam:",
"user": "root@pam",
"failed": False
},
{
"endtime": 1627975785,
"id": "networking",
"node": "iaclab-01-01",
"pid": 10717,
"pstart": 362369675,
"starttime": 1627975784,
"status": "command 'ifreload -a' failed: exit code 1",
"type": "srvreload",
"upid": "UPID:iaclab-01-01:000029DD:1599528B:6108F068:srvreload:networking:root@pam:",
"user": "root@pam",
"failed": True
},
{
"endtime": 1627975503,
"id": "networking",
"node": "iaclab-01-01",
"pid": 6778,
"pstart": 362341540,
"starttime": 1627975503,
"status": "OK",
"type": "srvreload",
"upid": "UPID:iaclab-01-01:00001A7A:1598E4A4:6108EF4F:srvreload:networking:root@pam:",
"user": "root@pam",
"failed": False
}
]
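# Expected result when filtering on TASK_UPID: the single failed 'ifreload -a' task.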
EXPECTED_SINGLE_TASK = [
{
"endtime": 1627975785,
"id": "networking",
"node": "iaclab-01-01",
"pid": 10717,
"pstart": 362369675,
"starttime": 1627975784,
"status": "command 'ifreload -a' failed: exit code 1",
"type": "srvreload",
"upid": "UPID:iaclab-01-01:000029DD:1599528B:6108F068:srvreload:networking:root@pam:",
"user": "root@pam",
"failed": True
},
]
@patch('ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect')
def test_without_required_parameters(connect_mock, capfd, mocker):
with set_module_args({}):
with pytest.raises(SystemExit):
proxmox_tasks_info.main()
out, err = capfd.readouterr()
assert not err
assert json.loads(out)['failed']
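# Returns a mocked API whose nodes(...).tasks.get() yields the recorded TASKS fixture.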
def mock_api_tasks_response(mocker):
m = mocker.MagicMock()
g = mocker.MagicMock()
m.nodes = mocker.MagicMock(return_value=g)
g.tasks.get = mocker.MagicMock(return_value=TASKS)
return m
@patch('ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect')
def test_get_tasks(connect_mock, capfd, mocker):
with set_module_args({
'api_host': 'proxmoxhost',
'api_user': 'root@pam',
'api_password': 'supersecret',
'node': NODE
}):
connect_mock.side_effect = lambda: mock_api_tasks_response(mocker)
proxmox_utils.HAS_PROXMOXER = True
with pytest.raises(SystemExit):
proxmox_tasks_info.main()
out, err = capfd.readouterr()
assert not err
assert len(json.loads(out)['proxmox_tasks']) != 0
assert not json.loads(out)['changed']
@patch('ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect')
def test_get_single_task(connect_mock, capfd, mocker):
with set_module_args({
'api_host': 'proxmoxhost',
'api_user': 'root@pam',
'api_password': 'supersecret',
'node': NODE,
'task': TASK_UPID
}):
connect_mock.side_effect = lambda: mock_api_tasks_response(mocker)
proxmox_utils.HAS_PROXMOXER = True
with pytest.raises(SystemExit):
proxmox_tasks_info.main()
out, err = capfd.readouterr()
assert not err
assert len(json.loads(out)['proxmox_tasks']) == 1
assert json.loads(out)
assert not json.loads(out)['changed']
@patch('ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect')
def test_get_non_existent_task(connect_mock, capfd, mocker):
with set_module_args({
'api_host': 'proxmoxhost',
'api_user': 'root@pam',
'api_password': 'supersecret',
'node': NODE,
'task': 'UPID:nonexistent'
}):
connect_mock.side_effect = lambda: mock_api_tasks_response(mocker)
proxmox_utils.HAS_PROXMOXER = True
with pytest.raises(SystemExit):
proxmox_tasks_info.main()
out, err = capfd.readouterr()
assert not err
assert json.loads(out)['failed']
assert 'proxmox_tasks' not in json.loads(out)
assert not json.loads(out)['changed']
    assert json.loads(out)['msg'] == 'Task: UPID:nonexistent does not exist on node: node01.'


@@ -1,66 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2023, Sergei Antipov <greendayonfire at gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os
import sys
import pytest
proxmoxer = pytest.importorskip('proxmoxer')
mandatory_py_version = pytest.mark.skipif(
sys.version_info < (2, 7),
reason='The proxmoxer dependency requires python2.7 or higher'
)
from ansible_collections.community.general.plugins.modules import proxmox_template
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import patch, Mock
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import (
AnsibleFailJson,
ModuleTestCase,
set_module_args,
)
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
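# The single test below patches os.stat so the fake ISO reports 268435460 bytes, just over the
# 256 MB limit, and checks that the module fails cleanly when requests_toolbelt is unavailable.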
class TestProxmoxTemplateModule(ModuleTestCase):
def setUp(self):
super(TestProxmoxTemplateModule, self).setUp()
proxmox_utils.HAS_PROXMOXER = True
self.module = proxmox_template
self.connect_mock = patch(
"ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect"
)
self.connect_mock.start()
def tearDown(self):
self.connect_mock.stop()
super(TestProxmoxTemplateModule, self).tearDown()
@patch("os.stat")
@patch.multiple(os.path, exists=Mock(return_value=True), isfile=Mock(return_value=True))
def test_module_fail_when_toolbelt_not_installed_and_file_size_is_big(self, mock_stat):
self.module.HAS_REQUESTS_TOOLBELT = False
mock_stat.return_value.st_size = 268435460
with set_module_args(
{
"api_host": "host",
"api_user": "user",
"api_password": "password",
"node": "pve",
"src": "/tmp/mock.iso",
"content_type": "iso"
}
):
with pytest.raises(AnsibleFailJson) as exc_info:
self.module.main()
result = exc_info.value.args[0]
assert result["failed"] is True
assert result["msg"] == "'requests_toolbelt' module is required to upload files larger than 256MB"


@@ -1,714 +0,0 @@
# -*- coding: utf-8 -*-
#
# Copyright (c) 2023, Sergei Antipov <greendayonfire at gmail.com>
# GNU General Public License v3.0+ (see LICENSES/GPL-3.0-or-later.txt or https://www.gnu.org/licenses/gpl-3.0.txt)
# SPDX-License-Identifier: GPL-3.0-or-later
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import sys
import pytest
proxmoxer = pytest.importorskip("proxmoxer")
mandatory_py_version = pytest.mark.skipif(
sys.version_info < (2, 7),
reason="The proxmoxer dependency requires python2.7 or higher",
)
from ansible_collections.community.general.plugins.modules import proxmox_vm_info
from ansible_collections.community.internal_test_tools.tests.unit.compat.mock import patch
from ansible_collections.community.internal_test_tools.tests.unit.plugins.modules.utils import (
AnsibleExitJson,
AnsibleFailJson,
ModuleTestCase,
set_module_args,
)
import ansible_collections.community.general.plugins.module_utils.proxmox as proxmox_utils
NODE1 = "pve"
NODE2 = "pve2"
RAW_CLUSTER_OUTPUT = [
{
"cpu": 0.174069059487628,
"disk": 0,
"diskread": 6656,
"diskwrite": 0,
"id": "qemu/100",
"maxcpu": 1,
"maxdisk": 34359738368,
"maxmem": 4294967296,
"mem": 35304543,
"name": "pxe.home.arpa",
"netin": 416956,
"netout": 17330,
"node": NODE1,
"status": "running",
"template": 0,
"type": "qemu",
"uptime": 669,
"vmid": 100,
},
{
"cpu": 0,
"disk": 0,
"diskread": 0,
"diskwrite": 0,
"id": "qemu/101",
"maxcpu": 1,
"maxdisk": 0,
"maxmem": 536870912,
"mem": 0,
"name": "test1",
"netin": 0,
"netout": 0,
"node": NODE2,
"pool": "pool1",
"status": "stopped",
"template": 0,
"type": "qemu",
"uptime": 0,
"vmid": 101,
},
{
"cpu": 0,
"disk": 352190464,
"diskread": 0,
"diskwrite": 0,
"id": "lxc/102",
"maxcpu": 2,
"maxdisk": 10737418240,
"maxmem": 536870912,
"mem": 28192768,
"name": "test-lxc.home.arpa",
"netin": 102757,
"netout": 446,
"node": NODE1,
"status": "running",
"template": 0,
"type": "lxc",
"uptime": 161,
"vmid": 102,
},
{
"cpu": 0,
"disk": 0,
"diskread": 0,
"diskwrite": 0,
"id": "lxc/103",
"maxcpu": 2,
"maxdisk": 10737418240,
"maxmem": 536870912,
"mem": 0,
"name": "test1-lxc.home.arpa",
"netin": 0,
"netout": 0,
"node": NODE2,
"pool": "pool1",
"status": "stopped",
"template": 0,
"type": "lxc",
"uptime": 0,
"vmid": 103,
},
{
"cpu": 0,
"disk": 0,
"diskread": 0,
"diskwrite": 0,
"id": "lxc/104",
"maxcpu": 2,
"maxdisk": 10737418240,
"maxmem": 536870912,
"mem": 0,
"name": "test-lxc.home.arpa",
"netin": 0,
"netout": 0,
"node": NODE2,
"pool": "pool1",
"status": "stopped",
"template": 0,
"type": "lxc",
"uptime": 0,
"vmid": 104,
},
{
"cpu": 0,
"disk": 0,
"diskread": 0,
"diskwrite": 0,
"id": "lxc/105",
"maxcpu": 2,
"maxdisk": 10737418240,
"maxmem": 536870912,
"mem": 0,
"name": "",
"netin": 0,
"netout": 0,
"node": NODE2,
"pool": "pool1",
"status": "stopped",
"template": 0,
"type": "lxc",
"uptime": 0,
"vmid": 105,
},
]
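# Per-node LXC listing; note that vmid is a string here, unlike in the cluster resources output.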
RAW_LXC_OUTPUT = [
{
"cpu": 0,
"cpus": 2,
"disk": 0,
"diskread": 0,
"diskwrite": 0,
"maxdisk": 10737418240,
"maxmem": 536870912,
"maxswap": 536870912,
"mem": 0,
"name": "test1-lxc.home.arpa",
"netin": 0,
"netout": 0,
"status": "stopped",
"swap": 0,
"type": "lxc",
"uptime": 0,
"vmid": "103",
},
{
"cpu": 0,
"cpus": 2,
"disk": 352190464,
"diskread": 0,
"diskwrite": 0,
"maxdisk": 10737418240,
"maxmem": 536870912,
"maxswap": 536870912,
"mem": 28192768,
"name": "test-lxc.home.arpa",
"netin": 102757,
"netout": 446,
"pid": 4076752,
"status": "running",
"swap": 0,
"type": "lxc",
"uptime": 161,
"vmid": "102",
},
{
"cpu": 0,
"cpus": 2,
"disk": 0,
"diskread": 0,
"diskwrite": 0,
"maxdisk": 10737418240,
"maxmem": 536870912,
"maxswap": 536870912,
"mem": 0,
"name": "test-lxc.home.arpa",
"netin": 0,
"netout": 0,
"status": "stopped",
"swap": 0,
"type": "lxc",
"uptime": 0,
"vmid": "104",
},
{
"cpu": 0,
"cpus": 2,
"disk": 0,
"diskread": 0,
"diskwrite": 0,
"maxdisk": 10737418240,
"maxmem": 536870912,
"maxswap": 536870912,
"mem": 0,
"name": "",
"netin": 0,
"netout": 0,
"status": "stopped",
"swap": 0,
"type": "lxc",
"uptime": 0,
"vmid": "105",
},
]
RAW_QEMU_OUTPUT = [
{
"cpu": 0,
"cpus": 1,
"disk": 0,
"diskread": 0,
"diskwrite": 0,
"maxdisk": 0,
"maxmem": 536870912,
"mem": 0,
"name": "test1",
"netin": 0,
"netout": 0,
"status": "stopped",
"uptime": 0,
"vmid": 101,
},
{
"cpu": 0.174069059487628,
"cpus": 1,
"disk": 0,
"diskread": 6656,
"diskwrite": 0,
"maxdisk": 34359738368,
"maxmem": 4294967296,
"mem": 35304543,
"name": "pxe.home.arpa",
"netin": 416956,
"netout": 17330,
"pid": 4076688,
"status": "running",
"uptime": 669,
"vmid": 100,
},
]
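# Expected module output: the cluster, QEMU and LXC fixtures merged per guest,
# with vmid coerced to int and template to bool.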
EXPECTED_VMS_OUTPUT = [
{
"cpu": 0.174069059487628,
"cpus": 1,
"disk": 0,
"diskread": 6656,
"diskwrite": 0,
"id": "qemu/100",
"maxcpu": 1,
"maxdisk": 34359738368,
"maxmem": 4294967296,
"mem": 35304543,
"name": "pxe.home.arpa",
"netin": 416956,
"netout": 17330,
"node": NODE1,
"pid": 4076688,
"status": "running",
"template": False,
"type": "qemu",
"uptime": 669,
"vmid": 100,
},
{
"cpu": 0,
"cpus": 1,
"disk": 0,
"diskread": 0,
"diskwrite": 0,
"id": "qemu/101",
"maxcpu": 1,
"maxdisk": 0,
"maxmem": 536870912,
"mem": 0,
"name": "test1",
"netin": 0,
"netout": 0,
"node": NODE2,
"pool": "pool1",
"status": "stopped",
"template": False,
"type": "qemu",
"uptime": 0,
"vmid": 101,
},
{
"cpu": 0,
"cpus": 2,
"disk": 352190464,
"diskread": 0,
"diskwrite": 0,
"id": "lxc/102",
"maxcpu": 2,
"maxdisk": 10737418240,
"maxmem": 536870912,
"maxswap": 536870912,
"mem": 28192768,
"name": "test-lxc.home.arpa",
"netin": 102757,
"netout": 446,
"node": NODE1,
"pid": 4076752,
"status": "running",
"swap": 0,
"template": False,
"type": "lxc",
"uptime": 161,
"vmid": 102,
},
{
"cpu": 0,
"cpus": 2,
"disk": 0,
"diskread": 0,
"diskwrite": 0,
"id": "lxc/103",
"maxcpu": 2,
"maxdisk": 10737418240,
"maxmem": 536870912,
"maxswap": 536870912,
"mem": 0,
"name": "test1-lxc.home.arpa",
"netin": 0,
"netout": 0,
"node": NODE2,
"pool": "pool1",
"status": "stopped",
"swap": 0,
"template": False,
"type": "lxc",
"uptime": 0,
"vmid": 103,
},
{
"cpu": 0,
"cpus": 2,
"disk": 0,
"diskread": 0,
"diskwrite": 0,
"id": "lxc/104",
"maxcpu": 2,
"maxdisk": 10737418240,
"maxmem": 536870912,
"maxswap": 536870912,
"mem": 0,
"name": "test-lxc.home.arpa",
"netin": 0,
"netout": 0,
"node": NODE2,
"pool": "pool1",
"status": "stopped",
"swap": 0,
"template": False,
"type": "lxc",
"uptime": 0,
"vmid": 104,
},
{
"cpu": 0,
"cpus": 2,
"disk": 0,
"diskread": 0,
"diskwrite": 0,
"id": "lxc/105",
"maxcpu": 2,
"maxdisk": 10737418240,
"maxmem": 536870912,
"maxswap": 536870912,
"mem": 0,
"name": "",
"netin": 0,
"netout": 0,
"node": NODE2,
"pool": "pool1",
"status": "stopped",
"swap": 0,
"template": False,
"type": "lxc",
"uptime": 0,
"vmid": 105,
},
]
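# Helper building the module argument dict used throughout the tests; by default it requests all guest types with no config.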
def get_module_args(type="all", node=None, vmid=None, name=None, config="none"):
return {
"api_host": "host",
"api_user": "user",
"api_password": "password",
"node": node,
"type": type,
"vmid": vmid,
"name": name,
"config": config,
}
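# proxmox_vm_info tests: setUp mocks the API so the cluster, QEMU and LXC endpoints return the
# fixtures above; name-based tests additionally stub cluster.resources.get for vmid lookup.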
class TestProxmoxVmInfoModule(ModuleTestCase):
def setUp(self):
super(TestProxmoxVmInfoModule, self).setUp()
proxmox_utils.HAS_PROXMOXER = True
self.module = proxmox_vm_info
self.connect_mock = patch(
"ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible._connect",
).start()
self.connect_mock.return_value.nodes.return_value.lxc.return_value.get.return_value = (
RAW_LXC_OUTPUT
)
self.connect_mock.return_value.nodes.return_value.qemu.return_value.get.return_value = (
RAW_QEMU_OUTPUT
)
self.connect_mock.return_value.cluster.return_value.resources.return_value.get.return_value = (
RAW_CLUSTER_OUTPUT
)
self.connect_mock.return_value.nodes.get.return_value = [{"node": NODE1}]
def tearDown(self):
self.connect_mock.stop()
super(TestProxmoxVmInfoModule, self).tearDown()
def test_module_fail_when_required_args_missing(self):
with pytest.raises(AnsibleFailJson) as exc_info:
with set_module_args({}):
self.module.main()
result = exc_info.value.args[0]
assert result["msg"] == "missing required arguments: api_host, api_user"
def test_get_lxc_vms_information(self):
with pytest.raises(AnsibleExitJson) as exc_info:
with set_module_args(get_module_args(type="lxc")):
expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["type"] == "lxc"]
self.module.main()
result = exc_info.value.args[0]
assert result["changed"] is False
assert result["proxmox_vms"] == expected_output
def test_get_qemu_vms_information(self):
with pytest.raises(AnsibleExitJson) as exc_info:
with set_module_args(get_module_args(type="qemu")):
expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["type"] == "qemu"]
self.module.main()
result = exc_info.value.args[0]
assert result["proxmox_vms"] == expected_output
def test_get_all_vms_information(self):
with pytest.raises(AnsibleExitJson) as exc_info:
with set_module_args(get_module_args()):
self.module.main()
result = exc_info.value.args[0]
assert result["proxmox_vms"] == EXPECTED_VMS_OUTPUT
def test_vmid_is_converted_to_int(self):
with pytest.raises(AnsibleExitJson) as exc_info:
with set_module_args(get_module_args(type="lxc")):
self.module.main()
result = exc_info.value.args[0]
assert isinstance(result["proxmox_vms"][0]["vmid"], int)
def test_get_specific_lxc_vm_information(self):
with pytest.raises(AnsibleExitJson) as exc_info:
vmid = 102
expected_output = [
vm
for vm in EXPECTED_VMS_OUTPUT
if vm["vmid"] == vmid and vm["type"] == "lxc"
]
with set_module_args(get_module_args(type="lxc", vmid=vmid)):
self.module.main()
result = exc_info.value.args[0]
assert result["proxmox_vms"] == expected_output
assert len(result["proxmox_vms"]) == 1
def test_get_specific_qemu_vm_information(self):
with pytest.raises(AnsibleExitJson) as exc_info:
vmid = 100
expected_output = [
vm
for vm in EXPECTED_VMS_OUTPUT
if vm["vmid"] == vmid and vm["type"] == "qemu"
]
with set_module_args(get_module_args(type="qemu", vmid=vmid)):
self.module.main()
result = exc_info.value.args[0]
assert result["proxmox_vms"] == expected_output
assert len(result["proxmox_vms"]) == 1
def test_get_specific_vm_information(self):
with pytest.raises(AnsibleExitJson) as exc_info:
vmid = 100
expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["vmid"] == vmid]
with set_module_args(get_module_args(type="all", vmid=vmid)):
self.module.main()
result = exc_info.value.args[0]
assert result["proxmox_vms"] == expected_output
assert len(result["proxmox_vms"]) == 1
def test_get_specific_vm_information_by_using_name(self):
name = "test1-lxc.home.arpa"
self.connect_mock.return_value.cluster.resources.get.return_value = [
{"name": name, "vmid": "103"}
]
with pytest.raises(AnsibleExitJson) as exc_info:
expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["name"] == name]
with set_module_args(get_module_args(type="all", name=name)):
self.module.main()
result = exc_info.value.args[0]
assert result["proxmox_vms"] == expected_output
assert len(result["proxmox_vms"]) == 1
def test_get_multiple_vms_with_the_same_name(self):
name = "test-lxc.home.arpa"
self.connect_mock.return_value.cluster.resources.get.return_value = [
{"name": name, "vmid": "102"},
{"name": name, "vmid": "104"},
]
with pytest.raises(AnsibleExitJson) as exc_info:
expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["name"] == name]
with set_module_args(get_module_args(type="all", name=name)):
self.module.main()
result = exc_info.value.args[0]
assert result["proxmox_vms"] == expected_output
assert len(result["proxmox_vms"]) == 2
def test_get_vm_with_an_empty_name(self):
name = ""
self.connect_mock.return_value.cluster.resources.get.return_value = [
{"name": name, "vmid": "105"},
]
with pytest.raises(AnsibleExitJson) as exc_info:
expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["name"] == name]
with set_module_args(get_module_args(type="all", name=name)):
self.module.main()
result = exc_info.value.args[0]
assert result["proxmox_vms"] == expected_output
assert len(result["proxmox_vms"]) == 1
def test_get_all_lxc_vms_from_specific_node(self):
with pytest.raises(AnsibleExitJson) as exc_info:
expected_output = [
vm
for vm in EXPECTED_VMS_OUTPUT
if vm["node"] == NODE1 and vm["type"] == "lxc"
]
with set_module_args(get_module_args(type="lxc", node=NODE1)):
self.module.main()
result = exc_info.value.args[0]
assert result["proxmox_vms"] == expected_output
assert len(result["proxmox_vms"]) == 1
def test_get_all_qemu_vms_from_specific_node(self):
with pytest.raises(AnsibleExitJson) as exc_info:
expected_output = [
vm
for vm in EXPECTED_VMS_OUTPUT
if vm["node"] == NODE1 and vm["type"] == "qemu"
]
with set_module_args(get_module_args(type="qemu", node=NODE1)):
self.module.main()
result = exc_info.value.args[0]
assert result["proxmox_vms"] == expected_output
assert len(result["proxmox_vms"]) == 1
def test_get_all_vms_from_specific_node(self):
with pytest.raises(AnsibleExitJson) as exc_info:
expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["node"] == NODE1]
with set_module_args(get_module_args(node=NODE1)):
self.module.main()
result = exc_info.value.args[0]
assert result["proxmox_vms"] == expected_output
assert len(result["proxmox_vms"]) == 2
def test_module_returns_empty_list_when_vm_does_not_exist(self):
with pytest.raises(AnsibleExitJson) as exc_info:
vmid = 200
with set_module_args(get_module_args(type="all", vmid=vmid)):
self.module.main()
result = exc_info.value.args[0]
assert result["proxmox_vms"] == []
def test_module_fail_when_qemu_request_fails(self):
self.connect_mock.return_value.nodes.return_value.qemu.return_value.get.side_effect = IOError(
"Some mocked connection error."
)
with pytest.raises(AnsibleFailJson) as exc_info:
with set_module_args(get_module_args(type="qemu")):
self.module.main()
result = exc_info.value.args[0]
assert "Failed to retrieve QEMU VMs information:" in result["msg"]
def test_module_fail_when_lxc_request_fails(self):
self.connect_mock.return_value.nodes.return_value.lxc.return_value.get.side_effect = IOError(
"Some mocked connection error."
)
with pytest.raises(AnsibleFailJson) as exc_info:
with set_module_args(get_module_args(type="lxc")):
self.module.main()
result = exc_info.value.args[0]
assert "Failed to retrieve LXC VMs information:" in result["msg"]
def test_module_fail_when_cluster_resources_request_fails(self):
self.connect_mock.return_value.cluster.return_value.resources.return_value.get.side_effect = IOError(
"Some mocked connection error."
)
with pytest.raises(AnsibleFailJson) as exc_info:
with set_module_args(get_module_args()):
self.module.main()
result = exc_info.value.args[0]
assert (
"Failed to retrieve VMs information from cluster resources:"
in result["msg"]
)
def test_module_fail_when_node_does_not_exist(self):
with pytest.raises(AnsibleFailJson) as exc_info:
with set_module_args(get_module_args(type="all", node="NODE3")):
self.module.main()
result = exc_info.value.args[0]
assert result["msg"] == "Node NODE3 doesn't exist in PVE cluster"
def test_call_to_get_vmid_is_not_used_when_vmid_provided(self):
with patch(
"ansible_collections.community.general.plugins.module_utils.proxmox.ProxmoxAnsible.get_vmid"
) as get_vmid_mock:
with pytest.raises(AnsibleExitJson):
vmid = 100
with set_module_args(
get_module_args(type="all", vmid=vmid, name="something")
):
self.module.main()
assert get_vmid_mock.call_count == 0
def test_config_returned_when_specified_qemu_vm_with_config(self):
config_vm_value = {
'scsi0': 'local-lvm:vm-101-disk-0,iothread=1,size=32G',
'net0': 'virtio=4E:79:9F:A8:EE:E4,bridge=vmbr0,firewall=1',
'scsihw': 'virtio-scsi-single',
'cores': 1,
'name': 'test1',
'ostype': 'l26',
'boot': 'order=scsi0;ide2;net0',
'memory': 2048,
'sockets': 1,
}
(self.connect_mock.return_value.nodes.return_value.qemu.return_value.
config.return_value.get.return_value) = config_vm_value
with pytest.raises(AnsibleExitJson) as exc_info:
vmid = 101
with set_module_args(get_module_args(
type="qemu",
vmid=vmid,
config="current",
)):
expected_output = [vm for vm in EXPECTED_VMS_OUTPUT if vm["vmid"] == vmid]
expected_output[0]["config"] = config_vm_value
self.module.main()
result = exc_info.value.args[0]
assert result["proxmox_vms"] == expected_output