Move modules and module_utils unit tests to correct place (#81)

* Move modules and module_utils unit tests to correct place.

* Update ignore.txt

* Fix imports.

* Fix typos.

* Fix more typos.
This commit is contained in:
Felix Fontein 2020-03-31 10:42:38 +02:00 committed by GitHub
parent ab3c2120fb
commit be191cce6c
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
1170 changed files with 732 additions and 751 deletions

View file

@@ -0,0 +1,47 @@
import random
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.module_utils.cloud import _exponential_backoff, \
_full_jitter_backoff
class ExponentialBackoffStrategyTestCase(unittest.TestCase):
    """Exercise the _exponential_backoff delay-sequence generator."""

    def test_no_retries(self):
        # Zero retries must produce no delays at all.
        backoff_gen = _exponential_backoff(retries=0)
        self.assertEqual(list(backoff_gen()), [], 'list should be empty')

    def test_exponential_backoff(self):
        # Delays grow as delay * backoff ** attempt.
        backoff_gen = _exponential_backoff(retries=5, delay=1, backoff=2)
        self.assertEqual(list(backoff_gen()), [1, 2, 4, 8, 16])

    def test_max_delay(self):
        # Growth is clamped once the computed delay exceeds max_delay.
        backoff_gen = _exponential_backoff(retries=7, delay=1, backoff=2, max_delay=60)
        self.assertEqual(list(backoff_gen()), [1, 2, 4, 8, 16, 32, 60])

    def test_max_delay_none(self):
        # With no cap, the progression keeps doubling unchecked.
        backoff_gen = _exponential_backoff(retries=7, delay=1, backoff=2, max_delay=None)
        self.assertEqual(list(backoff_gen()), [1, 2, 4, 8, 16, 32, 64])
class FullJitterBackoffStrategyTestCase(unittest.TestCase):
    """Exercise the _full_jitter_backoff delay-sequence generator."""

    def test_no_retries(self):
        # Zero retries must yield an empty delay sequence.
        backoff_gen = _full_jitter_backoff(retries=0)
        self.assertEqual(list(backoff_gen()), [], 'list should be empty')

    def test_full_jitter(self):
        # With a seeded RNG the jittered delays are reproducible:
        # each delay is randint(0, 2 ** attempt).
        attempts = 5
        seed = 1
        rng = random.Random(seed)
        expected = [rng.randint(0, 2 ** attempt) for attempt in range(0, attempts)]
        backoff_gen = _full_jitter_backoff(
            retries=attempts, delay=1, _random=random.Random(seed))
        self.assertEqual(list(backoff_gen()), expected)

View file

@@ -0,0 +1,69 @@
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
import json
import sys
from io import BytesIO
import pytest
import ansible.module_utils.basic
from ansible.module_utils.six import PY3, string_types
from ansible.module_utils._text import to_bytes
from ansible.module_utils.common._collections_compat import MutableMapping
@pytest.fixture
def stdin(mocker, request):
    """Serve the parametrized module arguments to AnsibleModule via a fake stdin.

    ``request.param`` may be a raw string (passed through as-is) or a mapping
    (normalized into the ``ANSIBLE_MODULE_ARGS`` envelope). The resulting JSON
    bytes are exposed through a BytesIO patched in place of stdin.
    """
    # Save and clear the cached args/argv so AnsibleModule re-reads stdin.
    old_args = ansible.module_utils.basic._ANSIBLE_ARGS
    ansible.module_utils.basic._ANSIBLE_ARGS = None
    old_argv = sys.argv
    sys.argv = ['ansible_unittest']
    if isinstance(request.param, string_types):
        # Raw string: caller supplied pre-serialized input.
        args = request.param
    elif isinstance(request.param, MutableMapping):
        # Wrap bare dicts in the ANSIBLE_MODULE_ARGS envelope and fill in
        # the internal parameters AnsibleModule always expects.
        if 'ANSIBLE_MODULE_ARGS' not in request.param:
            request.param = {'ANSIBLE_MODULE_ARGS': request.param}
        if '_ansible_remote_tmp' not in request.param['ANSIBLE_MODULE_ARGS']:
            request.param['ANSIBLE_MODULE_ARGS']['_ansible_remote_tmp'] = '/tmp'
        if '_ansible_keep_remote_files' not in request.param['ANSIBLE_MODULE_ARGS']:
            request.param['ANSIBLE_MODULE_ARGS']['_ansible_keep_remote_files'] = False
        args = json.dumps(request.param)
    else:
        raise Exception('Malformed data to the stdin pytest fixture')
    fake_stdin = BytesIO(to_bytes(args, errors='surrogate_or_strict'))
    if PY3:
        # On Python 3 the module code reads bytes from sys.stdin.buffer.
        mocker.patch('ansible.module_utils.basic.sys.stdin', mocker.MagicMock())
        mocker.patch('ansible.module_utils.basic.sys.stdin.buffer', fake_stdin)
    else:
        mocker.patch('ansible.module_utils.basic.sys.stdin', fake_stdin)
    yield fake_stdin
    # Teardown: restore the cached args and the original argv.
    ansible.module_utils.basic._ANSIBLE_ARGS = old_args
    sys.argv = old_argv
@pytest.fixture
def am(stdin, request):
    """Build an AnsibleModule wired to the fake ``stdin`` fixture.

    ``request.param``, when it is a dict, is used as the argument_spec.
    """
    # Save and clear cached args/argv (mirrors the stdin fixture).
    old_args = ansible.module_utils.basic._ANSIBLE_ARGS
    ansible.module_utils.basic._ANSIBLE_ARGS = None
    old_argv = sys.argv
    sys.argv = ['ansible_unittest']
    argspec = {}
    if hasattr(request, 'param'):
        if isinstance(request.param, dict):
            argspec = request.param
    am = ansible.module_utils.basic.AnsibleModule(
        argument_spec=argspec,
    )
    am._name = 'ansible_unittest'
    yield am
    # Teardown: restore the cached args and the original argv.
    ansible.module_utils.basic._ANSIBLE_ARGS = old_args
    sys.argv = old_argv

View file

@@ -0,0 +1,518 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
from ansible_collections.community.general.plugins.module_utils.docker.common import (
compare_dict_allow_more_present,
compare_generic,
convert_duration_to_nanosecond,
parse_healthcheck
)
# Fixture entries for compare_dict_allow_more_present: 'result' is True when
# every key of 'av' is present in 'bv' with an equal value ('bv' may contain
# extra keys).
DICT_ALLOW_MORE_PRESENT = (
    {
        'av': {},
        'bv': {'a': 1},
        'result': True
    },
    {
        'av': {'a': 1},
        'bv': {'a': 1, 'b': 2},
        'result': True
    },
    {
        'av': {'a': 1},
        'bv': {'b': 2},
        'result': False
    },
    {
        'av': {'a': 1},
        'bv': {'a': None, 'b': 1},
        'result': False
    },
    {
        'av': {'a': None},
        'bv': {'b': 1},
        'result': False
    },
)
COMPARE_GENERIC = [
########################################################################################
# value
{
'a': 1,
'b': 2,
'method': 'strict',
'type': 'value',
'result': False
},
{
'a': 'hello',
'b': 'hello',
'method': 'strict',
'type': 'value',
'result': True
},
{
'a': None,
'b': 'hello',
'method': 'strict',
'type': 'value',
'result': False
},
{
'a': None,
'b': None,
'method': 'strict',
'type': 'value',
'result': True
},
{
'a': 1,
'b': 2,
'method': 'ignore',
'type': 'value',
'result': True
},
{
'a': None,
'b': 2,
'method': 'ignore',
'type': 'value',
'result': True
},
########################################################################################
# list
{
'a': [
'x',
],
'b': [
'y',
],
'method': 'strict',
'type': 'list',
'result': False
},
{
'a': [
'x',
],
'b': [
'x',
'x',
],
'method': 'strict',
'type': 'list',
'result': False
},
{
'a': [
'x',
'y',
],
'b': [
'x',
'y',
],
'method': 'strict',
'type': 'list',
'result': True
},
{
'a': [
'x',
'y',
],
'b': [
'y',
'x',
],
'method': 'strict',
'type': 'list',
'result': False
},
{
'a': [
'x',
'y',
],
'b': [
'x',
],
'method': 'allow_more_present',
'type': 'list',
'result': False
},
{
'a': [
'x',
],
'b': [
'x',
'y',
],
'method': 'allow_more_present',
'type': 'list',
'result': True
},
{
'a': [
'x',
'x',
'y',
],
'b': [
'x',
'y',
],
'method': 'allow_more_present',
'type': 'list',
'result': False
},
{
'a': [
'x',
'z',
],
'b': [
'x',
'y',
'x',
'z',
],
'method': 'allow_more_present',
'type': 'list',
'result': True
},
{
'a': [
'x',
'y',
],
'b': [
'y',
'x',
],
'method': 'ignore',
'type': 'list',
'result': True
},
########################################################################################
# set
{
'a': [
'x',
],
'b': [
'y',
],
'method': 'strict',
'type': 'set',
'result': False
},
{
'a': [
'x',
],
'b': [
'x',
'x',
],
'method': 'strict',
'type': 'set',
'result': True
},
{
'a': [
'x',
'y',
],
'b': [
'x',
'y',
],
'method': 'strict',
'type': 'set',
'result': True
},
{
'a': [
'x',
'y',
],
'b': [
'y',
'x',
],
'method': 'strict',
'type': 'set',
'result': True
},
{
'a': [
'x',
'y',
],
'b': [
'x',
],
'method': 'allow_more_present',
'type': 'set',
'result': False
},
{
'a': [
'x',
],
'b': [
'x',
'y',
],
'method': 'allow_more_present',
'type': 'set',
'result': True
},
{
'a': [
'x',
'x',
'y',
],
'b': [
'x',
'y',
],
'method': 'allow_more_present',
'type': 'set',
'result': True
},
{
'a': [
'x',
'z',
],
'b': [
'x',
'y',
'x',
'z',
],
'method': 'allow_more_present',
'type': 'set',
'result': True
},
{
'a': [
'x',
'a',
],
'b': [
'y',
'z',
],
'method': 'ignore',
'type': 'set',
'result': True
},
########################################################################################
# set(dict)
{
'a': [
{'x': 1},
],
'b': [
{'y': 1},
],
'method': 'strict',
'type': 'set(dict)',
'result': False
},
{
'a': [
{'x': 1},
],
'b': [
{'x': 1},
],
'method': 'strict',
'type': 'set(dict)',
'result': True
},
{
'a': [
{'x': 1},
],
'b': [
{'x': 1, 'y': 2},
],
'method': 'strict',
'type': 'set(dict)',
'result': True
},
{
'a': [
{'x': 1},
{'x': 2, 'y': 3},
],
'b': [
{'x': 1},
{'x': 2, 'y': 3},
],
'method': 'strict',
'type': 'set(dict)',
'result': True
},
{
'a': [
{'x': 1},
],
'b': [
{'x': 1, 'z': 2},
{'x': 2, 'y': 3},
],
'method': 'allow_more_present',
'type': 'set(dict)',
'result': True
},
{
'a': [
{'x': 1, 'y': 2},
],
'b': [
{'x': 1},
{'x': 2, 'y': 3},
],
'method': 'allow_more_present',
'type': 'set(dict)',
'result': False
},
{
'a': [
{'x': 1, 'y': 3},
],
'b': [
{'x': 1},
{'x': 1, 'y': 3, 'z': 4},
],
'method': 'allow_more_present',
'type': 'set(dict)',
'result': True
},
{
'a': [
{'x': 1},
{'x': 2, 'y': 3},
],
'b': [
{'x': 1},
],
'method': 'ignore',
'type': 'set(dict)',
'result': True
},
########################################################################################
# dict
{
'a': {'x': 1},
'b': {'y': 1},
'method': 'strict',
'type': 'dict',
'result': False
},
{
'a': {'x': 1},
'b': {'x': 1, 'y': 2},
'method': 'strict',
'type': 'dict',
'result': False
},
{
'a': {'x': 1},
'b': {'x': 1},
'method': 'strict',
'type': 'dict',
'result': True
},
{
'a': {'x': 1, 'z': 2},
'b': {'x': 1, 'y': 2},
'method': 'strict',
'type': 'dict',
'result': False
},
{
'a': {'x': 1, 'z': 2},
'b': {'x': 1, 'y': 2},
'method': 'ignore',
'type': 'dict',
'result': True
},
] + [{
'a': entry['av'],
'b': entry['bv'],
'method': 'allow_more_present',
'type': 'dict',
'result': entry['result']
} for entry in DICT_ALLOW_MORE_PRESENT]
@pytest.mark.parametrize("entry", DICT_ALLOW_MORE_PRESENT)
def test_dict_allow_more_present(entry):
    """Each fixture entry's av/bv pair compares to its expected result."""
    expected = entry['result']
    assert compare_dict_allow_more_present(entry['av'], entry['bv']) == expected
@pytest.mark.parametrize("entry", COMPARE_GENERIC)
def test_compare_generic(entry):
    """Generic comparison honors the per-entry method and type."""
    outcome = compare_generic(entry['a'], entry['b'], entry['method'], entry['type'])
    assert outcome == entry['result']
def test_convert_duration_to_nanosecond():
    """Duration strings convert to nanoseconds; malformed input raises ValueError."""
    assert convert_duration_to_nanosecond('5s') == 5000000000
    assert convert_duration_to_nanosecond('1m5s') == 65000000000
    # Non-string input is rejected.
    with pytest.raises(ValueError):
        convert_duration_to_nanosecond([1, 2, 3])
    # Unknown unit suffixes are rejected.
    with pytest.raises(ValueError):
        convert_duration_to_nanosecond('10x')
def test_parse_healthcheck():
    """parse_healthcheck normalizes test commands and converts durations to ns."""
    # A plain string command is wrapped as CMD-SHELL; '1s' becomes 1e9 ns.
    result, disabled = parse_healthcheck({
        'test': 'sleep 1',
        'interval': '1s',
    })
    assert disabled is False
    assert result == {
        'test': ['CMD-SHELL', 'sleep 1'],
        'interval': 1000000000
    }
    # ['NONE'] disables the healthcheck entirely.
    result, disabled = parse_healthcheck({
        'test': ['NONE'],
    })
    assert result is None
    assert disabled
    # Mixed-unit durations (seconds + milliseconds) are summed.
    result, disabled = parse_healthcheck({
        'test': 'sleep 1',
        'interval': '1s423ms'
    })
    assert result == {
        'test': ['CMD-SHELL', 'sleep 1'],
        'interval': 1423000000
    }
    assert disabled is False
    # All supported units at once, hours down to microseconds.
    result, disabled = parse_healthcheck({
        'test': 'sleep 1',
        'interval': '1h1m2s3ms4us'
    })
    assert result == {
        'test': ['CMD-SHELL', 'sleep 1'],
        'interval': 3662003004000
    }
    assert disabled is False

View file

@@ -0,0 +1,171 @@
# -*- coding: utf-8 -*-
# (c) 2016, Tom Melendez (@supertom) <tom@supertom.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import pytest
from ansible_collections.community.general.tests.unit.compat import mock, unittest
from ansible_collections.community.general.plugins.module_utils.gcp import (_get_gcp_ansible_credentials, _get_gcp_credentials, _get_gcp_environ_var,
_get_gcp_environment_credentials,
_validate_credentials_file)
# Fake data/function used for testing
fake_env_data = {'GCE_EMAIL': 'gce-email'}


def fake_get_gcp_environ_var(var_name, default_value):
    """Stand-in for _get_gcp_environ_var that reads the fake_env_data dict."""
    return fake_env_data.get(var_name, default_value)
# Fake AnsibleModule for use in tests
class FakeModule(object):
    """Minimal AnsibleModule stand-in exposing params, fail_json and deprecate."""

    class Params():
        # Backing store for module parameters.
        data = {}

        def get(self, key, alt=None):
            """Return the value stored under *key*, or *alt* when absent."""
            return self.data[key] if key in self.data else alt

    def __init__(self, data=None):
        self.params = FakeModule.Params()
        self.params.data = {} if data is None else data

    def fail_json(self, **kwargs):
        # Tests detect module failure by catching this ValueError
        # instead of letting the real module call sys.exit().
        raise ValueError("fail_json")

    def deprecate(self, **kwargs):
        return None
class GCPAuthTestCase(unittest.TestCase):
    """Tests to verify different Auth mechanisms."""

    def setup_method(self, method):
        # Reset the module-level fake environment between tests,
        # since test methods below rebind it.
        global fake_env_data
        fake_env_data = {'GCE_EMAIL': 'gce-email'}

    def test_get_gcp_ansible_credentials(self):
        input_data = {'service_account_email': 'mysa',
                      'credentials_file': 'path-to-file.json',
                      'project_id': 'my-cool-project'}
        module = FakeModule(input_data)
        actual = _get_gcp_ansible_credentials(module)
        expected = tuple(input_data.values())
        # sorted() so the comparison does not depend on dict value ordering.
        self.assertEqual(sorted(expected), sorted(actual))

    def test_get_gcp_environ_var(self):
        # Chose not to mock this so we could really verify that it
        # works as expected.
        existing_var_name = 'gcp_ansible_auth_test_54321'
        non_existing_var_name = 'doesnt_exist_gcp_ansible_auth_test_12345'
        os.environ[existing_var_name] = 'foobar'
        self.assertEqual('foobar', _get_gcp_environ_var(
            existing_var_name, None))
        del os.environ[existing_var_name]
        self.assertEqual('default_value', _get_gcp_environ_var(
            non_existing_var_name, 'default_value'))

    def test_validate_credentials_file(self):
        # TODO(supertom): Only dealing with p12 here, check the other states
        # of this function
        module = FakeModule()
        with mock.patch('ansible_collections.community.general.plugins.module_utils.gcp.open',
                        mock.mock_open(read_data='foobar'), create=True):
            # pem condition, warning is suppressed with the return_value
            credentials_file = '/foopath/pem.pem'
            # FakeModule.fail_json raises ValueError, which is what we expect
            # the validator to trigger for a pem file.
            with self.assertRaises(ValueError):
                _validate_credentials_file(module,
                                           credentials_file=credentials_file,
                                           require_valid_json=False,
                                           check_libcloud=False)

    @mock.patch('ansible_collections.community.general.plugins.module_utils.gcp._get_gcp_environ_var',
                side_effect=fake_get_gcp_environ_var)
    def test_get_gcp_environment_credentials(self, mockobj):
        # Each rebinding of fake_env_data below simulates a different
        # environment-variable configuration.
        global fake_env_data
        actual = _get_gcp_environment_credentials(None, None, None)
        expected = tuple(['gce-email', None, None])
        self.assertEqual(expected, actual)
        fake_env_data = {'GCE_PEM_FILE_PATH': '/path/to/pem.pem'}
        expected = tuple([None, '/path/to/pem.pem', None])
        actual = _get_gcp_environment_credentials(None, None, None)
        self.assertEqual(expected, actual)
        # pem and creds are set, expect creds
        fake_env_data = {'GCE_PEM_FILE_PATH': '/path/to/pem.pem',
                         'GCE_CREDENTIALS_FILE_PATH': '/path/to/creds.json'}
        expected = tuple([None, '/path/to/creds.json', None])
        actual = _get_gcp_environment_credentials(None, None, None)
        self.assertEqual(expected, actual)
        # expect GOOGLE_APPLICATION_CREDENTIALS over PEM
        fake_env_data = {'GCE_PEM_FILE_PATH': '/path/to/pem.pem',
                         'GOOGLE_APPLICATION_CREDENTIALS': '/path/to/appcreds.json'}
        expected = tuple([None, '/path/to/appcreds.json', None])
        actual = _get_gcp_environment_credentials(None, None, None)
        self.assertEqual(expected, actual)
        # project tests
        fake_env_data = {'GCE_PROJECT': 'my-project'}
        expected = tuple([None, None, 'my-project'])
        actual = _get_gcp_environment_credentials(None, None, None)
        self.assertEqual(expected, actual)
        fake_env_data = {'GOOGLE_CLOUD_PROJECT': 'my-cloud-project'}
        expected = tuple([None, None, 'my-cloud-project'])
        actual = _get_gcp_environment_credentials(None, None, None)
        self.assertEqual(expected, actual)
        # data passed in, picking up project id only
        fake_env_data = {'GOOGLE_CLOUD_PROJECT': 'my-project'}
        expected = tuple(['my-sa-email', '/path/to/creds.json', 'my-project'])
        actual = _get_gcp_environment_credentials(
            'my-sa-email', '/path/to/creds.json', None)
        self.assertEqual(expected, actual)

    @mock.patch('ansible_collections.community.general.plugins.module_utils.gcp._get_gcp_environ_var',
                side_effect=fake_get_gcp_environ_var)
    def test_get_gcp_credentials(self, mockobj):
        global fake_env_data
        fake_env_data = {}
        module = FakeModule()
        module.params.data = {}
        # Nothing is set, calls fail_json
        with pytest.raises(ValueError):
            _get_gcp_credentials(module)
        # project_id (only) is set from Ansible params.
        module.params.data['project_id'] = 'my-project'
        actual = _get_gcp_credentials(
            module, require_valid_json=True, check_libcloud=False)
        expected = {'service_account_email': '',
                    'project_id': 'my-project',
                    'credentials_file': ''}
        self.assertEqual(expected, actual)

View file

@@ -0,0 +1,245 @@
# -*- coding: utf-8 -*-
# (c) 2016, Tom Melendez <tom@supertom.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.google.cloud.plugins.module_utils.gcp_utils import (GcpRequest,
navigate_hash,
remove_nones_from_dict,
replace_resource_dict)
class ReplaceResourceDictTestCase(unittest.TestCase):
    """Tests for replace_resource_dict."""

    def test_given_dict(self):
        # A single resource dict collapses to the requested key's value.
        resource = {'selfLink': 'value'}
        self.assertEqual(replace_resource_dict(resource, 'selfLink'),
                         resource['selfLink'])

    def test_given_array(self):
        # A list of resource dicts maps element-wise to the key's values.
        resource = {'selfLink': 'value'}
        expected = [resource['selfLink']] * 3
        self.assertEqual(replace_resource_dict([resource] * 3, 'selfLink'),
                         expected)
class NavigateHashTestCase(unittest.TestCase):
    """Tests for navigate_hash nested-dict traversal."""

    def test_one_level(self):
        source = {'key': 'value'}
        self.assertEqual(navigate_hash(source, ['key']), source['key'])

    def test_multilevel(self):
        source = {'key': {'key2': 'value'}}
        self.assertEqual(navigate_hash(source, ['key', 'key2']),
                         source['key']['key2'])

    def test_default(self):
        # A missing path falls back to the supplied default value.
        source = {'key': 'value'}
        fallback = 'not found'
        self.assertEqual(navigate_hash(source, ['key', 'key2'], fallback),
                         fallback)
class RemoveNonesFromDictTestCase(unittest.TestCase):
    """Tests for remove_nones_from_dict."""

    def test_remove_nones(self):
        # None values are dropped; real values survive.
        self.assertEqual(
            remove_nones_from_dict({'key': None, 'good': 'value'}),
            {'good': 'value'})

    def test_remove_empty_arrays(self):
        # Empty lists are treated like None and dropped.
        self.assertEqual(
            remove_nones_from_dict({'key': [], 'good': 'value'}),
            {'good': 'value'})

    def test_remove_empty_dicts(self):
        # Empty dicts are treated like None and dropped.
        self.assertEqual(
            remove_nones_from_dict({'key': {}, 'good': 'value'}),
            {'good': 'value'})
class GCPRequestDifferenceTestCase(unittest.TestCase):
    """Tests for GcpRequest equality and difference computation.

    Fix: the original used ``assertNotEquals``, a deprecated alias that was
    removed in Python 3.12; all call sites now use ``assertNotEqual``.
    """

    def test_simple_no_difference(self):
        value1 = {
            'foo': 'bar',
            'test': 'original'
        }
        request = GcpRequest(value1)
        self.assertEqual(request, request)

    def test_simple_different(self):
        value1 = {
            'foo': 'bar',
            'test': 'original'
        }
        value2 = {
            'foo': 'bar',
            'test': 'different'
        }
        # difference() reports value1's side of the differing keys.
        difference = {
            'test': 'original'
        }
        request1 = GcpRequest(value1)
        request2 = GcpRequest(value2)
        self.assertNotEqual(request1, request2)
        self.assertEqual(request1.difference(request2), difference)

    def test_nested_dictionaries_no_difference(self):
        value1 = {
            'foo': {
                'quiet': {
                    'tree': 'test'
                },
                'bar': 'baz'
            },
            'test': 'original'
        }
        request = GcpRequest(value1)
        self.assertEqual(request, request)

    def test_nested_dictionaries_with_difference(self):
        value1 = {
            'foo': {
                'quiet': {
                    'tree': 'test'
                },
                'bar': 'baz'
            },
            'test': 'original'
        }
        value2 = {
            'foo': {
                'quiet': {
                    'tree': 'baz'
                },
                'bar': 'hello'
            },
            'test': 'original'
        }
        difference = {
            'foo': {
                'quiet': {
                    'tree': 'test'
                },
                'bar': 'baz'
            }
        }
        request1 = GcpRequest(value1)
        request2 = GcpRequest(value2)
        self.assertNotEqual(request1, request2)
        self.assertEqual(request1.difference(request2), difference)

    def test_arrays_strings_no_difference(self):
        value1 = {
            'foo': [
                'baz',
                'bar'
            ]
        }
        request = GcpRequest(value1)
        self.assertEqual(request, request)

    def test_arrays_strings_with_difference(self):
        value1 = {
            'foo': [
                'baz',
                'bar',
            ]
        }
        value2 = {
            'foo': [
                'baz',
                'hello'
            ]
        }
        difference = {
            'foo': [
                'bar',
            ]
        }
        request1 = GcpRequest(value1)
        request2 = GcpRequest(value2)
        self.assertNotEqual(request1, request2)
        self.assertEqual(request1.difference(request2), difference)

    def test_arrays_dicts_with_no_difference(self):
        value1 = {
            'foo': [
                {
                    'test': 'value',
                    'foo': 'bar'
                },
                {
                    'different': 'dict'
                }
            ]
        }
        request = GcpRequest(value1)
        self.assertEqual(request, request)

    def test_arrays_dicts_with_difference(self):
        value1 = {
            'foo': [
                {
                    'test': 'value',
                    'foo': 'bar'
                },
                {
                    'different': 'dict'
                }
            ]
        }
        value2 = {
            'foo': [
                {
                    'test': 'value2',
                    'foo': 'bar2'
                },
            ]
        }
        difference = {
            'foo': [
                {
                    'test': 'value',
                    'foo': 'bar'
                }
            ]
        }
        request1 = GcpRequest(value1)
        request2 = GcpRequest(value2)
        self.assertNotEqual(request1, request2)
        self.assertEqual(request1.difference(request2), difference)

View file

@@ -0,0 +1,371 @@
# -*- coding: utf-8 -*-
# (c) 2016, Tom Melendez <tom@supertom.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from ansible_collections.community.general.tests.unit.compat import mock, unittest
from ansible_collections.community.general.plugins.module_utils.gcp import check_min_pkg_version, GCPUtils, GCPInvalidURLError
def build_distribution(version):
    """Fake pkg_resources.get_distribution side effect.

    Whatever name/version is requested, the returned mock distribution
    always reports version 0.5.0, so the version-comparison tests pivot there.
    """
    dist = mock.MagicMock()
    dist.version = '0.5.0'
    return dist
class GCPUtilsTestCase(unittest.TestCase):
    """Tests for GCPUtils helpers and check_min_pkg_version."""

    # Ansible-style (snake_case) module params used by test_params_to_gcp_dict.
    params_dict = {
        'url_map_name': 'foo_url_map_name',
        'description': 'foo_url_map description',
        'host_rules': [
            {
                'description': 'host rules description',
                'hosts': [
                    'www.example.com',
                    'www2.example.com'
                ],
                'path_matcher': 'host_rules_path_matcher'
            }
        ],
        'path_matchers': [
            {
                'name': 'path_matcher_one',
                'description': 'path matcher one',
                'defaultService': 'bes-pathmatcher-one-default',
                'pathRules': [
                    {
                        'service': 'my-one-bes',
                        'paths': [
                            '/',
                            '/aboutus'
                        ]
                    }
                ]
            },
            {
                'name': 'path_matcher_two',
                'description': 'path matcher two',
                'defaultService': 'bes-pathmatcher-two-default',
                'pathRules': [
                    {
                        'service': 'my-two-bes',
                        'paths': [
                            '/webapp',
                            '/graphs'
                        ]
                    }
                ]
            }
        ]
    }

    @mock.patch("pkg_resources.get_distribution", side_effect=build_distribution)
    def test_check_minimum_pkg_version(self, mockobj):
        # build_distribution always reports 0.5.0, so >= comparisons pivot there.
        self.assertTrue(check_min_pkg_version('foobar', '0.4.0'))
        self.assertTrue(check_min_pkg_version('foobar', '0.5.0'))
        self.assertFalse(check_min_pkg_version('foobar', '0.6.0'))

    def test_parse_gcp_url(self):
        # region, resource, entity, method
        input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/regions/us-east1/instanceGroupManagers/my-mig/recreateInstances'
        actual = GCPUtils.parse_gcp_url(input_url)
        self.assertEqual('compute', actual['service'])
        self.assertEqual('v1', actual['api_version'])
        self.assertEqual('myproject', actual['project'])
        self.assertEqual('us-east1', actual['region'])
        self.assertEqual('instanceGroupManagers', actual['resource_name'])
        self.assertEqual('my-mig', actual['entity_name'])
        self.assertEqual('recreateInstances', actual['method_name'])
        # zone, resource, entity, method
        input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/zones/us-east1-c/instanceGroupManagers/my-mig/recreateInstances'
        actual = GCPUtils.parse_gcp_url(input_url)
        self.assertEqual('compute', actual['service'])
        self.assertEqual('v1', actual['api_version'])
        self.assertEqual('myproject', actual['project'])
        self.assertEqual('us-east1-c', actual['zone'])
        self.assertEqual('instanceGroupManagers', actual['resource_name'])
        self.assertEqual('my-mig', actual['entity_name'])
        self.assertEqual('recreateInstances', actual['method_name'])
        # global, resource
        input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/global/urlMaps'
        actual = GCPUtils.parse_gcp_url(input_url)
        self.assertEqual('compute', actual['service'])
        self.assertEqual('v1', actual['api_version'])
        self.assertEqual('myproject', actual['project'])
        self.assertTrue('global' in actual)
        self.assertTrue(actual['global'])
        self.assertEqual('urlMaps', actual['resource_name'])
        # global, resource, entity
        input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/global/urlMaps/my-url-map'
        actual = GCPUtils.parse_gcp_url(input_url)
        self.assertEqual('myproject', actual['project'])
        self.assertTrue('global' in actual)
        self.assertTrue(actual['global'])
        self.assertEqual('v1', actual['api_version'])
        self.assertEqual('compute', actual['service'])
        # global URL, resource, entity, method_name
        input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/global/backendServices/mybackendservice/getHealth'
        actual = GCPUtils.parse_gcp_url(input_url)
        self.assertEqual('compute', actual['service'])
        self.assertEqual('v1', actual['api_version'])
        self.assertEqual('myproject', actual['project'])
        self.assertTrue('global' in actual)
        self.assertTrue(actual['global'])
        self.assertEqual('backendServices', actual['resource_name'])
        self.assertEqual('mybackendservice', actual['entity_name'])
        self.assertEqual('getHealth', actual['method_name'])
        # no location in URL
        input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/targetHttpProxies/mytargetproxy/setUrlMap'
        actual = GCPUtils.parse_gcp_url(input_url)
        self.assertEqual('compute', actual['service'])
        self.assertEqual('v1', actual['api_version'])
        self.assertEqual('myproject', actual['project'])
        self.assertFalse('global' in actual)
        self.assertEqual('targetHttpProxies', actual['resource_name'])
        self.assertEqual('mytargetproxy', actual['entity_name'])
        self.assertEqual('setUrlMap', actual['method_name'])
        input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/targetHttpProxies/mytargetproxy'
        actual = GCPUtils.parse_gcp_url(input_url)
        self.assertEqual('compute', actual['service'])
        self.assertEqual('v1', actual['api_version'])
        self.assertEqual('myproject', actual['project'])
        self.assertFalse('global' in actual)
        self.assertEqual('targetHttpProxies', actual['resource_name'])
        self.assertEqual('mytargetproxy', actual['entity_name'])
        input_url = 'https://www.googleapis.com/compute/v1/projects/myproject/targetHttpProxies'
        actual = GCPUtils.parse_gcp_url(input_url)
        self.assertEqual('compute', actual['service'])
        self.assertEqual('v1', actual['api_version'])
        self.assertEqual('myproject', actual['project'])
        self.assertFalse('global' in actual)
        self.assertEqual('targetHttpProxies', actual['resource_name'])
        # test exceptions
        no_projects_input_url = 'https://www.googleapis.com/compute/v1/not-projects/myproject/global/backendServices/mybackendservice/getHealth'
        no_resource_input_url = 'https://www.googleapis.com/compute/v1/not-projects/myproject/global'
        no_resource_no_loc_input_url = 'https://www.googleapis.com/compute/v1/not-projects/myproject'
        with self.assertRaises(GCPInvalidURLError) as cm:
            GCPUtils.parse_gcp_url(no_projects_input_url)
        self.assertTrue(cm.exception, GCPInvalidURLError)
        with self.assertRaises(GCPInvalidURLError) as cm:
            GCPUtils.parse_gcp_url(no_resource_input_url)
        self.assertTrue(cm.exception, GCPInvalidURLError)
        with self.assertRaises(GCPInvalidURLError) as cm:
            GCPUtils.parse_gcp_url(no_resource_no_loc_input_url)
        self.assertTrue(cm.exception, GCPInvalidURLError)

    def test_params_to_gcp_dict(self):
        # snake_case Ansible params become camelCase GCP fields, and the
        # given name key ('url_map_name') is renamed to 'name'.
        expected = {
            'description': 'foo_url_map description',
            'hostRules': [
                {
                    'description': 'host rules description',
                    'hosts': [
                        'www.example.com',
                        'www2.example.com'
                    ],
                    'pathMatcher': 'host_rules_path_matcher'
                }
            ],
            'name': 'foo_url_map_name',
            'pathMatchers': [
                {
                    'defaultService': 'bes-pathmatcher-one-default',
                    'description': 'path matcher one',
                    'name': 'path_matcher_one',
                    'pathRules': [
                        {
                            'paths': [
                                '/',
                                '/aboutus'
                            ],
                            'service': 'my-one-bes'
                        }
                    ]
                },
                {
                    'defaultService': 'bes-pathmatcher-two-default',
                    'description': 'path matcher two',
                    'name': 'path_matcher_two',
                    'pathRules': [
                        {
                            'paths': [
                                '/webapp',
                                '/graphs'
                            ],
                            'service': 'my-two-bes'
                        }
                    ]
                }
            ]
        }
        actual = GCPUtils.params_to_gcp_dict(self.params_dict, 'url_map_name')
        self.assertEqual(expected, actual)

    def test_get_gcp_resource_from_methodId(self):
        input_data = 'compute.urlMaps.list'
        actual = GCPUtils.get_gcp_resource_from_methodId(input_data)
        self.assertEqual('urlMaps', actual)
        # Non-string / missing input yields a falsey result.
        input_data = None
        actual = GCPUtils.get_gcp_resource_from_methodId(input_data)
        self.assertFalse(actual)
        input_data = 666
        actual = GCPUtils.get_gcp_resource_from_methodId(input_data)
        self.assertFalse(actual)

    def test_get_entity_name_from_resource_name(self):
        # Plural resource names map to singular entity names.
        input_data = 'urlMaps'
        actual = GCPUtils.get_entity_name_from_resource_name(input_data)
        self.assertEqual('urlMap', actual)
        input_data = 'targetHttpProxies'
        actual = GCPUtils.get_entity_name_from_resource_name(input_data)
        self.assertEqual('targetHttpProxy', actual)
        input_data = 'globalForwardingRules'
        actual = GCPUtils.get_entity_name_from_resource_name(input_data)
        self.assertEqual('forwardingRule', actual)
        # Empty or non-string input yields None.
        input_data = ''
        actual = GCPUtils.get_entity_name_from_resource_name(input_data)
        self.assertEqual(None, actual)
        input_data = 666
        actual = GCPUtils.get_entity_name_from_resource_name(input_data)
        self.assertEqual(None, actual)

    def test_are_params_equal(self):
        params1 = {'one': 1}
        params2 = {'one': 1}
        actual = GCPUtils.are_params_equal(params1, params2)
        self.assertTrue(actual)
        params1 = {'one': 1}
        params2 = {'two': 2}
        actual = GCPUtils.are_params_equal(params1, params2)
        self.assertFalse(actual)
        # Key order must not matter.
        params1 = {'three': 3, 'two': 2, 'one': 1}
        params2 = {'one': 1, 'two': 2, 'three': 3}
        actual = GCPUtils.are_params_equal(params1, params2)
        self.assertTrue(actual)
        params1 = {
            "creationTimestamp": "2017-04-21T11:19:20.718-07:00",
            "defaultService": "https://www.googleapis.com/compute/v1/projects/myproject/global/backendServices/default-backend-service",
            "description": "",
            "fingerprint": "ickr_pwlZPU=",
            "hostRules": [
                {
                    "description": "",
                    "hosts": [
                        "*."
                    ],
                    "pathMatcher": "path-matcher-one"
                }
            ],
            "id": "8566395781175047111",
            "kind": "compute#urlMap",
            "name": "newtesturlmap-foo",
            "pathMatchers": [
                {
                    "defaultService": "https://www.googleapis.com/compute/v1/projects/myproject/global/backendServices/bes-pathmatcher-one-default",
                    "description": "path matcher one",
                    "name": "path-matcher-one",
                    "pathRules": [
                        {
                            "paths": [
                                "/data",
                                "/aboutus"
                            ],
                            "service": "https://www.googleapis.com/compute/v1/projects/myproject/global/backendServices/my-one-bes"
                        }
                    ]
                }
            ],
            "selfLink": "https://www.googleapis.com/compute/v1/projects/myproject/global/urlMaps/newtesturlmap-foo"
        }
        params2 = {
            "defaultService": "https://www.googleapis.com/compute/v1/projects/myproject/global/backendServices/default-backend-service",
            "hostRules": [
                {
                    "description": "",
                    "hosts": [
                        "*."
                    ],
                    "pathMatcher": "path-matcher-one"
                }
            ],
            "name": "newtesturlmap-foo",
            "pathMatchers": [
                {
                    "defaultService": "https://www.googleapis.com/compute/v1/projects/myproject/global/backendServices/bes-pathmatcher-one-default",
                    "description": "path matcher one",
                    "name": "path-matcher-one",
                    "pathRules": [
                        {
                            "paths": [
                                "/data",
                                "/aboutus"
                            ],
                            "service": "https://www.googleapis.com/compute/v1/projects/myproject/global/backendServices/my-one-bes"
                        }
                    ]
                }
            ],
        }
        # params1 has exclude fields, params2 doesn't. Should be equal
        actual = GCPUtils.are_params_equal(params1, params2)
        self.assertTrue(actual)

    def test_filter_gcp_fields(self):
        input_data = {
            u'kind': u'compute#httpsHealthCheck',
            u'description': u'',
            u'timeoutSec': 5,
            u'checkIntervalSec': 5,
            u'port': 443,
            u'healthyThreshold': 2,
            u'host': u'',
            u'requestPath': u'/',
            u'unhealthyThreshold': 2,
            u'creationTimestamp': u'2017-05-16T15:09:36.546-07:00',
            u'id': u'8727093129334146639',
            u'selfLink': u'https://www.googleapis.com/compute/v1/projects/myproject/global/httpsHealthChecks/myhealthcheck',
            u'name': u'myhealthcheck'}
        # GCP-internal bookkeeping fields (kind, id, selfLink, timestamps,
        # empty description) are stripped; user-facing fields survive.
        expected = {
            'name': 'myhealthcheck',
            'checkIntervalSec': 5,
            'port': 443,
            'unhealthyThreshold': 2,
            'healthyThreshold': 2,
            'host': '',
            'timeoutSec': 5,
            'requestPath': '/'}
        actual = GCPUtils.filter_gcp_fields(input_data)
        self.assertEqual(expected, actual)

View file

@ -0,0 +1,176 @@
# -*- coding: utf-8 -*-
# 2018.07.26 --- use DictComparison instead of GcpRequest
#
# (c) 2016, Tom Melendez <tom@supertom.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.module_utils.hwc_utils import are_different_dicts
class HwcDictComparisonTestCase(unittest.TestCase):
    """Tests for hwc_utils.are_different_dicts().

    Identical structures must compare as not different; any changed value,
    missing key, changed nesting or altered list element must be reported
    as a difference.
    """

    def test_simple_no_difference(self):
        flat = {'foo': 'bar', 'test': 'original'}
        self.assertFalse(are_different_dicts(flat, flat))

    def test_simple_different(self):
        base = {'foo': 'bar', 'test': 'original'}
        changed_value = {'foo': 'bar', 'test': 'different'}
        missing_key = {'test': 'original'}
        self.assertTrue(are_different_dicts(base, changed_value))
        self.assertTrue(are_different_dicts(base, missing_key))
        self.assertTrue(are_different_dicts(changed_value, missing_key))

    def test_nested_dictionaries_no_difference(self):
        nested = {'foo': {'quiet': {'tree': 'test'}, 'bar': 'baz'}, 'test': 'original'}
        self.assertFalse(are_different_dicts(nested, nested))

    def test_nested_dictionaries_with_difference(self):
        base = {'foo': {'quiet': {'tree': 'test'}, 'bar': 'baz'}, 'test': 'original'}
        changed_leaves = {'foo': {'quiet': {'tree': 'baz'}, 'bar': 'hello'}, 'test': 'original'}
        missing_key = {'foo': {'quiet': {'tree': 'test'}, 'bar': 'baz'}}
        self.assertTrue(are_different_dicts(base, changed_leaves))
        self.assertTrue(are_different_dicts(base, missing_key))
        self.assertTrue(are_different_dicts(changed_leaves, missing_key))

    def test_arrays_strings_no_difference(self):
        listed = {'foo': ['baz', 'bar']}
        self.assertFalse(are_different_dicts(listed, listed))

    def test_arrays_strings_with_difference(self):
        base = {'foo': ['baz', 'bar', ]}
        changed_item = {'foo': ['baz', 'hello']}
        shorter = {'foo': ['bar', ]}
        self.assertTrue(are_different_dicts(base, changed_item))
        self.assertTrue(are_different_dicts(base, shorter))
        self.assertTrue(are_different_dicts(changed_item, shorter))

    def test_arrays_dicts_with_no_difference(self):
        dict_list = {'foo': [{'test': 'value', 'foo': 'bar'}, {'different': 'dict'}]}
        self.assertFalse(are_different_dicts(dict_list, dict_list))

    def test_arrays_dicts_with_difference(self):
        base = {'foo': [{'test': 'value', 'foo': 'bar'}, {'different': 'dict'}]}
        changed = {'foo': [{'test': 'value2', 'foo': 'bar2'}, ]}
        trimmed = {'foo': [{'test': 'value', 'foo': 'bar'}]}
        self.assertTrue(are_different_dicts(base, changed))
        self.assertTrue(are_different_dicts(base, trimmed))
        self.assertTrue(are_different_dicts(changed, trimmed))

View file

@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.module_utils.hwc_utils import (HwcModuleException, navigate_value)
class HwcUtilsTestCase(unittest.TestCase):
    """Tests for hwc_utils.navigate_value()."""

    def test_navigate_value(self):
        data = {
            'foo': {
                'quiet': {
                    'tree': 'test',
                    "trees": [0, 1]
                },
            }
        }

        # Plain nested-dict lookup.
        self.assertEqual(navigate_value(data, ["foo", "quiet", "tree"]),
                         "test")

        # List elements are addressed via the dotted-path index map.
        self.assertEqual(
            navigate_value(data, ["foo", "quiet", "trees"],
                           {"foo.quiet.trees": 1}),
            1)

        # Unknown key and out-of-range index both raise HwcModuleException.
        # (assertRaisesRegexp is the Python-2-compatible spelling.)
        self.assertRaisesRegexp(HwcModuleException,
                                r".* key\(q\) is not exist in dict",
                                navigate_value, data, ["foo", "q", "tree"])
        self.assertRaisesRegexp(HwcModuleException,
                                r".* the index is out of list",
                                navigate_value, data,
                                ["foo", "quiet", "trees"],
                                {"foo.quiet.trees": 2})

View file

@ -0,0 +1,169 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
from itertools import count
from ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak import (
get_token,
KeycloakError,
)
from ansible.module_utils.six import StringIO
from ansible.module_utils.six.moves.urllib.error import HTTPError
def build_mocked_request(get_id_user_count, response_dict):
    """Return a callable with open_url's signature that serves canned data.

    The callable looks the requested URL up in *response_dict* and resolves
    whatever is stored there through get_response(), threading
    *get_id_user_count* so list entries are consumed call by call.
    """
    def _mocked_requests(*args, **kwargs):
        return get_response(
            response_dict.get(args[0]), kwargs['method'], get_id_user_count)
    return _mocked_requests
def get_response(object_with_future_response, method, get_id_call_count):
    """Recursively resolve a canned response description to a concrete value.

    :param object_with_future_response: the stored response; may be a
        callable (called and its result returned), a dict keyed by HTTP
        method, a list indexed by the next value from *get_id_call_count*,
        or a plain value returned as-is.
    :param method: HTTP method used to pick an entry from dict descriptions.
    :param get_id_call_count: iterator yielding the index for list entries.
    """
    if callable(object_with_future_response):
        return object_with_future_response()
    if isinstance(object_with_future_response, dict):
        return get_response(
            object_with_future_response[method], method, get_id_call_count)
    if isinstance(object_with_future_response, list):
        # The builtin next() dispatches to .next() on Python 2 and
        # .__next__() on Python 3, so no manual AttributeError fallback
        # is needed here.
        call_number = next(get_id_call_count)
        return get_response(
            object_with_future_response[call_number], method, get_id_call_count)
    return object_with_future_response
def create_wrapper(text_as_string):
    """Allow one mocked address to be read repeatedly.

    Each invocation of the returned factory yields a fresh StringIO over
    *text_as_string*; a single shared StringIO would be exhausted after
    the first read.
    """
    return lambda: StringIO(text_as_string)
@pytest.fixture()
def mock_good_connection(mocker):
    """Patch open_url so the token endpoint returns a valid token payload."""
    responses = {
        'http://keycloak.url/auth/realms/master/protocol/openid-connect/token':
            create_wrapper('{"access_token": "alongtoken"}'),
    }
    return mocker.patch(
        'ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak.open_url',
        side_effect=build_mocked_request(count(), responses),
        autospec=True
    )
def test_connect_to_keycloak(mock_good_connection):
    """A successful token request must yield ready-to-use auth headers."""
    expected_header = {
        'Authorization': 'Bearer alongtoken',
        'Content-Type': 'application/json'
    }
    assert get_token(
        base_url='http://keycloak.url/auth',
        validate_certs=True,
        auth_realm='master',
        client_id='admin-cli',
        auth_username='admin',
        auth_password='admin',
        client_secret=None
    ) == expected_header
@pytest.fixture()
def mock_bad_json_returned(mocker):
    """Patch open_url so the token endpoint returns truncated JSON."""
    responses = {
        'http://keycloak.url/auth/realms/master/protocol/openid-connect/token':
            create_wrapper('{"access_token":'),
    }
    return mocker.patch(
        'ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak.open_url',
        side_effect=build_mocked_request(count(), responses),
        autospec=True
    )
def test_bad_json_returned(mock_bad_json_returned):
    """Truncated JSON in the token response must raise KeycloakError."""
    with pytest.raises(KeycloakError) as raised_error:
        get_token(
            base_url='http://keycloak.url/auth',
            validate_certs=True,
            auth_realm='master',
            client_id='admin-cli',
            auth_username='admin',
            auth_password='admin',
            client_secret=None
        )
    # Only the stable prefix is checked: the JSON decoder's message text
    # differs between Python 2.6, 2.7 and 3.x.
    expected_prefix = (
        'API returned invalid JSON when trying to obtain access token from '
        'http://keycloak.url/auth/realms/master/protocol/openid-connect/token: '
    )
    assert expected_prefix in str(raised_error.value)
def raise_401(url):
    """Return a zero-argument callable that raises HTTP 401 for *url*,
    mimicking open_url hitting an unauthorized endpoint."""
    def _raise_401():
        # Positional order matches HTTPError(url, code, msg, hdrs, fp).
        raise HTTPError(url, 401, 'Unauthorized', '', StringIO(''))
    return _raise_401
@pytest.fixture()
def mock_401_returned(mocker):
    """Patch open_url so the token endpoint answers HTTP 401."""
    token_url = 'http://keycloak.url/auth/realms/master/protocol/openid-connect/token'
    return mocker.patch(
        'ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak.open_url',
        side_effect=build_mocked_request(count(), {token_url: raise_401(token_url)}),
        autospec=True
    )
def test_error_returned(mock_401_returned):
    """An HTTP error from the token endpoint must surface as KeycloakError."""
    with pytest.raises(KeycloakError) as raised_error:
        get_token(
            base_url='http://keycloak.url/auth',
            validate_certs=True,
            auth_realm='master',
            client_id='admin-cli',
            auth_username='notadminuser',
            auth_password='notadminpassword',
            client_secret=None
        )
    expected = (
        'Could not obtain access token from http://keycloak.url'
        '/auth/realms/master/protocol/openid-connect/token: '
        'HTTP Error 401: Unauthorized'
    )
    assert str(raised_error.value) == expected
@pytest.fixture()
def mock_json_without_token_returned(mocker):
    """Patch open_url so the token endpoint returns JSON lacking access_token."""
    responses = {
        'http://keycloak.url/auth/realms/master/protocol/openid-connect/token':
            create_wrapper('{"not_token": "It is not a token"}'),
    }
    return mocker.patch(
        'ansible_collections.community.general.plugins.module_utils.identity.keycloak.keycloak.open_url',
        side_effect=build_mocked_request(count(), responses),
        autospec=True
    )
def test_json_without_token_returned(mock_json_without_token_returned):
    """A token response without an access_token key must raise KeycloakError."""
    with pytest.raises(KeycloakError) as raised_error:
        get_token(
            base_url='http://keycloak.url/auth',
            validate_certs=True,
            auth_realm='master',
            client_id='admin-cli',
            auth_username='admin',
            auth_password='admin',
            client_secret=None
        )
    expected = (
        'Could not obtain access token from http://keycloak.url'
        '/auth/realms/master/protocol/openid-connect/token'
    )
    assert str(raised_error.value) == expected

View file

@ -0,0 +1,251 @@
# (c) 2018 Red Hat, Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import copy
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.tests.unit.compat.mock import patch, MagicMock, Mock
from ansible_collections.community.general.plugins.module_utils.net_tools.nios import api
class TestNiosApi(unittest.TestCase):
    """Tests for the Infoblox NIOS WAPI wrapper (net_tools.nios.api).

    The real connector is patched out in setUp(); each test drives
    WapiModule.run() against a canned get_object() result and checks the
    resulting create/update/delete calls and the ``changed`` flag.
    """

    def setUp(self):
        super(TestNiosApi, self).setUp()
        # Minimal AnsibleModule stand-in; individual tests overwrite .params.
        self.module = MagicMock(name='AnsibleModule')
        self.module.check_mode = False
        self.module.params = {'provider': None}
        # Keep WapiBase from opening a real WAPI connection.
        self.mock_connector = patch('ansible_collections.community.general.plugins.module_utils.net_tools.nios.api.get_connector')
        self.mock_connector.start()

    def tearDown(self):
        super(TestNiosApi, self).tearDown()
        self.mock_connector.stop()

    def test_get_provider_spec(self):
        # provider_spec must expose exactly the documented provider options.
        provider_options = ['host', 'username', 'password', 'validate_certs', 'silent_ssl_warnings',
                            'http_request_timeout', 'http_pool_connections',
                            'http_pool_maxsize', 'max_retries', 'wapi_version', 'max_results']
        res = api.WapiBase.provider_spec
        self.assertIsNotNone(res)
        self.assertIn('provider', res)
        self.assertIn('options', res['provider'])
        returned_options = res['provider']['options']
        self.assertEqual(sorted(provider_options), sorted(returned_options.keys()))

    def _get_wapi(self, test_object):
        # Helper: build a WapiModule whose remote accessors are mocked —
        # get_object() serves the fixture, the mutators only record calls.
        wapi = api.WapiModule(self.module)
        wapi.get_object = Mock(name='get_object', return_value=test_object)
        wapi.create_object = Mock(name='create_object')
        wapi.update_object = Mock(name='update_object')
        wapi.delete_object = Mock(name='delete_object')
        return wapi

    def test_wapi_no_change(self):
        # Remote object already matches the module params -> changed is False.
        self.module.params = {'provider': None, 'state': 'present', 'name': 'default',
                              'comment': 'test comment', 'extattrs': None}
        test_object = [
            {
                "comment": "test comment",
                "_ref": "networkview/ZG5zLm5ldHdvcmtfdmlldyQw:default/true",
                # NOTE(review): self.module is a MagicMock, so this resolves
                # to a deterministic mock object (not the string 'default');
                # presumably mirrors WapiModule's _check_type_dict() name
                # handling — confirm against the api implementation.
                "name": self.module._check_type_dict().__getitem__(),
                "extattrs": {}
            }
        ]
        test_spec = {
            "name": {"ib_req": True},
            "comment": {},
            "extattrs": {}
        }
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertFalse(res['changed'])

    def test_wapi_change(self):
        # Changed comment -> changed is True.
        self.module.params = {'provider': None, 'state': 'present', 'name': 'default',
                              'comment': 'updated comment', 'extattrs': None}
        test_object = [
            {
                "comment": "test comment",
                "_ref": "networkview/ZG5zLm5ldHdvcmtfdmlldyQw:default/true",
                "name": "default",
                "extattrs": {}
            }
        ]
        test_spec = {
            "name": {"ib_req": True},
            "comment": {},
            "extattrs": {}
        }
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertTrue(res['changed'])
        # NOTE(review): Mock has no called_once_with(); this expression is a
        # no-op attribute access and verifies nothing — assert_called_once_with
        # was probably intended (left unchanged to avoid flipping outcomes).
        wapi.update_object.called_once_with(test_object)

    def test_wapi_change_false(self):
        # A field marked update=False must not block the update decision.
        self.module.params = {'provider': None, 'state': 'present', 'name': 'default',
                              'comment': 'updated comment', 'extattrs': None, 'fqdn': 'foo'}
        test_object = [
            {
                "comment": "test comment",
                "_ref": "networkview/ZG5zLm5ldHdvcmtfdmlldyQw:default/true",
                "name": "default",
                "extattrs": {}
            }
        ]
        test_spec = {
            "name": {"ib_req": True},
            "fqdn": {"ib_req": True, 'update': False},
            "comment": {},
            "extattrs": {}
        }
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertTrue(res['changed'])
        # NOTE(review): no-op, see test_wapi_change.
        wapi.update_object.called_once_with(test_object)

    def test_wapi_extattrs_change(self):
        # Changed extattr value -> update_object called with merged payload.
        self.module.params = {'provider': None, 'state': 'present', 'name': 'default',
                              'comment': 'test comment', 'extattrs': {'Site': 'update'}}
        ref = "networkview/ZG5zLm5ldHdvcmtfdmlldyQw:default/true"
        test_object = [{
            "comment": "test comment",
            "_ref": ref,
            "name": "default",
            "extattrs": {'Site': {'value': 'test'}}
        }]
        test_spec = {
            "name": {"ib_req": True},
            "comment": {},
            "extattrs": {}
        }
        # Expected update payload: fixture with the new extattr value, the
        # mocked name lookup, and without the _ref key.
        kwargs = copy.deepcopy(test_object[0])
        kwargs['extattrs']['Site']['value'] = 'update'
        kwargs['name'] = self.module._check_type_dict().__getitem__()
        del kwargs['_ref']
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertTrue(res['changed'])
        wapi.update_object.assert_called_once_with(ref, kwargs)

    def test_wapi_extattrs_nochange(self):
        # Identical extattrs -> changed is False.
        self.module.params = {'provider': None, 'state': 'present', 'name': 'default',
                              'comment': 'test comment', 'extattrs': {'Site': 'test'}}
        test_object = [{
            "comment": "test comment",
            "_ref": "networkview/ZG5zLm5ldHdvcmtfdmlldyQw:default/true",
            "name": self.module._check_type_dict().__getitem__(),
            "extattrs": {'Site': {'value': 'test'}}
        }]
        test_spec = {
            "name": {"ib_req": True},
            "comment": {},
            "extattrs": {}
        }
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertFalse(res['changed'])

    def test_wapi_create(self):
        # No remote object and state=present -> create_object is called.
        self.module.params = {'provider': None, 'state': 'present', 'name': 'ansible',
                              'comment': None, 'extattrs': None}
        test_object = None
        test_spec = {
            "name": {"ib_req": True},
            "comment": {},
            "extattrs": {}
        }
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertTrue(res['changed'])
        wapi.create_object.assert_called_once_with('testobject', {'name': self.module._check_type_dict().__getitem__()})

    def test_wapi_delete(self):
        # Existing object and state=absent -> delete_object with its _ref.
        self.module.params = {'provider': None, 'state': 'absent', 'name': 'ansible',
                              'comment': None, 'extattrs': None}
        ref = "networkview/ZG5zLm5ldHdvcmtfdmlldyQw:ansible/false"
        test_object = [{
            "comment": "test comment",
            "_ref": ref,
            "name": "ansible",
            "extattrs": {'Site': {'value': 'test'}}
        }]
        test_spec = {
            "name": {"ib_req": True},
            "comment": {},
            "extattrs": {}
        }
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertTrue(res['changed'])
        wapi.delete_object.assert_called_once_with(ref)

    def test_wapi_strip_network_view(self):
        # network_view must be stripped from the update payload.
        self.module.params = {'provider': None, 'state': 'present', 'name': 'ansible',
                              'comment': 'updated comment', 'extattrs': None,
                              'network_view': 'default'}
        test_object = [{
            "comment": "test comment",
            "_ref": "view/ZG5zLm5ldHdvcmtfdmlldyQw:ansible/true",
            "name": "ansible",
            "extattrs": {},
            "network_view": "default"
        }]
        test_spec = {
            "name": {"ib_req": True},
            "network_view": {"ib_req": True},
            "comment": {},
            "extattrs": {}
        }
        # Expected payload: updated comment, mocked name lookup, with
        # network_view and extattrs removed and _ref split off as the
        # positional reference argument.
        kwargs = test_object[0].copy()
        ref = kwargs.pop('_ref')
        kwargs['comment'] = 'updated comment'
        kwargs['name'] = self.module._check_type_dict().__getitem__()
        del kwargs['network_view']
        del kwargs['extattrs']
        wapi = self._get_wapi(test_object)
        res = wapi.run('testobject', test_spec)
        self.assertTrue(res['changed'])
        wapi.update_object.assert_called_once_with(ref, kwargs)

View file

@ -0,0 +1,328 @@
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Dag Wieers (@dagwieers) <dag@wieers.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.cisco.aci.plugins.module_utils.aci import ACIModule
from ansible.module_utils.six import PY2
from ansible.module_utils._text import to_native
import pytest
class AltModule():
    """Minimal AnsibleModule stand-in: only the params ACIModule reads."""
    params = {
        'hostname': 'dummy',
        'port': 123,
        'protocol': 'https',
        'state': 'present',
    }
class AltACIModule(ACIModule):
    """ACIModule variant whose __init__ skips the real setup (no connection,
    no auth) so response parsing can be exercised in isolation."""
    def __init__(self):
        self.result = {'changed': False}
        self.module = AltModule
        self.params = AltModule.params
# Shared module-level fixture: one pre-built ACI module stub reused by all
# AciRest test methods below.
aci = AltACIModule()

try:
    from lxml import etree
    if sys.version_info >= (2, 7):
        # xmljson supports Python 2.7+ only.
        from xmljson import cobra
except ImportError:
    # Without lxml/xmljson the XML code paths cannot run; skip the module.
    pytestmark = pytest.mark.skip("ACI Ansible modules require the lxml and xmljson Python libraries")
class AciRest(unittest.TestCase):
    """Tests for ACIModule.response_json()/response_xml() parsing.

    Canned APIC payloads are fed to the shared module-level ``aci`` stub and
    the parsed ``error``/``imdata``/``totalCount`` attributes are checked.
    XML checks are skipped on Python < 2.7 (xmljson requires 2.7+), and the
    expected parser error text varies with the lxml/Python version.
    """

    def test_invalid_aci_login(self):
        """A 401 login failure must surface code/text via error and imdata."""
        self.maxDiff = None
        error = dict(
            code='401',
            text='Username or password is incorrect - FAILED local authentication',
        )
        imdata = [{
            'error': {
                'attributes': {
                    'code': '401',
                    'text': 'Username or password is incorrect - FAILED local authentication',
                },
            },
        }]
        totalCount = 1

        json_response = '{"totalCount":"1","imdata":[{"error":{"attributes":{"code":"401","text":"Username or password is incorrect - FAILED local authentication"}}}]}'  # NOQA
        aci.response_json(json_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)

        # Python 2.7+ is needed for xmljson
        if sys.version_info < (2, 7):
            return

        xml_response = '''<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1">
<error code="401" text="Username or password is incorrect - FAILED local authentication"/>
</imdata>
'''
        aci.response_xml(xml_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)

    def test_valid_aci_login(self):
        """A successful login payload must parse into the full imdata tree."""
        self.maxDiff = None
        imdata = [{
            'aaaLogin': {
                'attributes': {
                    'token': 'ZldYAsoO9d0FfAQM8xaEVWvQPSOYwpnqzhwpIC1r4MaToknJjlIuAt9+TvXqrZ8lWYIGPj6VnZkWiS8nJfaiaX/AyrdD35jsSxiP3zydh+849xym7ALCw/fFNsc7b5ik1HaMuSUtdrN8fmCEUy7Pq/QNpGEqkE8m7HaxAuHpmvXgtdW1bA+KKJu2zY1c/tem',  # NOQA
                    'siteFingerprint': 'NdxD72K/uXaUK0wn',
                    'refreshTimeoutSeconds': '600',
                    'maximumLifetimeSeconds': '86400',
                    'guiIdleTimeoutSeconds': '1200',
                    'restTimeoutSeconds': '90',
                    'creationTime': '1500134817',
                    'firstLoginTime': '1500134817',
                    'userName': 'admin',
                    'remoteUser': 'false',
                    'unixUserId': '15374',
                    'sessionId': 'o7hObsqNTfCmDGcZI5c4ng==',
                    'lastName': '',
                    'firstName': '',
                    'version': '2.0(2f)',
                    'buildTime': 'Sat Aug 20 23:07:07 PDT 2016',
                    'node': 'topology/pod-1/node-1',
                },
                'children': [{
                    'aaaUserDomain': {
                        'attributes': {
                            'name': 'all',
                            'rolesR': 'admin',
                            'rolesW': 'admin',
                        },
                        'children': [{
                            'aaaReadRoles': {
                                'attributes': {},
                            },
                        }, {
                            'aaaWriteRoles': {
                                'attributes': {},
                                'children': [{
                                    'role': {
                                        'attributes': {
                                            'name': 'admin',
                                        },
                                    },
                                }],
                            },
                        }],
                    },
                }, {
                    'DnDomainMapEntry': {
                        'attributes': {
                            'dn': 'uni/tn-common',
                            'readPrivileges': 'admin',
                            'writePrivileges': 'admin',
                        },
                    },
                }, {
                    'DnDomainMapEntry': {
                        'attributes': {
                            'dn': 'uni/tn-infra',
                            'readPrivileges': 'admin',
                            'writePrivileges': 'admin',
                        },
                    },
                }, {
                    'DnDomainMapEntry': {
                        'attributes': {
                            'dn': 'uni/tn-mgmt',
                            'readPrivileges': 'admin',
                            'writePrivileges': 'admin',
                        },
                    },
                }],
            },
        }]
        totalCount = 1

        json_response = '{"totalCount":"1","imdata":[{"aaaLogin":{"attributes":{"token":"ZldYAsoO9d0FfAQM8xaEVWvQPSOYwpnqzhwpIC1r4MaToknJjlIuAt9+TvXqrZ8lWYIGPj6VnZkWiS8nJfaiaX/AyrdD35jsSxiP3zydh+849xym7ALCw/fFNsc7b5ik1HaMuSUtdrN8fmCEUy7Pq/QNpGEqkE8m7HaxAuHpmvXgtdW1bA+KKJu2zY1c/tem","siteFingerprint":"NdxD72K/uXaUK0wn","refreshTimeoutSeconds":"600","maximumLifetimeSeconds":"86400","guiIdleTimeoutSeconds":"1200","restTimeoutSeconds":"90","creationTime":"1500134817","firstLoginTime":"1500134817","userName":"admin","remoteUser":"false","unixUserId":"15374","sessionId":"o7hObsqNTfCmDGcZI5c4ng==","lastName":"","firstName":"","version":"2.0(2f)","buildTime":"Sat Aug 20 23:07:07 PDT 2016","node":"topology/pod-1/node-1"},"children":[{"aaaUserDomain":{"attributes":{"name":"all","rolesR":"admin","rolesW":"admin"},"children":[{"aaaReadRoles":{"attributes":{}}},{"aaaWriteRoles":{"attributes":{},"children":[{"role":{"attributes":{"name":"admin"}}}]}}]}},{"DnDomainMapEntry":{"attributes":{"dn":"uni/tn-common","readPrivileges":"admin","writePrivileges":"admin"}}},{"DnDomainMapEntry":{"attributes":{"dn":"uni/tn-infra","readPrivileges":"admin","writePrivileges":"admin"}}},{"DnDomainMapEntry":{"attributes":{"dn":"uni/tn-mgmt","readPrivileges":"admin","writePrivileges":"admin"}}}]}}]}'  # NOQA
        aci.response_json(json_response)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)

        # Python 2.7+ is needed for xmljson
        if sys.version_info < (2, 7):
            return

        xml_response = '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1">\n<aaaLogin token="ZldYAsoO9d0FfAQM8xaEVWvQPSOYwpnqzhwpIC1r4MaToknJjlIuAt9+TvXqrZ8lWYIGPj6VnZkWiS8nJfaiaX/AyrdD35jsSxiP3zydh+849xym7ALCw/fFNsc7b5ik1HaMuSUtdrN8fmCEUy7Pq/QNpGEqkE8m7HaxAuHpmvXgtdW1bA+KKJu2zY1c/tem" siteFingerprint="NdxD72K/uXaUK0wn" refreshTimeoutSeconds="600" maximumLifetimeSeconds="86400" guiIdleTimeoutSeconds="1200" restTimeoutSeconds="90" creationTime="1500134817" firstLoginTime="1500134817" userName="admin" remoteUser="false" unixUserId="15374" sessionId="o7hObsqNTfCmDGcZI5c4ng==" lastName="" firstName="" version="2.0(2f)" buildTime="Sat Aug 20 23:07:07 PDT 2016" node="topology/pod-1/node-1">\n<aaaUserDomain name="all" rolesR="admin" rolesW="admin">\n<aaaReadRoles/>\n<aaaWriteRoles>\n<role name="admin"/>\n</aaaWriteRoles>\n</aaaUserDomain>\n<DnDomainMapEntry dn="uni/tn-common" readPrivileges="admin" writePrivileges="admin"/>\n<DnDomainMapEntry dn="uni/tn-infra" readPrivileges="admin" writePrivileges="admin"/>\n<DnDomainMapEntry dn="uni/tn-mgmt" readPrivileges="admin" writePrivileges="admin"/>\n</aaaLogin></imdata>\n'''  # NOQA
        aci.response_xml(xml_response)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)

    def test_invalid_input(self):
        """Error payloads must parse identically to the login-failure case."""
        # NOTE(review): fixture is identical to test_invalid_aci_login; kept
        # as a separate case for the invalid-input code path.
        self.maxDiff = None
        error = dict(
            code='401',
            text='Username or password is incorrect - FAILED local authentication',
        )
        imdata = [{
            'error': {
                'attributes': {
                    'code': '401',
                    'text': 'Username or password is incorrect - FAILED local authentication',
                },
            },
        }]
        totalCount = 1

        json_response = '{"totalCount":"1","imdata":[{"error":{"attributes":{"code":"401","text":"Username or password is incorrect - FAILED local authentication"}}}]}'  # NOQA
        aci.response_json(json_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)

        # Python 2.7+ is needed for xmljson
        if sys.version_info < (2, 7):
            return

        xml_response = '''<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1">
<error code="401" text="Username or password is incorrect - FAILED local authentication"/>
</imdata>
'''
        aci.response_xml(xml_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)

    def test_empty_response(self):
        """Empty payloads must yield a parse error and echo the raw input."""
        # Fixed typo: was 'self.maxDiffi = None', which set a useless
        # attribute instead of disabling assertEqual diff truncation.
        self.maxDiff = None

        if PY2:
            error_text = "Unable to parse output as JSON, see 'raw' output. No JSON object could be decoded"
        else:
            error_text = "Unable to parse output as JSON, see 'raw' output. Expecting value: line 1 column 1 (char 0)"
        error = dict(
            code=-1,
            text=error_text,
        )
        raw = ''
        json_response = ''
        aci.response_json(json_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.result['raw'], raw)

        # Python 2.7+ is needed for xmljson
        if sys.version_info < (2, 7):
            return
        elif etree.LXML_VERSION < (3, 3, 0, 0):
            # Fixed: a stray trailing comma previously made this assignment a
            # 1-tuple, so the aci.error comparison below could never match on
            # old lxml versions.
            error_text = "Unable to parse output as XML, see 'raw' output. None"
        elif etree.LXML_VERSION < (4, 0, 0, 0):
            error_text = to_native(u"Unable to parse output as XML, see 'raw' output. None (line 0)", errors='surrogate_or_strict')
        elif PY2:
            error_text = "Unable to parse output as XML, see 'raw' output. Document is empty, line 1, column 1 (line 1)"
        else:
            error_text = None

        xml_response = ''
        aci.response_xml(xml_response)

        if error_text is None:
            # errors vary on Python 3.8+ for unknown reasons
            # accept any of the following error messages
            errors = (
                "Unable to parse output as XML, see 'raw' output. None (line 0)",
                "Unable to parse output as XML, see 'raw' output. Document is empty, line 1, column 1 (<string>, line 1)",
            )
            for error in errors:
                if error in aci.error['text']:
                    error_text = error
                    break

        error = dict(
            code=-1,
            text=error_text,
        )
        raw = ''
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.result['raw'], raw)

    def test_invalid_response(self):
        """Malformed payloads must yield a parse error and echo the raw input."""
        self.maxDiff = None

        if sys.version_info < (2, 7):
            error_text = "Unable to parse output as JSON, see 'raw' output. Expecting object: line 1 column 8 (char 8)"
        elif PY2:
            error_text = "Unable to parse output as JSON, see 'raw' output. No JSON object could be decoded"
        else:
            error_text = "Unable to parse output as JSON, see 'raw' output. Expecting value: line 1 column 9 (char 8)"
        error = dict(
            code=-1,
            text=error_text,
        )
        raw = '{ "aaa":'
        json_response = '{ "aaa":'
        aci.response_json(json_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.result['raw'], raw)

        # Python 2.7+ is needed for xmljson
        if sys.version_info < (2, 7):
            return
        elif etree.LXML_VERSION < (3, 3, 0, 0):
            error_text = "Unable to parse output as XML, see 'raw' output. Couldn't find end of Start Tag aaa line 1, line 1, column 5"  # NOQA
        elif PY2:
            error_text = "Unable to parse output as XML, see 'raw' output. Couldn't find end of Start Tag aaa line 1, line 1, column 6 (line 1)"  # NOQA
        else:
            error_text = "Unable to parse output as XML, see 'raw' output. Couldn't find end of Start Tag aaa line 1, line 1, column 6 (<string>, line 1)"  # NOQA
        error = dict(
            code=-1,
            text=error_text,
        )
        raw = '<aaa '
        xml_response = '<aaa '
        aci.response_xml(xml_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.result['raw'], raw)

View file

@ -0,0 +1,699 @@
'''
Created on Aug 16, 2016
@author: grastogi
'''
import unittest
from ansible_collections.community.general.plugins.module_utils.network.avi.ansible_utils import \
cleanup_absent_fields, avi_obj_cmp
class TestAviApiUtils(unittest.TestCase):
    def test_avi_obj_cmp(self):
        """Sparse desired object (name only) against a fully-populated pool.

        A truthy result from avi_obj_cmp appears to mean "the existing
        object already satisfies the desired one" (server-managed fields
        such as uuid, refs and _last_modified are ignored) — confirm
        against the avi ansible_utils implementation.
        """
        obj = {'name': 'testpool'}
        # Full pool object as the Avi controller API returns it.
        existing_obj = {
            'lb_algorithm': 'LB_ALGORITHM_LEAST_CONNECTIONS',
            'use_service_port': False,
            'server_auto_scale': False,
            'host_check_enabled': False,
            'enabled': True,
            'capacity_estimation': False,
            'fewest_tasks_feedback_delay': 10,
            '_last_modified': '1471377748747040',
            'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-afe8bf2c-9821-4272-9bc6-67634c84bec9',
            'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-0e8ce760-fed2-4650-9397-5b3e4966376e',
            'inline_health_monitor': True,
            'default_server_port': 80,
            'request_queue_depth': 128,
            'graceful_disable_timeout': 1,
            'server_count': 0,
            'sni_enabled': True,
            'request_queue_enabled': False,
            'name': 'testpool',
            'max_concurrent_connections_per_server': 0,
            'url': 'https://192.0.2.42/api/pool/pool-20084ee1-872e-4103-98e1-899103e2242a',
            'tenant_ref': 'https://192.0.2.42/api/tenant/admin',
            'uuid': 'pool-20084ee1-872e-4103-98e1-899103e2242a',
            'connection_ramp_duration': 10}

        diff = avi_obj_cmp(obj, existing_obj)
        assert diff
    def test_avi_obj_cmp_w_refs(self):
        """Object references in different spellings must compare correctly.

        The existing object stores fully-qualified ref URLs suffixed with
        '#<name>'; the desired object may use the '/api/<type>?name=<name>'
        or bare-uuid spelling.  A truthy result appears to mean "existing
        object satisfies the desired one" — confirm in ansible_utils.
        """
        obj = {'name': 'testpool',
               'health_monitor_refs': ['/api/healthmonitor?name=System-HTTP'],
               'enabled': True}
        existing_obj = {
            'lb_algorithm': 'LB_ALGORITHM_LEAST_CONNECTIONS',
            'use_service_port': False,
            'server_auto_scale': False,
            'host_check_enabled': False,
            'enabled': True,
            'capacity_estimation': False,
            'fewest_tasks_feedback_delay': 10,
            '_last_modified': '1471377748747040',
            'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-afe8bf2c-9821-4272-9bc6-67634c84bec9',
            'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-0e8ce760-fed2-4650-9397-5b3e4966376e',
            'inline_health_monitor': True,
            'default_server_port': 80,
            'request_queue_depth': 128,
            'graceful_disable_timeout': 1,
            'server_count': 0,
            'sni_enabled': True,
            'request_queue_enabled': False,
            'name': 'testpool',
            'max_concurrent_connections_per_server': 0,
            'url': 'https://192.0.2.42/api/pool/pool-20084ee1-872e-4103-98e1-899103e2242a',
            'tenant_ref': 'https://192.0.2.42/api/tenant/admin',
            'uuid': 'pool-20084ee1-872e-4103-98e1-899103e2242a',
            'connection_ramp_duration': 10,
            'health_monitor_refs': [
                "https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9#System-HTTP"],
        }

        # Name-style ref matches the stored URL#name form.
        diff = avi_obj_cmp(obj, existing_obj)
        assert diff

        # server_count differs (1 vs 0) -> not satisfied.
        obj = {'name': 'testpool',
               'health_monitor_refs': ['/api/healthmonitor?name=System-HTTP'],
               'server_count': 1}
        diff = avi_obj_cmp(obj, existing_obj)
        assert not diff

        # Ref missing the leading slash -> not treated as equal.
        obj = {'name': 'testpool',
               'health_monitor_refs': ['api/healthmonitor?name=System-HTTP'],
               'server_count': 0}
        diff = avi_obj_cmp(obj, existing_obj)
        assert not diff

        # Bare-uuid spelling of the same monitor -> satisfied.
        obj = {'name': 'testpool',
               'health_monitor_refs': ['healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9'],
               'server_count': 0}
        diff = avi_obj_cmp(obj, existing_obj)
        assert diff

        # '#fragment' appended to the pool name -> not satisfied.
        obj = {'name': 'testpool#asdfasf',
               'health_monitor_refs': ['api/healthmonitor?name=System-HTTP'],
               'server_count': 0}
        diff = avi_obj_cmp(obj, existing_obj)
        assert not diff

        # Trailing '#' on the ref -> not satisfied.
        obj = {'name': 'testpool',
               'health_monitor_refs': ['/api/healthmonitor?name=System-HTTP#'],
               'server_count': 0}
        diff = avi_obj_cmp(obj, existing_obj)
        assert not diff
    def test_avi_obj_cmp_empty_list(self):
        """An explicitly empty ref list in the desired object, compared
        against an existing object that has no such key at all, still
        counts as satisfied (truthy result)."""
        obj = {'name': 'testpool',
               'health_monitor_refs': [],
               'enabled': True}
        # Existing pool without any health_monitor_refs key.
        existing_obj = {
            'lb_algorithm': 'LB_ALGORITHM_LEAST_CONNECTIONS',
            'use_service_port': False,
            'server_auto_scale': False,
            'host_check_enabled': False,
            'enabled': True,
            'capacity_estimation': False,
            'fewest_tasks_feedback_delay': 10,
            '_last_modified': '1471377748747040',
            'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-afe8bf2c-9821-4272-9bc6-67634c84bec9',
            'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-0e8ce760-fed2-4650-9397-5b3e4966376e',
            'inline_health_monitor': True,
            'default_server_port': 80,
            'request_queue_depth': 128,
            'graceful_disable_timeout': 1,
            'server_count': 0,
            'sni_enabled': True,
            'request_queue_enabled': False,
            'name': 'testpool',
            'max_concurrent_connections_per_server': 0,
            'url': 'https://192.0.2.42/api/pool/pool-20084ee1-872e-4103-98e1-899103e2242a',
            'tenant_ref': 'https://192.0.2.42/api/tenant/admin',
            'uuid': 'pool-20084ee1-872e-4103-98e1-899103e2242a',
            'connection_ramp_duration': 10
        }
        diff = avi_obj_cmp(obj, existing_obj)
        assert diff
def test_avi_obj_cmp_w_refs_n_name(self):
    """Compare configs whose reference lists mix bare UUID refs and refs
    carrying a '#name' fragment; every variant below is expected to diff."""
    # Controller snapshot: first health monitor ref has a '#System-HTTP'
    # fragment, the second one does not.
    existing_obj = {
        'use_service_port': False,
        'server_auto_scale': False,
        'host_check_enabled': False,
        'enabled': True,
        'capacity_estimation': False,
        'fewest_tasks_feedback_delay': 10,
        '_last_modified': '1471377748747040',
        'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-afe8bf2c-9821-4272-9bc6-67634c84bec9',
        'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-0e8ce760-fed2-4650-9397-5b3e4966376e',
        'inline_health_monitor': True,
        'default_server_port': 80,
        'request_queue_depth': 128,
        'graceful_disable_timeout': 1,
        'server_count': 0,
        'sni_enabled': True,
        'request_queue_enabled': False,
        'name': 'testpool',
        'max_concurrent_connections_per_server': 0,
        'url': 'https://192.0.2.42/api/pool/pool-20084ee1-872e-4103-98e1-899103e2242a',
        'tenant_ref': 'https://192.0.2.42/api/tenant/admin',
        'uuid': 'pool-20084ee1-872e-4103-98e1-899103e2242a',
        'connection_ramp_duration': 10,
        'health_monitor_refs': [
            "https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9#System-HTTP",
            "https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc8",
        ],
    }
    # Variant 1: both refs given as bare UUID URLs (no '#name' fragments).
    obj = {'name': 'testpool',
           'health_monitor_refs': ['https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9',
                                   "https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc8"],
           'server_count': 0}
    diff = avi_obj_cmp(obj, existing_obj)
    assert diff
    # Variant 2: first ref carries the same '#System-HTTP' fragment as the
    # controller snapshot.
    obj = {'name': 'testpool',
           'health_monitor_refs': [
               'https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9#System-HTTP',
               "https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc8"],
           'server_count': 0}
    diff = avi_obj_cmp(obj, existing_obj)
    assert diff
    # Variant 3: fragments on both refs plus an extra 'cloud_ref' field.
    obj = {'name': 'testpool',
           'health_monitor_refs': [
               'https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9#System-HTTP',
               "https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc8#System-HTTP2"],
           'server_count': 0,
           'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-afe8bf2c-9821-4272-9bc6-67634c84bec9#Default-Cloud',
           }
    diff = avi_obj_cmp(obj, existing_obj)
    assert diff
def test_avi_list_update(self):
    """List-of-dict fields ('services'): the desired config matching only a
    subset, or a superset, of the existing list is not reported as a diff."""
    existing_obj = {
        'services': [
            {
                "enable_ssl": False,
                "port_range_end": 80,
                "port": 80
            },
            {
                "enable_ssl": False,
                "port_range_end": 443,
                "port": 443
            }
        ],
        "name": "vs-health-test",
        "url": "https://192.0.2.42/api/virtualservice/virtualservice-526c55c2-df89-40b9-9de6-e45a472290aa",
    }
    # Desired config lists only the first service entry -> no change.
    obj = {
        'services': [
            {
                "enable_ssl": False,
                "port_range_end": 80,
                "port": 80
            }
        ]
    }
    diff = avi_obj_cmp(obj, existing_obj)
    assert not diff
    # NOTE(review): the second entry below uses port 80 where the existing
    # object has 443, yet no diff is expected — presumably the list
    # comparison matches entries loosely; confirm against avi_obj_cmp.
    obj = {
        'services': [
            {
                "enable_ssl": False,
                "port_range_end": 80,
                "port": 80
            },
            {
                "enable_ssl": False,
                "port_range_end": 443,
                "port": 80
            }
        ],
        "name": "vs-health-test",
        "url": "https://192.0.2.42/api/virtualservice/virtualservice-526c55c2-df89-40b9-9de6-e45a472290aa",
    }
    diff = avi_obj_cmp(obj, existing_obj)
    assert not diff
def test_cleanup_abset(self):
    """cleanup_absent_fields must drop every field whose value (directly,
    nested, or inside a list element) is {'state': 'absent'}, and drop
    empty containers, while keeping keys that still hold real data.

    NOTE(review): 'abset' in the method name looks like a typo for
    'absent'; kept as-is so existing `pytest -k` selections keep working.
    """
    payload = {
        'x': 10,
        'y': {'state': 'absent'},
        'z': {'a': {'state': 'absent'}},
        'l': [{'y1': {'state': 'absent'}}],
        'z1': {'a': {'state': 'absent'}, 'b': {}, 'c': 42},
        'empty': [],
    }
    payload = cleanup_absent_fields(payload)
    # Fields that became empty after pruning disappear entirely.
    for removed_key in ('y', 'z', 'l', 'empty'):
        assert removed_key not in payload
    # 'z1' survives because 'c': 42 remains, but its pruned subkeys do not.
    assert 'z1' in payload
    for pruned_subkey in ('b', 'a'):
        assert pruned_subkey not in payload['z1']
def test_complex_obj(self):
    """Round-trip a realistic pool object with nested server/network data.

    The desired config uses '#name'-suffixed refs and an extra
    '_last_modified' field; the controller snapshot uses bare refs and the
    third server carries extra runtime fields. A diff is expected."""
    # Desired config as a user would write it (refs carry '#name' suffixes).
    obj = {
        'lb_algorithm': 'LB_ALGORITHM_ROUND_ROBIN',
        'use_service_port': False, 'server_auto_scale': False,
        'host_check_enabled': False,
        'tenant_ref': 'https://192.0.2.42/api/tenant/admin#admin',
        'capacity_estimation': False,
        'servers': [{
            'hostname': 'grastogi-server6', 'ratio': 1,
            'ip': {'type': 'V4', 'addr': '198.51.100.62'},
            'discovered_networks': [{
                'subnet': [{
                    'ip_addr': {
                        'type': 'V4',
                        'addr': '198.51.100.0'
                    },
                    'mask': 24
                }],
                'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10#PG-964'
            }],
            'enabled': True, 'nw_ref': 'https://192.0.2.42/api/vimgrnwruntime/dvportgroup-53975-10.10.2.10#PG-964',
            'verify_network': False,
            'static': False,
            'resolve_server_by_dns': False,
            'external_uuid': 'vm-4230615e-bc0b-3d33-3929-1c7328575993',
            'vm_ref': 'https://192.0.2.42/api/vimgrvmruntime/vm-4230615e-bc0b-3d33-3929-1c7328575993#grastogi-server6'
        }, {
            'hostname': 'grastogi-server6',
            'ratio': 1,
            'ip': {
                'type': 'V4',
                'addr': '198.51.100.61'
            },
            'discovered_networks': [{
                'subnet': [{
                    'ip_addr': {
                        'type': 'V4',
                        'addr': '198.51.100.0'
                    },
                    'mask': 24
                }],
                'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10#PG-964'
            }],
            'enabled': True,
            'nw_ref': 'https://192.0.2.42/api/vimgrnwruntime/dvportgroup-53975-10.10.2.10#PG-964',
            'verify_network': False,
            'static': False,
            'resolve_server_by_dns': False,
            'external_uuid': 'vm-4230615e-bc0b-3d33-3929-1c7328575993',
            'vm_ref': 'https://192.0.2.42/api/vimgrvmruntime/vm-4230615e-bc0b-3d33-3929-1c7328575993#grastogi-server6'
        }, {
            # Third server has no nw_ref/external_uuid/vm_ref in the
            # desired config (the controller snapshot below does).
            'hostname': 'grastogi-server6',
            'ratio': 1,
            'ip': {
                'type': 'V4',
                'addr': '198.51.100.65'
            },
            'discovered_networks': [{
                'subnet': [{
                    'ip_addr': {
                        'type': 'V4',
                        'addr': '198.51.100.0'
                    }, 'mask': 24
                }],
                'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10#PG-964'
            }],
            'enabled': True,
            'verify_network': False,
            'static': False,
            'resolve_server_by_dns': False
        }],
        'fewest_tasks_feedback_delay': 10,
        '_last_modified': '1473292763246107',
        'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-e0696a58-8b72-4026-923c-9a87c38a2489#Default-Cloud',
        'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-33dfbcd7-867c-4e3e-acf7-96bf679d5a0d#global',
        'inline_health_monitor': True,
        'default_server_port': 8000,
        'request_queue_depth': 128,
        'graceful_disable_timeout': 1,
        'sni_enabled': True,
        'server_count': 3,
        'uuid': 'pool-09201181-747e-41ea-872d-e9a7df71b726',
        'request_queue_enabled': False,
        'name': 'p1',
        'max_concurrent_connections_per_server': 0,
        'url': 'https://192.0.2.42/api/pool/pool-09201181-747e-41ea-872d-e9a7df71b726#p1',
        'enabled': True,
        'connection_ramp_duration': 10}
    # Controller snapshot: same data with bare (fragment-free) refs.
    existing_obj = {
        'lb_algorithm': 'LB_ALGORITHM_ROUND_ROBIN',
        'use_service_port': False,
        'server_auto_scale': False,
        'host_check_enabled': False,
        'tenant_ref': 'https://192.0.2.42/api/tenant/admin',
        'capacity_estimation': False,
        'servers': [{
            'hostname': 'grastogi-server6', 'ratio': 1,
            'ip': {
                'type': 'V4',
                'addr': '198.51.100.62'
            },
            'discovered_networks': [{
                'subnet': [{
                    'mask': 24,
                    'ip_addr': {
                        'type': 'V4',
                        'addr': '198.51.100.0'
                    }
                }],
                'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10'
            }],
            'enabled': True,
            'nw_ref': 'https://192.0.2.42/api/vimgrnwruntime/dvportgroup-53975-10.10.2.10',
            'verify_network': False,
            'static': False,
            'resolve_server_by_dns': False,
            'external_uuid': 'vm-4230615e-bc0b-3d33-3929-1c7328575993',
            'vm_ref': 'https://192.0.2.42/api/vimgrvmruntime/vm-4230615e-bc0b-3d33-3929-1c7328575993'
        }, {
            'hostname': 'grastogi-server6',
            'ratio': 1,
            'ip': {
                'type': 'V4',
                'addr': '198.51.100.61'
            },
            'discovered_networks': [{
                'subnet': [{
                    'mask': 24,
                    'ip_addr': {
                        'type': 'V4',
                        'addr': '198.51.100.0'
                    }
                }],
                'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10'
            }],
            'enabled': True,
            'nw_ref': 'https://192.0.2.42/api/vimgrnwruntime/dvportgroup-53975-10.10.2.10',
            'verify_network': False,
            'static': False,
            'resolve_server_by_dns': False,
            'external_uuid': 'vm-4230615e-bc0b-3d33-3929-1c7328575993',
            'vm_ref': 'https://192.0.2.42/api/vimgrvmruntime/vm-4230615e-bc0b-3d33-3929-1c7328575993'
        }, {
            'hostname': 'grastogi-server6',
            'ratio': 1,
            'ip': {
                'type': 'V4',
                'addr': '198.51.100.65'
            },
            'discovered_networks': [{
                'subnet': [{
                    'mask': 24,
                    'ip_addr': {
                        'type': 'V4',
                        'addr': '198.51.100.0'
                    }
                }],
                'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10'
            }],
            'enabled': True,
            'nw_ref': 'https://192.0.2.42/api/vimgrnwruntime/dvportgroup-53975-10.10.2.10',
            'verify_network': False,
            'static': False,
            'resolve_server_by_dns': False,
            'external_uuid': 'vm-4230615e-bc0b-3d33-3929-1c7328575993',
            'vm_ref': 'https://192.0.2.42/api/vimgrvmruntime/vm-4230615e-bc0b-3d33-3929-1c7328575993'
        }],
        'fewest_tasks_feedback_delay': 10,
        'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-e0696a58-8b72-4026-923c-9a87c38a2489',
        'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-33dfbcd7-867c-4e3e-acf7-96bf679d5a0d',
        'inline_health_monitor': True,
        'default_server_port': 8000,
        'request_queue_depth': 128,
        'graceful_disable_timeout': 1,
        'sni_enabled': True,
        'server_count': 3,
        'uuid': 'pool-09201181-747e-41ea-872d-e9a7df71b726',
        'request_queue_enabled': False,
        'name': 'p1',
        'max_concurrent_connections_per_server': 0,
        'url': 'https://192.0.2.42/api/pool/pool-09201181-747e-41ea-872d-e9a7df71b726',
        'enabled': True,
        'connection_ramp_duration': 10
    }
    diff = avi_obj_cmp(obj, existing_obj)
    assert diff
def testAWSVs(self):
    """AWS virtual service: a sparse desired config (name-based cloud ref,
    service entry with just a port) against a full controller snapshot is
    expected to produce a diff."""
    existing_obj = {
        'network_profile_ref': 'https://12.97.16.202/api/networkprofile/networkprofile-9a0a9896-6876-44c8-a3ee-512a968905f2#System-TCP-Proxy',
        'port_uuid': 'eni-4144e73c',
        'weight': 1,
        'availability_zone': 'us-west-2a',
        'enabled': True,
        'flow_dist': 'LOAD_AWARE',
        'subnet_uuid': 'subnet-91f0b6f4',
        'delay_fairness': False,
        'avi_allocated_vip': True,
        'vrf_context_ref': 'https://12.97.16.202/api/vrfcontext/vrfcontext-722b280d-b555-4d82-9b35-af9442c0cb86#global',
        'subnet': {
            'ip_addr': {
                'type': 'V4',
                'addr': '198.51.100.0'
            },
            'mask': 24
        },
        'cloud_type': 'CLOUD_AWS', 'uuid': 'virtualservice-a5f49b99-22c8-42e6-aa65-3ca5f1e36b9e',
        'network_ref': 'https://12.97.16.202/api/network/subnet-91f0b6f4',
        'cloud_ref': 'https://12.97.16.202/api/cloud/cloud-49829414-c704-43ca-9dff-05b9e8474dcb#AWS Cloud',
        'avi_allocated_fip': False,
        'se_group_ref': 'https://12.97.16.202/api/serviceenginegroup/serviceenginegroup-3bef6320-5a2d-4801-85c4-ef4f9841f235#Default-Group',
        'scaleout_ecmp': False,
        'max_cps_per_client': 0,
        'type': 'VS_TYPE_NORMAL',
        'analytics_profile_ref': 'https://12.97.16.202/api/analyticsprofile/analyticsprofile-70f8b06f-7b6a-4500-b829-c869bbca2009#System-Analytics-Profile',
        'use_bridge_ip_as_vip': False,
        'application_profile_ref': 'https://12.97.16.202/api/applicationprofile/applicationprofile-103cbc31-cac5-46ab-8e66-bbbb2c8f551f#System-HTTP',
        'auto_allocate_floating_ip': False,
        'services': [{
            'enable_ssl': False,
            'port_range_end': 80,
            'port': 80
        }],
        'active_standby_se_tag': 'ACTIVE_STANDBY_SE_1',
        'ip_address': {
            'type': 'V4',
            'addr': '198.51.100.33'
        },
        'ign_pool_net_reach': False,
        'east_west_placement': False,
        'limit_doser': False,
        'name': 'wwwawssit.ebiz.verizon.com',
        'url': 'https://12.97.16.202/api/virtualservice/virtualservice-a5f49b99-22c8-42e6-aa65-3ca5f1e36b9e#wwwawssit.ebiz.verizon.com',
        'ssl_sess_cache_avg_size': 1024,
        'enable_autogw': True,
        'auto_allocate_ip': True,
        'tenant_ref': 'https://12.97.16.202/api/tenant/tenant-f52f7a3e-6876-4bb9-b8f7-3cab636dadf2#Sales',
        'remove_listening_port_on_vs_down': False
    }
    # Desired config uses a '?name=' lookup ref instead of a UUID URL.
    obj = {'auto_allocate_ip': True, 'subnet_uuid': 'subnet-91f0b6f4', 'cloud_ref': '/api/cloud?name=AWS Cloud', 'services': [{'port': 80}],
           'name': 'wwwawssit.ebiz.verizon.com'}
    diff = avi_obj_cmp(obj, existing_obj)
    assert diff
def testhttppolicy(self):
    """HTTP policy set: desired config with a name-based pool ref, a
    string-typed rule index and a tenant short name still diffs against
    the controller snapshot."""
    existing_obj = {
        "http_request_policy": {
            "rules": [{
                "enable": True,
                "index": 0,
                "match": {
                    "path": {
                        "match_case": "INSENSITIVE",
                        "match_criteria": "CONTAINS",
                        "match_str": ["xvz", "rst"]
                    }
                },
                "name": "blah",
                "switching_action": {
                    "action": "HTTP_SWITCHING_SELECT_POOL",
                    "pool_ref": "https://12.97.16.202/api/pool/pool-d7f6f5e7-bd26-49ad-aeed-965719eb140b#abc",
                    "status_code": "HTTP_LOCAL_RESPONSE_STATUS_CODE_200"
                }
            }]
        },
        "is_internal_policy": False,
        "name": "blah",
        "tenant_ref": "https://12.97.16.202/api/tenant/tenant-f52f7a3e-6876-4bb9-b8f7-3cab636dadf2#Sales",
        "url": "https://12.97.16.202/api/httppolicyset/httppolicyset-ffd8354b-671b-48d5-92cc-69a9057aad0c#blah",
        "uuid": "httppolicyset-ffd8354b-671b-48d5-92cc-69a9057aad0c"
    }
    obj = {
        "http_request_policy": {
            "rules": [{
                "enable": True,
                # NOTE(review): index is the string "0" here, vs int 0 in
                # the existing object — presumably deliberate to exercise
                # type coercion in the comparison; confirm.
                "index": "0",
                "match": {
                    "path": {
                        "match_case": "INSENSITIVE",
                        "match_criteria": "CONTAINS",
                        "match_str": ["xvz", "rst"]
                    }
                },
                "name": "blah",
                "switching_action": {
                    "action": "HTTP_SWITCHING_SELECT_POOL",
                    "pool_ref": "/api/pool?name=abc",
                    "status_code": "HTTP_LOCAL_RESPONSE_STATUS_CODE_200"
                }
            }]
        },
        "is_internal_policy": False,
        "tenant": "Sales"
    }
    diff = avi_obj_cmp(obj, existing_obj)
    assert diff
def testCleanupFields(self):
    """cleanup_absent_fields mutates its argument in place: top-level
    absent markers are removed and list elements are pruned of theirs."""
    pool_cfg = {
        'name': 'testpool',
        'scalar_field': {'state': 'absent'},
        'list_fields': [{'x': '1'}, {'y': {'state': 'absent'}}],
    }
    cleanup_absent_fields(pool_cfg)
    # The absent-valued scalar field is gone from the top level.
    assert 'scalar_field' not in pool_cfg
    # Every element of the list had its absent-valued 'y' key removed.
    assert all('y' not in entry for entry in pool_cfg['list_fields'])
def testGSLB(self):
    """GSLB service: desired config (plain str keys) vs controller snapshot
    (unicode-prefixed keys, extra runtime fields). A diff is expected."""
    obj = {
        'domain_names': ['cloud5.avi.com', 'cloud6.avi.com'],
        'health_monitor_scope': 'GSLB_SERVICE_HEALTH_MONITOR_ALL_MEMBERS',
        'groups': [{
            'priority': 20,
            'members': [{
                'ip': {
                    'type': 'V4',
                    'addr': '198.51.100.1'
                },
                'enabled': True, 'ratio': 1
            }, {
                'ip': {
                    'type': 'V4',
                    'addr': '198.51.100.10'
                },
                'enabled': True,
                'ratio': 1
            }],
            'algorithm': 'GSLB_ALGORITHM_CONSISTENT_HASH',
            'name': 'sc'
        }, {
            'priority': 14,
            'members': [{
                'ip': {
                    'type': 'V4',
                    'addr': '198.51.100.2'
                },
                'enabled': True,
                'ratio': 1
            }],
            'algorithm': 'GSLB_ALGORITHM_ROUND_ROBIN',
            'name': 'cn'
        }, {
            'priority': 15,
            'members': [{
                'ip': {
                    'type': 'V4',
                    'addr': '198.51.100.3'
                },
                'enabled': True, 'ratio': 1
            }],
            'algorithm': 'GSLB_ALGORITHM_ROUND_ROBIN',
            'name': 'in'
        }],
        'name': 'gs-3',
        'num_dns_ip': 2
    }
    # Controller snapshot uses u'' literals (Python 2 API payload shape)
    # and carries extra fields (uuid, url, enabled, ...).
    existing_obj = {
        u'controller_health_status_enabled': True,
        u'uuid': u'gslbservice-ab9b36bd-3e95-4c2e-80f8-92905c2eccb2',
        u'wildcard_match': False,
        u'url': u'https://192.0.2.42/api/gslbservice/gslbservice-ab9b36bd-3e95-4c2e-80f8-92905c2eccb2#gs-3',
        u'tenant_ref': u'https://192.0.2.42/api/tenant/admin#admin',
        u'enabled': True,
        u'domain_names': [u'cloud5.avi.com', u'cloud6.avi.com'],
        u'use_edns_client_subnet': True,
        u'groups': [{
            u'priority': 20,
            u'members': [{
                u'ip': {
                    u'type': u'V4',
                    u'addr': u'198.51.100.1'
                },
                u'ratio': 1,
                u'enabled': True
            }, {
                u'ip': {
                    u'type': u'V4',
                    u'addr': u'198.51.100.10'
                },
                u'ratio': 1,
                u'enabled': True
            }],
            u'name': u'sc',
            u'algorithm': u'GSLB_ALGORITHM_CONSISTENT_HASH'
        }, {
            u'priority': 14,
            u'members': [{
                u'ip': {
                    u'type': u'V4',
                    u'addr': u'198.51.100.2'
                },
                u'ratio': 1,
                u'enabled': True
            }],
            u'name': u'cn',
            u'algorithm': u'GSLB_ALGORITHM_ROUND_ROBIN'
        }, {
            u'priority': 15,
            u'members': [{
                u'ip': {
                    u'type': u'V4',
                    u'addr': u'198.51.100.3'
                },
                u'ratio': 1,
                u'enabled': True
            }],
            u'name': u'in',
            u'algorithm': u'GSLB_ALGORITHM_ROUND_ROBIN'
        }],
        u'num_dns_ip': 2,
        u'health_monitor_scope': u'GSLB_SERVICE_HEALTH_MONITOR_ALL_MEMBERS',
        u'name': u'gs-3'
    }
    diff = avi_obj_cmp(obj, existing_obj)
    assert diff
def testNoneParams(self):
    """None-valued fields in the desired config: comparing a config that
    carries explicit None values against one without them still yields a
    truthy result from avi_obj_cmp."""
    desired = {
        'name': 'testpool',
        'scalar_field': None,
        'list_fields': {
            'y': None,
            'z': 'zz',
        },
    }
    current = {
        'name': 'testpool',
        'list_fields': {
            'z': 'zz',
        },
    }
    assert avi_obj_cmp(desired, current)

View file

@ -0,0 +1,46 @@
# -*- coding: utf-8 -*-
#
# (c) 2017 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.parsing import Conditional
# Shared fixtures for TestNotKeyword below: three Conditional expressions
# evaluated against the same list of result strings.
test_results = ['result_1', 'result_2', 'result_3']
# Plain equality — no negation.
c1 = Conditional('result[1] == result_2')
# 'not ==' form — negated equality (c2.negate is expected to be True).
c2 = Conditional('result[2] not == result_2')
# 'neq not' form — exercises negation-keyword parsing order.
c3 = Conditional('result[0] neq not result_1')
class TestNotKeyword(unittest.TestCase):
    """Exercise Conditional's parsing and evaluation of negation forms,
    using the module-level fixtures c1/c2/c3 and test_results."""

    def test_negate_instance_variable_assignment(self):
        # A plain '==' comparison is not negated; the 'not ==' form is.
        assert c1.negate is False
        assert c2.negate is True

    def test_key_value_instance_variable_assignment(self):
        # Each expression splits into a key (left side) and value (right).
        assert c1.key == 'result[1]'
        assert c1.value == 'result_2'
        assert c2.key == 'result[2]'
        assert c2.value == 'result_2'

    def test_conditionals_w_not_keyword(self):
        # All three conditionals hold against the shared result list.
        for cond in (c1, c2, c3):
            assert cond(test_results)
# Allow running this test module directly (python <file>) outside pytest.
if __name__ == '__main__':
    unittest.main()

View file

@ -0,0 +1,213 @@
# -*- coding: utf-8 -*-
#
# (c) 2017 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import to_list, sort_list
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import dict_diff, dict_merge
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import conditional, Template
from ansible.module_utils.common.network import (
to_masklen, to_netmask, to_subnet, to_ipv6_network, to_ipv6_subnet, is_masklen, is_netmask
)
def test_to_list():
    """to_list wraps scalars in a list and converts containers to lists;
    a list input yields a new list object, not the same one."""
    samples = ('string', 1, True, False, None, [1, 2, 3], {'one': 1})
    for value in samples:
        assert isinstance(to_list(value), list)
    # The returned list is always a copy, never the original object.
    original = [1, 2, 3]
    assert to_list(original) is not original
def test_sort():
    """sort_list orders list input but passes non-list values through
    unchanged."""
    assert sort_list([3, 1, 2]) == [1, 2, 3]
    # A plain string is not a list, so it comes back as-is.
    assert sort_list('123') == '123'
def test_dict_diff():
    """dict_diff(base, other) returns only what changed in `other`
    relative to `base` (new keys, changed values, nested sub-diffs)."""
    base = dict(obj2=dict(), b1=True, b2=False, b3=False,
                one=1, two=2, three=3, obj1=dict(key1=1, key2=2),
                l1=[1, 3], l2=[1, 2, 3], l4=[4],
                nested=dict(n1=dict(n2=2)))
    other = dict(b1=True, b2=False, b3=True, b4=True,
                 one=1, three=4, four=4, obj1=dict(key1=2),
                 l1=[2, 1], l2=[3, 2, 1], l3=[1],
                 nested=dict(n1=dict(n2=2, n3=3)))
    result = dict_diff(base, other)
    # string assertions: unchanged keys are absent, changed/new are present
    assert 'one' not in result
    assert 'two' not in result
    assert result['three'] == 4
    assert result['four'] == 4
    # dict assertions: only the changed sub-key survives in the diff
    assert 'obj1' in result
    assert 'key1' in result['obj1']
    assert 'key2' not in result['obj1']
    # list assertions
    # NOTE(review): l2 differs only in element order and is NOT in the
    # diff, while l1 (different elements) is — presumably list comparison
    # is order-insensitive; confirm against dict_diff's implementation.
    assert result['l1'] == [2, 1]
    assert 'l2' not in result
    assert result['l3'] == [1]
    assert 'l4' not in result
    # nested assertions
    assert 'obj1' in result
    assert result['obj1']['key1'] == 2
    assert 'key2' not in result['obj1']
    # bool assertions: unchanged booleans are excluded from the diff
    assert 'b1' not in result
    assert 'b2' not in result
    assert result['b3']
    assert result['b4']
def test_dict_merge():
    """dict_merge(base, other) combines both dicts: keys from either side
    are kept, with `other` winning on conflicts and lists being unioned."""
    base = dict(obj2=dict(), b1=True, b2=False, b3=False,
                one=1, two=2, three=3, obj1=dict(key1=1, key2=2),
                l1=[1, 3], l2=[1, 2, 3], l4=[4],
                nested=dict(n1=dict(n2=2)))
    other = dict(b1=True, b2=False, b3=True, b4=True,
                 one=1, three=4, four=4, obj1=dict(key1=2),
                 l1=[2, 1], l2=[3, 2, 1], l3=[1],
                 nested=dict(n1=dict(n2=2, n3=3)))
    result = dict_merge(base, other)
    # string assertions: keys from both sides survive; `other` overrides
    assert 'one' in result
    assert 'two' in result
    assert result['three'] == 4
    assert result['four'] == 4
    # dict assertions: nested dicts are merged, not replaced
    assert 'obj1' in result
    assert 'key1' in result['obj1']
    assert 'key2' in result['obj1']
    # list assertions: merged l1 contains the union of both versions
    assert result['l1'] == [1, 2, 3]
    assert 'l2' in result
    assert result['l3'] == [1]
    assert 'l4' in result
    # nested assertions
    assert 'obj1' in result
    assert result['obj1']['key1'] == 2
    assert 'key2' in result['obj1']
    # bool assertions
    assert 'b1' in result
    assert 'b2' in result
    assert result['b3']
    assert result['b4']
def test_conditional():
    """conditional() accepts direct value equality as well as the
    operator mini-language (ge/gt/le/lt/eq/neq/min/max/exactly)."""
    # Direct equality for several value types, including falsy ones.
    for value in (10, '10', 'foo', True, False, None):
        assert conditional(value, value)
    # Operator expressions paired with an operand they must accept.
    operator_cases = (
        ("ge(1)", 1),
        ("gt(1)", 2),
        ("le(2)", 2),
        ("lt(3)", 2),
        ("eq(1)", 1),
        ("neq(0)", 1),
        ("min(1)", 1),
        ("max(1)", 1),
        ("exactly(1)", 1),
    )
    for expression, operand in operator_cases:
        assert conditional(expression, operand)
def test_template():
    """Template() renders a Jinja2 expression against a variable dict."""
    render = Template()
    assert render('{{ test }}', {'test': 'foo'}) == 'foo'
def test_to_masklen():
    """A dotted-quad netmask converts to its prefix length."""
    assert 24 == to_masklen('255.255.255.0')


def test_to_masklen_invalid():
    """A malformed netmask string raises ValueError."""
    with pytest.raises(ValueError):
        to_masklen('255')


def test_to_netmask():
    """A prefix length (int or str) converts to a dotted-quad netmask."""
    assert '255.0.0.0' == to_netmask(8)
    assert '255.0.0.0' == to_netmask('8')


def test_to_netmask_invalid():
    """A prefix length beyond the valid range raises ValueError."""
    with pytest.raises(ValueError):
        to_netmask(128)


def test_to_subnet():
    """An address + masklen yields CIDR by default, or 'addr netmask'
    with dotted_notation=True."""
    result = to_subnet('192.168.1.1', 24)
    assert '192.168.1.0/24' == result
    result = to_subnet('192.168.1.1', 24, dotted_notation=True)
    assert '192.168.1.0 255.255.255.0' == result


def test_to_subnet_invalid():
    """Non-address input raises ValueError."""
    with pytest.raises(ValueError):
        to_subnet('foo', 'bar')
def test_is_masklen():
    """Valid IPv4 prefix lengths are 0-32; anything else is rejected."""
    assert is_masklen(32)
    assert not is_masklen(33)
    assert not is_masklen('foo')


def test_is_netmask():
    """Only dotted-quad netmask strings qualify — not ints or junk."""
    assert is_netmask('255.255.255.255')
    assert not is_netmask(24)
    assert not is_netmask('foo')


def test_to_ipv6_network():
    """The network portion of an IPv6 address is returned in
    compressed form."""
    assert '2001:db8::' == to_ipv6_network('2001:db8::')
    assert '2001:0db8:85a3::' == to_ipv6_network('2001:0db8:85a3:0000:0000:8a2e:0370:7334')
    assert '2001:0db8:85a3::' == to_ipv6_network('2001:0db8:85a3:0:0:8a2e:0370:7334')


def test_to_ipv6_subnet():
    """The subnet (first four hextets) of an IPv6 address is returned in
    compressed form."""
    assert '2001:db8::' == to_ipv6_subnet('2001:db8::')
    assert '2001:0db8:85a3:4242::' == to_ipv6_subnet('2001:0db8:85a3:4242:0000:8a2e:0370:7334')
    assert '2001:0db8:85a3:4242::' == to_ipv6_subnet('2001:0db8:85a3:4242:0:8a2e:0370:7334')

View file

@ -0,0 +1,446 @@
# Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from ansible_collections.community.general.plugins.module_utils.network.ftd.common import equal_objects, delete_ref_duplicates, construct_ansible_facts
# simple objects


def test_equal_objects_return_false_with_different_length():
    """Objects with a different number of keys are unequal."""
    assert not equal_objects(
        {'foo': 1},
        {'foo': 1, 'bar': 2}
    )


def test_equal_objects_return_false_with_different_fields():
    """Same size but different key names -> unequal."""
    assert not equal_objects(
        {'foo': 1},
        {'bar': 1}
    )


def test_equal_objects_return_false_with_different_value_types():
    """int 1 vs str '1' -> unequal (no numeric/string coercion)."""
    assert not equal_objects(
        {'foo': 1},
        {'foo': '1'}
    )


def test_equal_objects_return_false_with_different_values():
    """Same key, different value -> unequal."""
    assert not equal_objects(
        {'foo': 1},
        {'foo': 2}
    )


def test_equal_objects_return_false_with_different_nested_values():
    """Differences inside nested dicts are detected."""
    assert not equal_objects(
        {'foo': {'bar': 1}},
        {'foo': {'bar': 2}}
    )


def test_equal_objects_return_false_with_different_list_length():
    """Lists of different lengths are unequal."""
    assert not equal_objects(
        {'foo': []},
        {'foo': ['bar']}
    )


def test_equal_objects_return_true_with_equal_objects():
    """Key order does not matter for equality."""
    assert equal_objects(
        {'foo': 1, 'bar': 2},
        {'bar': 2, 'foo': 1}
    )


def test_equal_objects_return_true_with_equal_str_like_values():
    """bytes and unicode with the same content compare equal."""
    assert equal_objects(
        {'foo': b'bar'},
        {'foo': u'bar'}
    )


def test_equal_objects_return_true_with_equal_nested_dicts():
    """Nested dict key order does not matter either."""
    assert equal_objects(
        {'foo': {'bar': 1, 'buz': 2}},
        {'foo': {'buz': 2, 'bar': 1}}
    )


def test_equal_objects_return_true_with_equal_lists():
    """Identical lists compare equal."""
    assert equal_objects(
        {'foo': ['bar']},
        {'foo': ['bar']}
    )


def test_equal_objects_return_true_with_ignored_fields():
    """'version' and 'id' are ignored when comparing objects."""
    assert equal_objects(
        {'foo': 1, 'version': '123', 'id': '123123'},
        {'foo': 1}
    )
# objects with object references


def test_equal_objects_return_true_with_different_ref_ids():
    """References with different 'id' values are unequal even when the
    non-identity fields differ too.

    NOTE(review): the name says 'return_true' but the assertion expects
    False — this looks like a copy/paste typo for '..._return_false_...';
    confirm and rename in a dedicated change (renaming here would break
    `pytest -k` selections)."""
    assert not equal_objects(
        {'foo': {'id': '1', 'type': 'network', 'ignored_field': 'foo'}},
        {'foo': {'id': '2', 'type': 'network', 'ignored_field': 'bar'}}
    )


def test_equal_objects_return_true_with_different_ref_types():
    """References with the same id but different 'type' are unequal.

    NOTE(review): same 'return_true' naming typo as above — the assertion
    expects False."""
    assert not equal_objects(
        {'foo': {'id': '1', 'type': 'network', 'ignored_field': 'foo'}},
        {'foo': {'id': '1', 'type': 'accessRule', 'ignored_field': 'bar'}}
    )


def test_equal_objects_return_true_with_same_object_refs():
    """References compare by id + type only; other fields are ignored."""
    assert equal_objects(
        {'foo': {'id': '1', 'type': 'network', 'ignored_field': 'foo'}},
        {'foo': {'id': '1', 'type': 'network', 'ignored_field': 'bar'}}
    )
# objects with array of object references


def test_equal_objects_return_false_with_different_array_length():
    """Reference arrays of different lengths are unequal."""
    assert not equal_objects(
        {'foo': [
            {'id': '1', 'type': 'network', 'ignored_field': 'foo'}
        ]},
        {'foo': []}
    )


def test_equal_objects_return_false_with_different_array_order():
    """Reference arrays are order-sensitive: swapped ids -> unequal."""
    assert not equal_objects(
        {'foo': [
            {'id': '1', 'type': 'network', 'ignored_field': 'foo'},
            {'id': '2', 'type': 'network', 'ignored_field': 'bar'}
        ]},
        {'foo': [
            {'id': '2', 'type': 'network', 'ignored_field': 'foo'},
            {'id': '1', 'type': 'network', 'ignored_field': 'bar'}
        ]}
    )


def test_equal_objects_return_true_with_equal_ref_arrays():
    """Arrays of refs compare by id + type; other fields are ignored."""
    assert equal_objects(
        {'foo': [
            {'id': '1', 'type': 'network', 'ignored_field': 'foo'}
        ]},
        {'foo': [
            {'id': '1', 'type': 'network', 'ignored_field': 'bar'}
        ]}
    )
# objects with nested structures and object references


def test_equal_objects_return_true_with_equal_nested_object_references():
    """A ref nested inside a sub-dict compares by id + type only: a
    changed 'name' on the referenced port does not break equality."""
    assert equal_objects(
        {
            'name': 'foo',
            'config': {
                'version': '1',
                'port': {
                    'name': 'oldPortName',
                    'type': 'port',
                    'id': '123'
                }
            }
        },
        {
            'name': 'foo',
            'config': {
                'version': '1',
                'port': {
                    'name': 'newPortName',
                    'type': 'port',
                    'id': '123'
                }
            }
        }
    )


def test_equal_objects_return_false_with_different_nested_object_references():
    """A different 'id' on a nested ref makes the objects unequal."""
    assert not equal_objects(
        {
            'name': 'foo',
            'config': {
                'version': '1',
                'port': {
                    'name': 'oldPortName',
                    'type': 'port',
                    'id': '123'
                }
            }
        },
        {
            'name': 'foo',
            'config': {
                'version': '1',
                'port': {
                    'name': 'oldPortName',
                    'type': 'port',
                    'id': '234'
                }
            }
        }
    )


def test_equal_objects_return_true_with_equal_nested_list_of_object_references():
    """Lists of nested refs compare pairwise by id + type; renamed ports
    and extra non-identity fields are ignored."""
    assert equal_objects(
        {
            'name': 'foo',
            'config': {
                'version': '1',
                'ports': [{
                    'name': 'oldPortName',
                    'type': 'port',
                    'id': '123'
                }, {
                    'name': 'oldPortName2',
                    'type': 'port',
                    'id': '234'
                }]
            }
        },
        {
            'name': 'foo',
            'config': {
                'version': '1',
                'ports': [{
                    'name': 'newPortName',
                    'type': 'port',
                    'id': '123'
                }, {
                    'name': 'newPortName2',
                    'type': 'port',
                    'id': '234',
                    'extraField': 'foo'
                }]
            }
        }
    )


def test_equal_objects_return_true_with_reference_list_containing_duplicates():
    """Duplicate refs in one list collapse for comparison: a list with a
    repeated '123' ref still equals the de-duplicated list."""
    assert equal_objects(
        {
            'name': 'foo',
            'config': {
                'version': '1',
                'ports': [{
                    'name': 'oldPortName',
                    'type': 'port',
                    'id': '123'
                }, {
                    'name': 'oldPortName',
                    'type': 'port',
                    'id': '123'
                }, {
                    'name': 'oldPortName2',
                    'type': 'port',
                    'id': '234'
                }]
            }
        },
        {
            'name': 'foo',
            'config': {
                'version': '1',
                'ports': [{
                    'name': 'newPortName',
                    'type': 'port',
                    'id': '123'
                }, {
                    'name': 'newPortName2',
                    'type': 'port',
                    'id': '234',
                    'extraField': 'foo'
                }]
            }
        }
    )
def test_delete_ref_duplicates_with_none():
    """None input passes through unchanged."""
    assert delete_ref_duplicates(None) is None


def test_delete_ref_duplicates_with_empty_dict():
    """An empty dict passes through unchanged."""
    assert {} == delete_ref_duplicates({})


def test_delete_ref_duplicates_with_simple_object():
    """An object without reference lists is returned as-is."""
    data = {
        'id': '123',
        'name': 'foo',
        'type': 'bar',
        'values': ['a', 'b']
    }
    assert data == delete_ref_duplicates(data)
def test_delete_ref_duplicates_with_object_containing_refs():
    """Distinct refs (unique id+type pairs) are all kept."""
    data = {
        'id': '123',
        'name': 'foo',
        'type': 'bar',
        'refs': [
            {'id': '123', 'type': 'baz'},
            {'id': '234', 'type': 'baz'},
            {'id': '234', 'type': 'foo'}
        ]
    }
    assert data == delete_ref_duplicates(data)


def test_delete_ref_duplicates_with_object_containing_duplicate_refs():
    """Exact id+type duplicates are collapsed; same id with a different
    type is NOT a duplicate."""
    data = {
        'id': '123',
        'name': 'foo',
        'type': 'bar',
        'refs': [
            {'id': '123', 'type': 'baz'},
            {'id': '123', 'type': 'baz'},
            {'id': '234', 'type': 'baz'},
            {'id': '234', 'type': 'baz'},
            {'id': '234', 'type': 'foo'}
        ]
    }
    assert {
        'id': '123',
        'name': 'foo',
        'type': 'bar',
        'refs': [
            {'id': '123', 'type': 'baz'},
            {'id': '234', 'type': 'baz'},
            {'id': '234', 'type': 'foo'}
        ]
    } == delete_ref_duplicates(data)


def test_delete_ref_duplicates_with_object_containing_duplicate_refs_in_nested_object():
    """De-duplication also applies to ref lists nested inside sub-dicts."""
    data = {
        'id': '123',
        'name': 'foo',
        'type': 'bar',
        'children': {
            'refs': [
                {'id': '123', 'type': 'baz'},
                {'id': '123', 'type': 'baz'},
                {'id': '234', 'type': 'baz'},
                {'id': '234', 'type': 'baz'},
                {'id': '234', 'type': 'foo'}
            ]
        }
    }
    assert {
        'id': '123',
        'name': 'foo',
        'type': 'bar',
        'children': {
            'refs': [
                {'id': '123', 'type': 'baz'},
                {'id': '234', 'type': 'baz'},
                {'id': '234', 'type': 'foo'}
            ]
        }
    } == delete_ref_duplicates(data)
def test_construct_ansible_facts_should_make_default_fact_with_name_and_type():
    """With both 'name' and 'type' present, the default fact key is
    '<type>_<name>'."""
    response = {
        'id': '123',
        'name': 'foo',
        'type': 'bar'
    }
    assert {'bar_foo': response} == construct_ansible_facts(response, {})


def test_construct_ansible_facts_should_not_make_default_fact_with_no_name():
    """Missing 'name' -> no default fact is produced."""
    response = {
        'id': '123',
        'name': 'foo'
    }
    assert {} == construct_ansible_facts(response, {})


def test_construct_ansible_facts_should_not_make_default_fact_with_no_type():
    """Missing 'type' -> no default fact is produced."""
    response = {
        'id': '123',
        'type': 'bar'
    }
    assert {} == construct_ansible_facts(response, {})


def test_construct_ansible_facts_should_use_register_as_when_given():
    """A 'register_as' param overrides the default '<type>_<name>' key."""
    response = {
        'id': '123',
        'name': 'foo',
        'type': 'bar'
    }
    params = {'register_as': 'fact_name'}
    assert {'fact_name': response} == construct_ansible_facts(response, params)


def test_construct_ansible_facts_should_extract_items():
    """Paged responses: the 'items' list itself is registered under the
    'register_as' key."""
    response = {'items': [
        {
            'id': '123',
            'name': 'foo',
            'type': 'bar'
        }, {
            'id': '123',
            'name': 'foo',
            'type': 'bar'
        }
    ]}
    params = {'register_as': 'fact_name'}
    assert {'fact_name': response['items']} == construct_ansible_facts(response, params)


def test_construct_ansible_facts_should_ignore_items_with_no_register_as():
    """Without 'register_as', an 'items' response produces no facts."""
    response = {'items': [
        {
            'id': '123',
            'name': 'foo',
            'type': 'bar'
        }, {
            'id': '123',
            'name': 'foo',
            'type': 'bar'
        }
    ]}
    assert {} == construct_ansible_facts(response, {})

View file

@ -0,0 +1,588 @@
# Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import json
import unittest
import pytest
from ansible_collections.community.general.tests.unit.compat import mock
from ansible_collections.community.general.tests.unit.compat.mock import call, patch
from ansible_collections.community.general.plugins.module_utils.network.ftd.common import HTTPMethod, FtdUnexpectedResponse
from ansible_collections.community.general.plugins.module_utils.network.ftd.configuration import iterate_over_pageable_resource, BaseConfigurationResource, \
OperationChecker, OperationNamePrefix, ParamName, QueryParams
from ansible_collections.community.general.plugins.module_utils.network.ftd.fdm_swagger_client import ValidationError, OperationField
class TestBaseConfigurationResource(object):
@pytest.fixture
def connection_mock(self, mocker):
    """Patch the ftd_configuration Connection class and return the mock
    instance, with every validator stubbed to report success."""
    connection_class_mock = mocker.patch('ansible_collections.community.general.plugins.modules.network.ftd.ftd_configuration.Connection')
    connection_instance = connection_class_mock.return_value
    # Validators return (is_valid, error_report) tuples; all succeed.
    connection_instance.validate_data.return_value = True, None
    connection_instance.validate_query_params.return_value = True, None
    connection_instance.validate_path_params.return_value = True, None
    return connection_instance
@patch.object(BaseConfigurationResource, '_fetch_system_info')
@patch.object(BaseConfigurationResource, '_send_request')
def test_get_objects_by_filter_with_multiple_filters(self, send_request_mock, fetch_system_info_mock,
connection_mock):
objects = [
{'name': 'obj1', 'type': 1, 'foo': {'bar': 'buzz'}},
{'name': 'obj2', 'type': 1, 'foo': {'bar': 'buz'}},
{'name': 'obj3', 'type': 2, 'foo': {'bar': 'buzz'}}
]
fetch_system_info_mock.return_value = {
'databaseInfo': {
'buildVersion': '6.3.0'
}
}
connection_mock.get_operation_spec.return_value = {
'method': HTTPMethod.GET,
'url': '/object/'
}
resource = BaseConfigurationResource(connection_mock, False)
send_request_mock.side_effect = [{'items': objects}, {'items': []}]
# resource.get_objects_by_filter returns generator so to be able compare generated list with expected list
# we need evaluate it.
assert objects == list(resource.get_objects_by_filter('test', {}))
send_request_mock.assert_has_calls(
[
mock.call('/object/', 'get', {}, {}, {'limit': 10, 'offset': 0})
]
)
send_request_mock.reset_mock()
send_request_mock.side_effect = [{'items': objects}, {'items': []}]
# resource.get_objects_by_filter returns generator so to be able compare generated list with expected list
# we need evaluate it.
assert [objects[0]] == list(resource.get_objects_by_filter('test', {ParamName.FILTERS: {'name': 'obj1'}}))
send_request_mock.assert_has_calls(
[
mock.call('/object/', 'get', {}, {}, {QueryParams.FILTER: 'name:obj1', 'limit': 10, 'offset': 0})
]
)
send_request_mock.reset_mock()
send_request_mock.side_effect = [{'items': objects}, {'items': []}]
# resource.get_objects_by_filter returns generator so to be able compare generated list with expected list
# we need evaluate it.
assert [objects[1]] == list(resource.get_objects_by_filter(
'test',
{ParamName.FILTERS: {'name': 'obj2', 'type': 1, 'foo': {'bar': 'buz'}}}))
send_request_mock.assert_has_calls(
[
mock.call('/object/', 'get', {}, {}, {QueryParams.FILTER: 'name:obj2', 'limit': 10, 'offset': 0})
]
)
@patch.object(BaseConfigurationResource, '_fetch_system_info')
@patch.object(BaseConfigurationResource, '_send_request')
def test_get_objects_by_filter_with_multiple_responses(self, send_request_mock, fetch_system_info_mock,
connection_mock):
send_request_mock.side_effect = [
{'items': [
{'name': 'obj1', 'type': 'foo'},
{'name': 'obj2', 'type': 'bar'}
]},
{'items': [
{'name': 'obj3', 'type': 'foo'}
]},
{'items': []}
]
fetch_system_info_mock.return_value = {
'databaseInfo': {
'buildVersion': '6.3.0'
}
}
connection_mock.get_operation_spec.return_value = {
'method': HTTPMethod.GET,
'url': '/object/'
}
resource = BaseConfigurationResource(connection_mock, False)
assert [{'name': 'obj1', 'type': 'foo'}] == list(resource.get_objects_by_filter(
'test',
{ParamName.FILTERS: {'type': 'foo'}}))
send_request_mock.assert_has_calls(
[
mock.call('/object/', 'get', {}, {}, {'limit': 10, 'offset': 0})
]
)
send_request_mock.reset_mock()
send_request_mock.side_effect = [
{'items': [
{'name': 'obj1', 'type': 'foo'},
{'name': 'obj2', 'type': 'bar'}
]},
{'items': [
{'name': 'obj3', 'type': 'foo'}
]},
{'items': []}
]
resp = list(resource.get_objects_by_filter(
'test',
{
ParamName.FILTERS: {'type': 'foo'},
ParamName.QUERY_PARAMS: {'limit': 2}
}))
assert [{'name': 'obj1', 'type': 'foo'}, {'name': 'obj3', 'type': 'foo'}] == resp
send_request_mock.assert_has_calls(
[
mock.call('/object/', 'get', {}, {}, {'limit': 2, 'offset': 0}),
mock.call('/object/', 'get', {}, {}, {'limit': 2, 'offset': 2})
]
)
def test_module_should_fail_if_validation_error_in_data(self, connection_mock):
connection_mock.get_operation_spec.return_value = {'method': HTTPMethod.POST, 'url': '/test'}
report = {
'required': ['objects[0].type'],
'invalid_type': [
{
'path': 'objects[3].id',
'expected_type': 'string',
'actually_value': 1
}
]
}
connection_mock.validate_data.return_value = (False, json.dumps(report, sort_keys=True, indent=4))
with pytest.raises(ValidationError) as e_info:
resource = BaseConfigurationResource(connection_mock, False)
resource.crud_operation('addTest', {'data': {}})
result = e_info.value.args[0]
key = 'Invalid data provided'
assert result[key]
result[key] = json.loads(result[key])
assert result == {key: {
'invalid_type': [{'actually_value': 1, 'expected_type': 'string', 'path': 'objects[3].id'}],
'required': ['objects[0].type']
}}
def test_module_should_fail_if_validation_error_in_query_params(self, connection_mock):
connection_mock.get_operation_spec.return_value = {'method': HTTPMethod.GET, 'url': '/test',
'returnMultipleItems': False}
report = {
'required': ['objects[0].type'],
'invalid_type': [
{
'path': 'objects[3].id',
'expected_type': 'string',
'actually_value': 1
}
]
}
connection_mock.validate_query_params.return_value = (False, json.dumps(report, sort_keys=True, indent=4))
with pytest.raises(ValidationError) as e_info:
resource = BaseConfigurationResource(connection_mock, False)
resource.crud_operation('getTestList', {'data': {}})
result = e_info.value.args[0]
key = 'Invalid query_params provided'
assert result[key]
result[key] = json.loads(result[key])
assert result == {key: {
'invalid_type': [{'actually_value': 1, 'expected_type': 'string', 'path': 'objects[3].id'}],
'required': ['objects[0].type']}}
def test_module_should_fail_if_validation_error_in_path_params(self, connection_mock):
connection_mock.get_operation_spec.return_value = {'method': HTTPMethod.GET, 'url': '/test',
'returnMultipleItems': False}
report = {
'path_params': {
'required': ['objects[0].type'],
'invalid_type': [
{
'path': 'objects[3].id',
'expected_type': 'string',
'actually_value': 1
}
]
}
}
connection_mock.validate_path_params.return_value = (False, json.dumps(report, sort_keys=True, indent=4))
with pytest.raises(ValidationError) as e_info:
resource = BaseConfigurationResource(connection_mock, False)
resource.crud_operation('putTest', {'data': {}})
result = e_info.value.args[0]
key = 'Invalid path_params provided'
assert result[key]
result[key] = json.loads(result[key])
assert result == {key: {
'path_params': {
'invalid_type': [{'actually_value': 1, 'expected_type': 'string', 'path': 'objects[3].id'}],
'required': ['objects[0].type']}}}
def test_module_should_fail_if_validation_error_in_all_params(self, connection_mock):
connection_mock.get_operation_spec.return_value = {'method': HTTPMethod.POST, 'url': '/test'}
report = {
'data': {
'required': ['objects[0].type'],
'invalid_type': [
{
'path': 'objects[3].id',
'expected_type': 'string',
'actually_value': 1
}
]
},
'path_params': {
'required': ['some_param'],
'invalid_type': [
{
'path': 'name',
'expected_type': 'string',
'actually_value': True
}
]
},
'query_params': {
'required': ['other_param'],
'invalid_type': [
{
'path': 'f_integer',
'expected_type': 'integer',
'actually_value': "test"
}
]
}
}
connection_mock.validate_data.return_value = (False, json.dumps(report['data'], sort_keys=True, indent=4))
connection_mock.validate_query_params.return_value = (False,
json.dumps(report['query_params'], sort_keys=True,
indent=4))
connection_mock.validate_path_params.return_value = (False,
json.dumps(report['path_params'], sort_keys=True,
indent=4))
with pytest.raises(ValidationError) as e_info:
resource = BaseConfigurationResource(connection_mock, False)
resource.crud_operation('putTest', {'data': {}})
result = e_info.value.args[0]
key_data = 'Invalid data provided'
assert result[key_data]
result[key_data] = json.loads(result[key_data])
key_path_params = 'Invalid path_params provided'
assert result[key_path_params]
result[key_path_params] = json.loads(result[key_path_params])
key_query_params = 'Invalid query_params provided'
assert result[key_query_params]
result[key_query_params] = json.loads(result[key_query_params])
assert result == {
key_data: {'invalid_type': [{'actually_value': 1, 'expected_type': 'string', 'path': 'objects[3].id'}],
'required': ['objects[0].type']},
key_path_params: {'invalid_type': [{'actually_value': True, 'expected_type': 'string', 'path': 'name'}],
'required': ['some_param']},
key_query_params: {
'invalid_type': [{'actually_value': 'test', 'expected_type': 'integer', 'path': 'f_integer'}],
'required': ['other_param']}}
@pytest.mark.parametrize("test_api_version, expected_result",
[
("6.2.3", "name:object_name"),
("6.3.0", "name:object_name"),
("6.4.0", "fts~object_name")
]
)
def test_stringify_name_filter(self, test_api_version, expected_result, connection_mock):
filters = {"name": "object_name"}
with patch.object(BaseConfigurationResource, '_fetch_system_info') as fetch_system_info_mock:
fetch_system_info_mock.return_value = {
'databaseInfo': {
'buildVersion': test_api_version
}
}
resource = BaseConfigurationResource(connection_mock, False)
assert resource._stringify_name_filter(filters) == expected_result, "Unexpected result for version %s" % (
test_api_version)
class TestIterateOverPageableResource(object):
    """Tests for the iterate_over_pageable_resource pagination helper."""

    def test_iterate_over_pageable_resource_with_no_items(self):
        """An empty first page yields nothing."""
        fetch = mock.Mock(return_value={'items': []})

        result = iterate_over_pageable_resource(fetch, {'query_params': {}})

        assert list(result) == []

    def test_iterate_over_pageable_resource_with_one_page(self):
        """A single non-empty page is yielded with default offset/limit."""
        fetch = mock.Mock(side_effect=[
            {'items': ['foo', 'bar']},
            {'items': []},
        ])

        result = iterate_over_pageable_resource(fetch, {'query_params': {}})

        assert list(result) == ['foo', 'bar']
        fetch.assert_has_calls([
            call(params={'query_params': {'offset': 0, 'limit': 10}})
        ])

    def test_iterate_over_pageable_resource_with_multiple_pages(self):
        """With limit=1 every page is fetched; with the default limit a short page stops iteration."""
        pages = [
            {'items': ['foo']},
            {'items': ['bar']},
            {'items': ['buzz']},
            {'items': []},
        ]
        fetch = mock.Mock(side_effect=pages)
        result = iterate_over_pageable_resource(fetch, {'query_params': {}})
        assert list(result) == ['foo']

        fetch.reset_mock()
        fetch = mock.Mock(side_effect=pages)
        result = iterate_over_pageable_resource(fetch, {'query_params': {'limit': 1}})
        assert list(result) == ['foo', 'bar', 'buzz']

    def test_iterate_over_pageable_resource_should_preserve_query_params(self):
        """Caller-supplied query params survive alongside the injected offset/limit."""
        fetch = mock.Mock(return_value={'items': []})

        result = iterate_over_pageable_resource(fetch, {'query_params': {'filter': 'name:123'}})

        assert list(result) == []
        fetch.assert_called_once_with(params={'query_params': {'filter': 'name:123', 'offset': 0, 'limit': 10}})

    def test_iterate_over_pageable_resource_should_preserve_limit(self):
        """An explicit limit is passed through unchanged."""
        fetch = mock.Mock(side_effect=[
            {'items': ['foo']},
            {'items': []},
        ])

        result = iterate_over_pageable_resource(fetch, {'query_params': {'limit': 1}})

        assert list(result) == ['foo']
        fetch.assert_has_calls([
            call(params={'query_params': {'offset': 0, 'limit': 1}})
        ])

    def test_iterate_over_pageable_resource_should_preserve_offset(self):
        """An explicit starting offset is passed through unchanged."""
        fetch = mock.Mock(side_effect=[
            {'items': ['foo']},
            {'items': []},
        ])

        result = iterate_over_pageable_resource(fetch, {'query_params': {'offset': 3}})

        assert list(result) == ['foo']
        fetch.assert_has_calls([
            call(params={'query_params': {'offset': 3, 'limit': 10}}),
        ])

    def test_iterate_over_pageable_resource_should_pass_with_string_offset_and_limit(self):
        """String offset/limit are accepted; the next offset is computed numerically."""
        fetch = mock.Mock(side_effect=[
            {'items': ['foo']},
            {'items': []},
        ])

        result = iterate_over_pageable_resource(fetch, {'query_params': {'offset': '1', 'limit': '1'}})

        assert list(result) == ['foo']
        fetch.assert_has_calls([
            call(params={'query_params': {'offset': '1', 'limit': '1'}}),
            call(params={'query_params': {'offset': 2, 'limit': '1'}})
        ])

    def test_iterate_over_pageable_resource_raises_exception_when_server_returned_more_items_than_requested(self):
        """More items than the requested limit is treated as a protocol violation."""
        fetch = mock.Mock(side_effect=[
            {'items': ['foo', 'redundant_bar']},
            {'items': []},
        ])

        with pytest.raises(FtdUnexpectedResponse):
            list(iterate_over_pageable_resource(fetch, {'query_params': {'offset': '1', 'limit': '1'}}))

        fetch.assert_has_calls([
            call(params={'query_params': {'offset': '1', 'limit': '1'}})
        ])
class TestOperationCheckerClass(unittest.TestCase):
    """Tests for OperationChecker's operation-classification predicates.

    Each predicate is checked for the positive case, a wrong HTTP method in the
    operation spec, and a wrong operation-name prefix.

    Note: the original mixed ``self.assertTrue(...)`` with bare ``assert``
    statements for identical boolean checks; this is unified to bare asserts,
    consistent with the rest of this module.
    """

    def setUp(self):
        # The checker exposes classmethods only, so the class itself is enough.
        self._checker = OperationChecker

    def test_is_add_operation_positive(self):
        operation_name = OperationNamePrefix.ADD + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.POST}
        assert self._checker.is_add_operation(operation_name, operation_spec)

    def test_is_add_operation_wrong_method_in_spec(self):
        operation_name = OperationNamePrefix.ADD + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.GET}
        assert not self._checker.is_add_operation(operation_name, operation_spec)

    def test_is_add_operation_negative_wrong_operation_name(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.POST}
        assert not self._checker.is_add_operation(operation_name, operation_spec)

    def test_is_edit_operation_positive(self):
        operation_name = OperationNamePrefix.EDIT + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.PUT}
        assert self._checker.is_edit_operation(operation_name, operation_spec)

    def test_is_edit_operation_wrong_method_in_spec(self):
        operation_name = OperationNamePrefix.EDIT + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.GET}
        assert not self._checker.is_edit_operation(operation_name, operation_spec)

    def test_is_edit_operation_negative_wrong_operation_name(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.PUT}
        assert not self._checker.is_edit_operation(operation_name, operation_spec)

    def test_is_delete_operation_positive(self):
        operation_name = OperationNamePrefix.DELETE + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.DELETE}
        assert self._checker.is_delete_operation(operation_name, operation_spec)

    def test_is_delete_operation_wrong_method_in_spec(self):
        operation_name = OperationNamePrefix.DELETE + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.GET}
        assert not self._checker.is_delete_operation(operation_name, operation_spec)

    def test_is_delete_operation_negative_wrong_operation_name(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.DELETE}
        assert not self._checker.is_delete_operation(operation_name, operation_spec)

    def test_is_get_list_operation_positive(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.GET,
            OperationField.RETURN_MULTIPLE_ITEMS: True
        }
        assert self._checker.is_get_list_operation(operation_name, operation_spec)

    def test_is_get_list_operation_wrong_method_in_spec(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.POST,
            OperationField.RETURN_MULTIPLE_ITEMS: True
        }
        assert not self._checker.is_get_list_operation(operation_name, operation_spec)

    def test_is_get_list_operation_does_not_return_list(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.GET,
            OperationField.RETURN_MULTIPLE_ITEMS: False
        }
        assert not self._checker.is_get_list_operation(operation_name, operation_spec)

    def test_is_get_operation_positive(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.GET,
            OperationField.RETURN_MULTIPLE_ITEMS: False
        }
        assert self._checker.is_get_operation(operation_name, operation_spec)

    def test_is_get_operation_wrong_method_in_spec(self):
        operation_name = OperationNamePrefix.ADD + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.POST,
            OperationField.RETURN_MULTIPLE_ITEMS: False
        }
        assert not self._checker.is_get_operation(operation_name, operation_spec)

    def test_is_get_operation_negative_when_returns_multiple(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.GET,
            OperationField.RETURN_MULTIPLE_ITEMS: True
        }
        assert not self._checker.is_get_operation(operation_name, operation_spec)

    def test_is_upsert_operation_positive(self):
        operation_name = OperationNamePrefix.UPSERT + "Object"
        assert self._checker.is_upsert_operation(operation_name)

    def test_is_upsert_operation_with_wrong_operation_name(self):
        # Every non-upsert prefix must be rejected.
        for op_type in [OperationNamePrefix.ADD, OperationNamePrefix.GET, OperationNamePrefix.EDIT,
                        OperationNamePrefix.DELETE]:
            operation_name = op_type + "Object"
            assert not self._checker.is_upsert_operation(operation_name)

    def test_is_find_by_filter_operation(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.GET,
            OperationField.RETURN_MULTIPLE_ITEMS: True
        }
        params = {ParamName.FILTERS: 1}
        assert self._checker.is_find_by_filter_operation(
            operation_name, params, operation_spec
        )

    def test_is_find_by_filter_operation_negative_when_filters_empty(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.GET,
            OperationField.RETURN_MULTIPLE_ITEMS: True
        }
        # Both an explicit None and a missing FILTERS key count as "no filters".
        params = {ParamName.FILTERS: None}
        assert not self._checker.is_find_by_filter_operation(
            operation_name, params, operation_spec
        )
        params = {}
        assert not self._checker.is_find_by_filter_operation(
            operation_name, params, operation_spec
        )

    def test_is_upsert_operation_supported_operation(self):
        # Upsert needs getList + edit; add is optional.
        get_list_op_spec = {OperationField.METHOD: HTTPMethod.GET, OperationField.RETURN_MULTIPLE_ITEMS: True}
        add_op_spec = {OperationField.METHOD: HTTPMethod.POST}
        edit_op_spec = {OperationField.METHOD: HTTPMethod.PUT}

        assert self._checker.is_upsert_operation_supported({'getList': get_list_op_spec, 'edit': edit_op_spec})
        assert self._checker.is_upsert_operation_supported(
            {'add': add_op_spec, 'getList': get_list_op_spec, 'edit': edit_op_spec})
        assert not self._checker.is_upsert_operation_supported({'getList': get_list_op_spec})
        assert not self._checker.is_upsert_operation_supported({'edit': edit_op_spec})
        assert not self._checker.is_upsert_operation_supported({'getList': get_list_op_spec, 'add': add_op_spec})

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,145 @@
# Copyright (c) 2019 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import pytest
pytest.importorskip("kick")
from ansible_collections.community.general.plugins.module_utils.network.ftd.device import FtdPlatformFactory, FtdModel, FtdAsa5500xPlatform, \
Ftd2100Platform, AbstractFtdPlatform
from ansible_collections.community.general.tests.unit.plugins.modules.network.ftd.test_ftd_install import DEFAULT_MODULE_PARAMS
class TestFtdModel(object):
    """Tests for FtdModel.supported_models membership."""

    def test_has_value_should_return_true_for_existing_models(self):
        supported = FtdModel.supported_models()
        assert FtdModel.FTD_2120 in supported
        assert FtdModel.FTD_ASA5516_X in supported

    def test_has_value_should_return_false_for_non_existing_models(self):
        supported = FtdModel.supported_models()
        assert 'nonExistingModel' not in supported
        assert None not in supported
class TestFtdPlatformFactory(object):
    """Tests for FtdPlatformFactory.create model-to-platform dispatch."""

    @pytest.fixture(autouse=True)
    def mock_devices(self, mocker):
        # Replace the device-driver classes so no real device interaction happens.
        mocker.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.device.Kp')
        mocker.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.device.Ftd5500x')

    def test_factory_should_return_corresponding_platform(self):
        platform = FtdPlatformFactory.create(FtdModel.FTD_ASA5508_X, dict(DEFAULT_MODULE_PARAMS))
        assert type(platform) is FtdAsa5500xPlatform

        platform = FtdPlatformFactory.create(FtdModel.FTD_2130, dict(DEFAULT_MODULE_PARAMS))
        assert type(platform) is Ftd2100Platform

    def test_factory_should_raise_error_with_not_supported_model(self):
        with pytest.raises(ValueError) as ex:
            FtdPlatformFactory.create('nonExistingModel', dict(DEFAULT_MODULE_PARAMS))
        assert ex.value.args[0] == "FTD model 'nonExistingModel' is not supported by this module."
class TestAbstractFtdPlatform(object):
    """Tests for the AbstractFtdPlatform base-class helpers."""

    def test_install_ftd_image_raise_error_on_abstract_class(self):
        with pytest.raises(NotImplementedError):
            AbstractFtdPlatform().install_ftd_image(dict(DEFAULT_MODULE_PARAMS))

    def test_supports_ftd_model_should_return_true_for_supported_models(self):
        assert Ftd2100Platform.supports_ftd_model(FtdModel.FTD_2120)
        assert FtdAsa5500xPlatform.supports_ftd_model(FtdModel.FTD_ASA5516_X)

    def test_supports_ftd_model_should_return_false_for_non_supported_models(self):
        # The abstract base supports nothing, and platforms reject each other's models.
        assert not AbstractFtdPlatform.supports_ftd_model(FtdModel.FTD_2120)
        assert not Ftd2100Platform.supports_ftd_model(FtdModel.FTD_ASA5508_X)
        assert not FtdAsa5500xPlatform.supports_ftd_model(FtdModel.FTD_2120)

    def test_parse_rommon_file_location(self):
        host, image_path = AbstractFtdPlatform.parse_rommon_file_location('tftp://1.2.3.4/boot/rommon-boot.foo')
        assert host == '1.2.3.4'
        assert image_path == '/boot/rommon-boot.foo'

    def test_parse_rommon_file_location_should_fail_for_non_tftp_protocol(self):
        with pytest.raises(ValueError) as ex:
            AbstractFtdPlatform.parse_rommon_file_location('http://1.2.3.4/boot/rommon-boot.foo')
        assert 'The ROMMON image must be downloaded from TFTP server' in str(ex.value)
class TestFtd2100Platform(object):
    """Tests for the FTD 2100-series platform wrapper around the Kp driver."""

    @pytest.fixture
    def kp_mock(self, mocker):
        return mocker.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.device.Kp')

    @pytest.fixture
    def module_params(self):
        return dict(DEFAULT_MODULE_PARAMS)

    def test_install_ftd_image_should_call_kp_module(self, kp_mock, module_params):
        platform = FtdPlatformFactory.create(FtdModel.FTD_2110, module_params)
        platform.install_ftd_image(module_params)

        assert kp_mock.called
        assert kp_mock.return_value.ssh_console.called
        console = kp_mock.return_value.ssh_console.return_value
        assert console.baseline_fp2k_ftd.called
        assert console.disconnect.called

    def test_install_ftd_image_should_call_disconnect_when_install_fails(self, kp_mock, module_params):
        # Even when the baseline step blows up, the console must be disconnected.
        console = kp_mock.return_value.ssh_console.return_value
        console.baseline_fp2k_ftd.side_effect = Exception('Something went wrong')

        platform = FtdPlatformFactory.create(FtdModel.FTD_2120, module_params)
        with pytest.raises(Exception):
            platform.install_ftd_image(module_params)

        assert console.baseline_fp2k_ftd.called
        assert console.disconnect.called
class TestFtdAsa5500xPlatform(object):
    """Tests for the ASA 5500-X platform wrapper around the Ftd5500x driver."""

    @pytest.fixture
    def asa5500x_mock(self, mocker):
        return mocker.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.device.Ftd5500x')

    @pytest.fixture
    def module_params(self):
        return dict(DEFAULT_MODULE_PARAMS)

    def test_install_ftd_image_should_call_kp_module(self, asa5500x_mock, module_params):
        platform = FtdPlatformFactory.create(FtdModel.FTD_ASA5508_X, module_params)
        platform.install_ftd_image(module_params)

        assert asa5500x_mock.called
        assert asa5500x_mock.return_value.ssh_console.called
        console = asa5500x_mock.return_value.ssh_console.return_value
        assert console.rommon_to_new_image.called
        assert console.disconnect.called

    def test_install_ftd_image_should_call_disconnect_when_install_fails(self, asa5500x_mock, module_params):
        # Even when the image install blows up, the console must be disconnected.
        console = asa5500x_mock.return_value.ssh_console.return_value
        console.rommon_to_new_image.side_effect = Exception('Something went wrong')

        platform = FtdPlatformFactory.create(FtdModel.FTD_ASA5516_X, module_params)
        with pytest.raises(Exception):
            platform.install_ftd_image(module_params)

        assert console.rommon_to_new_image.called
        assert console.disconnect.called

View file

@ -0,0 +1,379 @@
# Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import copy
import os
import unittest
from ansible_collections.community.general.plugins.module_utils.network.ftd.common import HTTPMethod
from ansible_collections.community.general.plugins.module_utils.network.ftd.fdm_swagger_client import FdmSwaggerParser
# Directory containing this test file; used to locate bundled fixture data.
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
TEST_DATA_FOLDER = os.path.join(DIR_PATH, 'test_data')

# Minimal hand-written Swagger (OpenAPI 2.0) specification describing the
# NetworkObject model and its CRUD endpoints.  Shared fixture for the
# FdmSwaggerParser tests below; tests deep-copy it before mutating.
base = {
    'basePath': "/api/fdm/v2",
    'definitions': {"NetworkObject": {"type": "object",
                                      "properties": {"version": {"type": "string"}, "name": {"type": "string"},
                                                     "description": {"type": "string"},
                                                     "subType": {"type": "object",
                                                                 "$ref": "#/definitions/NetworkObjectType"},
                                                     "value": {"type": "string"},
                                                     "isSystemDefined": {"type": "boolean"},
                                                     "dnsResolution": {"type": "object",
                                                                       "$ref": "#/definitions/FQDNDNSResolution"},
                                                     "id": {"type": "string"},
                                                     "type": {"type": "string", "default": "networkobject"}},
                                      "required": ["subType", "type", "value", "name"]},
                    "NetworkObjectWrapper": {
                        "allOf": [{"$ref": "#/definitions/NetworkObject"}, {"$ref": "#/definitions/LinksWrapper"}]}
                    },
    'paths': {
        "/object/networks": {
            "get": {"tags": ["NetworkObject"],
                    "operationId": "getNetworkObjectList",
                    "responses": {
                        "200": {
                            "description": "",
                            "schema": {"type": "object",
                                       "title": "NetworkObjectList",
                                       "properties": {
                                           "items": {
                                               "type": "array",
                                               "items": {"$ref": "#/definitions/NetworkObjectWrapper"}},
                                           "paging": {
                                               "$ref": "#/definitions/Paging"}},
                                       "required": ["items", "paging"]}}},
                    "parameters": [
                        {"name": "offset", "in": "query", "required": False, "type": "integer"},
                        {"name": "limit", "in": "query", "required": False, "type": "integer"},
                        {"name": "sort", "in": "query", "required": False, "type": "string"},
                        {"name": "filter", "in": "query", "required": False, "type": "string"}]},
            "post": {"tags": ["NetworkObject"], "operationId": "addNetworkObject",
                     "responses": {
                         "200": {"description": "",
                                 "schema": {"type": "object",
                                            "$ref": "#/definitions/NetworkObjectWrapper"}},
                         "422": {"description": "",
                                 "schema": {"type": "object", "$ref": "#/definitions/ErrorWrapper"}}},
                     "parameters": [{"in": "body", "name": "body",
                                     "required": True,
                                     "schema": {"$ref": "#/definitions/NetworkObject"}}]}
        },
        "/object/networks/{objId}": {
            "get": {"tags": ["NetworkObject"], "operationId": "getNetworkObject",
                    "responses": {"200": {"description": "",
                                          "schema": {"type": "object",
                                                     "$ref": "#/definitions/NetworkObjectWrapper"}},
                                  "404": {"description": "",
                                          "schema": {"type": "object",
                                                     "$ref": "#/definitions/ErrorWrapper"}}},
                    "parameters": [{"name": "objId", "in": "path", "required": True,
                                    "type": "string"}]},
            "put": {"tags": ["NetworkObject"], "operationId": "editNetworkObject",
                    "responses": {"200": {"description": "",
                                          "schema": {"type": "object",
                                                     "$ref": "#/definitions/NetworkObjectWrapper"}},
                                  "422": {"description": "",
                                          "schema": {"type": "object",
                                                     "$ref": "#/definitions/ErrorWrapper"}}},
                    "parameters": [{"name": "objId", "in": "path", "required": True,
                                    "type": "string"},
                                   {"in": "body", "name": "body", "required": True,
                                    "schema": {"$ref": "#/definitions/NetworkObject"}}]},
            "delete": {"tags": ["NetworkObject"], "operationId": "deleteNetworkObject",
                       "responses": {"204": {"description": ""},
                                     "422": {"description": "",
                                             "schema": {"type": "object",
                                                        "$ref": "#/definitions/ErrorWrapper"}}},
                       "parameters": [{"name": "objId", "in": "path", "required": True,
                                       "type": "string"}]}}}
}
def _get_objects(base_object, key_names):
return dict((_key, base_object[_key]) for _key in key_names)
class TestFdmSwaggerParser(unittest.TestCase):
def test_simple_object(self):
self._data = copy.deepcopy(base)
self.fdm_data = FdmSwaggerParser().parse_spec(self._data)
expected_operations = {
'getNetworkObjectList': {
'method': HTTPMethod.GET,
'url': '/api/fdm/v2/object/networks',
'modelName': 'NetworkObject',
'parameters': {
'path': {},
'query': {
'offset': {
'required': False,
'type': 'integer'
},
'limit': {
'required': False,
'type': 'integer'
},
'sort': {
'required': False,
'type': 'string'
},
'filter': {
'required': False,
'type': 'string'
}
}
},
'returnMultipleItems': True,
"tags": ["NetworkObject"]
},
'addNetworkObject': {
'method': HTTPMethod.POST,
'url': '/api/fdm/v2/object/networks',
'modelName': 'NetworkObject',
'parameters': {'path': {},
'query': {}},
'returnMultipleItems': False,
"tags": ["NetworkObject"]
},
'getNetworkObject': {
'method': HTTPMethod.GET,
'url': '/api/fdm/v2/object/networks/{objId}',
'modelName': 'NetworkObject',
'parameters': {
'path': {
'objId': {
'required': True,
'type': "string"
}
},
'query': {}
},
'returnMultipleItems': False,
"tags": ["NetworkObject"]
},
'editNetworkObject': {
'method': HTTPMethod.PUT,
'url': '/api/fdm/v2/object/networks/{objId}',
'modelName': 'NetworkObject',
'parameters': {
'path': {
'objId': {
'required': True,
'type': "string"
}
},
'query': {}
},
'returnMultipleItems': False,
"tags": ["NetworkObject"]
},
'deleteNetworkObject': {
'method': HTTPMethod.DELETE,
'url': '/api/fdm/v2/object/networks/{objId}',
'modelName': 'NetworkObject',
'parameters': {
'path': {
'objId': {
'required': True,
'type': "string"
}
},
'query': {}
},
'returnMultipleItems': False,
"tags": ["NetworkObject"]
}
}
assert sorted(['NetworkObject', 'NetworkObjectWrapper']) == sorted(self.fdm_data['models'].keys())
assert expected_operations == self.fdm_data['operations']
assert {'NetworkObject': expected_operations} == self.fdm_data['model_operations']
def test_simple_object_with_documentation(self):
api_spec = copy.deepcopy(base)
docs = {
'definitions': {
'NetworkObject': {
'description': 'Description for Network Object',
'properties': {'name': 'Description for name field'}
}
},
'paths': {
'/object/networks': {
'get': {
'description': 'Description for getNetworkObjectList operation',
'parameters': [{'name': 'offset', 'description': 'Description for offset field'}]
},
'post': {'description': 'Description for addNetworkObject operation'}
}
}
}
self.fdm_data = FdmSwaggerParser().parse_spec(api_spec, docs)
assert 'Description for Network Object' == self.fdm_data['models']['NetworkObject']['description']
assert '' == self.fdm_data['models']['NetworkObjectWrapper']['description']
network_properties = self.fdm_data['models']['NetworkObject']['properties']
assert '' == network_properties['id']['description']
assert not network_properties['id']['required']
assert 'Description for name field' == network_properties['name']['description']
assert network_properties['name']['required']
ops = self.fdm_data['operations']
assert 'Description for getNetworkObjectList operation' == ops['getNetworkObjectList']['description']
assert 'Description for addNetworkObject operation' == ops['addNetworkObject']['description']
assert '' == ops['deleteNetworkObject']['description']
get_op_params = ops['getNetworkObjectList']['parameters']
assert 'Description for offset field' == get_op_params['query']['offset']['description']
assert '' == get_op_params['query']['limit']['description']
def test_model_operations_should_contain_all_operations(self):
    """Verify that parse_spec groups every parsed operation under its model name
    in 'model_operations', including operations with no model (key ``None``)."""
    # Minimal hand-written Swagger spec: three models and four paths exercising
    # GET-list, POST, GET-by-id, PUT and DELETE, plus a DELETE with no model.
    data = {
        'basePath': '/v2/',
        'definitions': {
            'Model1': {"type": "object"},
            'Model2': {"type": "object"},
            'Model3': {"type": "object"}
        },
        'paths': {
            'path1': {
                'get': {
                    'operationId': 'getSomeModelList',
                    "responses": {
                        "200": {"description": "",
                                "schema": {"type": "object",
                                           "title": "NetworkObjectList",
                                           "properties": {
                                               "items": {
                                                   "type": "array",
                                                   "items": {
                                                       "$ref": "#/definitions/Model1"
                                                   }
                                               }
                                           }}
                                }
                    }
                },
                "post": {
                    "operationId": "addSomeModel",
                    "parameters": [{"in": "body",
                                    "name": "body",
                                    "schema": {"$ref": "#/definitions/Model2"}
                                    }]}
            },
            'path2/{id}': {
                "get": {"operationId": "getSomeModel",
                        "responses": {"200": {"description": "",
                                              "schema": {"type": "object",
                                                         "$ref": "#/definitions/Model3"}},
                                      }
                        },
                "put": {"operationId": "editSomeModel",
                        "parameters": [{"in": "body",
                                        "name": "body",
                                        "schema": {"$ref": "#/definitions/Model1"}}
                                       ]},
                # DELETE has no schema: the parser must infer Model3 from the
                # operationId ('deleteModel3').
                "delete": {
                    "operationId": "deleteModel3",
                }},
            'path3': {
                # No model can be inferred here -> grouped under None below.
                "delete": {
                    "operationId": "deleteNoneModel",
                }
            }
        }
    }
    # Expected flat operation map; URLs are prefixed with basePath.
    expected_operations = {
        'getSomeModelList': {
            'method': HTTPMethod.GET,
            'url': '/v2/path1',
            'modelName': 'Model1',
            'returnMultipleItems': True,
            'tags': []
        },
        'addSomeModel': {
            'method': HTTPMethod.POST,
            'url': '/v2/path1',
            'modelName': 'Model2',
            'parameters': {
                'path': {},
                'query': {}
            },
            'returnMultipleItems': False,
            'tags': []
        },
        'getSomeModel': {
            'method': HTTPMethod.GET,
            'url': '/v2/path2/{id}',
            'modelName': 'Model3',
            'returnMultipleItems': False,
            'tags': []
        },
        'editSomeModel': {
            'method': HTTPMethod.PUT,
            'url': '/v2/path2/{id}',
            'modelName': 'Model1',
            'parameters': {
                'path': {},
                'query': {}
            },
            'returnMultipleItems': False,
            'tags': []
        },
        'deleteModel3': {
            'method': HTTPMethod.DELETE,
            'url': '/v2/path2/{id}',
            'modelName': 'Model3',
            'returnMultipleItems': False,
            'tags': []
        },
        'deleteNoneModel': {
            'method': HTTPMethod.DELETE,
            'url': '/v2/path3',
            'modelName': None,
            'returnMultipleItems': False,
            'tags': []
        }
    }
    fdm_data = FdmSwaggerParser().parse_spec(data)
    assert sorted(['Model1', 'Model2', 'Model3']) == sorted(fdm_data['models'].keys())
    assert expected_operations == fdm_data['operations']
    # Each model groups every operation that reads or writes it.
    assert {
        'Model1': {
            'getSomeModelList': expected_operations['getSomeModelList'],
            'editSomeModel': expected_operations['editSomeModel'],
        },
        'Model2': {
            'addSomeModel': expected_operations['addSomeModel']
        },
        'Model3': {
            'getSomeModel': expected_operations['getSomeModel'],
            'deleteModel3': expected_operations['deleteModel3']
        },
        None: {
            'deleteNoneModel': expected_operations['deleteNoneModel']
        }
    } == fdm_data['model_operations']

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,75 @@
import json
import os
import unittest
from ansible_collections.community.general.plugins.module_utils.network.ftd.fdm_swagger_client import FdmSwaggerValidator, FdmSwaggerParser
# Absolute path of the directory containing this test module.
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
# JSON fixtures used by the tests below live next to this file.
TEST_DATA_FOLDER = os.path.join(DIR_PATH, 'test_data')
class TestFdmSwagger(unittest.TestCase):
    """Integration-style tests that parse the full recorded FTD Swagger spec fixture."""

    def setUp(self):
        self.init_mock_data()

    def init_mock_data(self):
        # Load the complete Swagger spec fixture (includes per-model examples).
        with open(os.path.join(TEST_DATA_FOLDER, 'ngfw_with_ex.json'), 'rb') as f:
            self.base_data = json.loads(f.read().decode('utf-8'))

    def test_with_all_data(self):
        """Validate every non-GET operation's example payload against its model.

        Models whose bundled examples are known not to validate are collected
        and compared against an explicit allow-list, so any new validation
        failure (or newly fixed example) makes this test fail.
        """
        fdm_data = FdmSwaggerParser().parse_spec(self.base_data)
        validator = FdmSwaggerValidator(fdm_data)
        models = fdm_data['models']
        operations = fdm_data['operations']
        invalid = set({})
        for operation in operations:
            model_name = operations[operation]['modelName']
            method = operations[operation]['method']
            if method != 'get' and model_name in models:
                if 'example' in models[model_name]:
                    example = models[model_name]['example']
                    try:
                        valid, rez = validator.validate_data(operation, example)
                        # NOTE: the AssertionError from this assert is caught by
                        # the except below, which is how failures are collected.
                        assert valid
                    except Exception:
                        invalid.add(model_name)
        # Known-bad examples shipped inside the fixture spec.
        assert invalid == set(['TCPPortObject',
                               'UDPPortObject',
                               'ICMPv4PortObject',
                               'ICMPv6PortObject',
                               'StandardAccessList',
                               'ExtendedAccessList',
                               'ASPathList',
                               'RouteMap',
                               'StandardCommunityList',
                               'ExpandedCommunityList',
                               'IPV4PrefixList',
                               'IPV6PrefixList',
                               'PolicyList',
                               'SyslogServer',
                               'HAConfiguration',
                               'TestIdentitySource'])

    def test_parse_all_data(self):
        """Every path/method pair in the raw spec must produce exactly one parsed
        operation, and only the expected operations may lack a model name."""
        self.fdm_data = FdmSwaggerParser().parse_spec(self.base_data)
        operations = self.fdm_data['operations']
        without_model_name = []
        expected_operations_counter = 0
        # Count every HTTP method under every path in the raw spec.
        for key in self.base_data['paths']:
            operation = self.base_data['paths'][key]
            for dummy in operation:
                expected_operations_counter += 1
        for key in operations:
            operation = operations[key]
            if not operation['modelName']:
                without_model_name.append(operation['url'])
            if operation['modelName'] == '_File' and 'download' not in operation['url']:
                self.fail('File type can be defined for download operation only')
        assert sorted(['/api/fdm/v2/operational/deploy/{objId}', '/api/fdm/v2/action/upgrade']) == sorted(
            without_model_name)
        assert sorted(self.fdm_data['model_operations'][None].keys()) == sorted(['deleteDeployment', 'startUpgrade'])
        assert expected_operations_counter == len(operations)

View file

@ -0,0 +1,886 @@
# Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import
import copy
import json
import unittest
import pytest
from ansible_collections.community.general.tests.unit.compat import mock
from ansible_collections.community.general.plugins.module_utils.network.ftd.common import FtdServerError, HTTPMethod, ResponseParams, FtdConfigurationError
from ansible_collections.community.general.plugins.module_utils.network.ftd.configuration import DUPLICATE_NAME_ERROR_MESSAGE, UNPROCESSABLE_ENTITY_STATUS, \
MULTIPLE_DUPLICATES_FOUND_ERROR, BaseConfigurationResource, FtdInvalidOperationNameError, QueryParams, \
ADD_OPERATION_NOT_SUPPORTED_ERROR, ParamName
from ansible_collections.community.general.plugins.module_utils.network.ftd.fdm_swagger_client import ValidationError
# Canned server responses shared by the tests below; each marks which code
# path (add/edit/delete/filter/arbitrary request) produced the result.
ADD_RESPONSE = {'status': 'Object added'}
EDIT_RESPONSE = {'status': 'Object edited'}
DELETE_RESPONSE = {'status': 'Object deleted'}
GET_BY_FILTER_RESPONSE = [{'name': 'foo', 'description': 'bar'}]
ARBITRARY_RESPONSE = {'status': 'Arbitrary request sent'}
class TestUpsertOperationUnitTests(unittest.TestCase):
    """Unit tests for the upsert helpers of BaseConfigurationResource.

    Every collaborator (connection, add/edit operations, operation checker) is
    mocked so each helper is exercised in isolation.
    """

    @mock.patch.object(BaseConfigurationResource, '_fetch_system_info')
    def setUp(self, fetch_system_info_mock):
        """Build a resource over a mocked connection with stubbed system info.

        The stub's return value is configured BEFORE the resource is
        constructed. The original code assigned ``return_value`` after the
        constructor had already run (and the decorator's patch is undone when
        setUp returns), so the stubbed build version was never actually
        observed by the resource under test.
        """
        self._conn = mock.MagicMock()
        fetch_system_info_mock.return_value = {
            'databaseInfo': {
                'buildVersion': '6.3.0'
            }
        }
        self._resource = BaseConfigurationResource(self._conn)

    def test_get_operation_name(self):
        """_get_operation_name returns the first operation accepted by the checker, or None."""
        operation_a = mock.MagicMock()
        operation_b = mock.MagicMock()

        def checker_wrapper(expected_object):
            # Produces a checker that accepts exactly one operation object.
            def checker(obj, *args, **kwargs):
                return obj == expected_object
            return checker

        operations = {
            operation_a: "spec",
            operation_b: "spec"
        }
        assert operation_a == self._resource._get_operation_name(checker_wrapper(operation_a), operations)
        assert operation_b == self._resource._get_operation_name(checker_wrapper(operation_b), operations)
        assert self._resource._get_operation_name(checker_wrapper(None), operations) is None

    @mock.patch.object(BaseConfigurationResource, "_get_operation_name")
    @mock.patch.object(BaseConfigurationResource, "add_object")
    def test_add_upserted_object(self, add_object_mock, get_operation_mock):
        """_add_upserted_object resolves the add operation and delegates to add_object."""
        model_operations = mock.MagicMock()
        params = mock.MagicMock()
        add_op_name = get_operation_mock.return_value
        assert add_object_mock.return_value == self._resource._add_upserted_object(model_operations, params)
        get_operation_mock.assert_called_once_with(
            self._resource._operation_checker.is_add_operation,
            model_operations)
        add_object_mock.assert_called_once_with(add_op_name, params)

    @mock.patch.object(BaseConfigurationResource, "_get_operation_name")
    @mock.patch.object(BaseConfigurationResource, "add_object")
    def test_add_upserted_object_with_no_add_operation(self, add_object_mock, get_operation_mock):
        """When no add operation exists, _add_upserted_object raises FtdConfigurationError."""
        model_operations = mock.MagicMock()
        get_operation_mock.return_value = None
        with pytest.raises(FtdConfigurationError) as exc_info:
            self._resource._add_upserted_object(model_operations, mock.MagicMock())
        assert ADD_OPERATION_NOT_SUPPORTED_ERROR in str(exc_info.value)
        get_operation_mock.assert_called_once_with(self._resource._operation_checker.is_add_operation, model_operations)
        add_object_mock.assert_not_called()

    @mock.patch.object(BaseConfigurationResource, "_get_operation_name")
    @mock.patch.object(BaseConfigurationResource, "edit_object")
    @mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.copy_identity_properties')
    @mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration._set_default')
    def test_edit_upserted_object(self, _set_default_mock, copy_properties_mock, edit_object_mock, get_operation_mock):
        """_edit_upserted_object defaults params, copies identity fields and delegates to edit_object."""
        model_operations = mock.MagicMock()
        existing_object = mock.MagicMock()
        params = {
            'path_params': {},
            'data': {}
        }
        result = self._resource._edit_upserted_object(model_operations, existing_object, params)
        assert result == edit_object_mock.return_value
        _set_default_mock.assert_has_calls([
            mock.call(params, 'path_params', {}),
            mock.call(params, 'data', {})
        ])
        get_operation_mock.assert_called_once_with(
            self._resource._operation_checker.is_edit_operation,
            model_operations
        )
        copy_properties_mock.assert_called_once_with(
            existing_object,
            params['data']
        )
        edit_object_mock.assert_called_once_with(
            get_operation_mock.return_value,
            params
        )

    @mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
    @mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
    @mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
    @mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
    @mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
    def test_upsert_object_successfully_added(self, edit_mock, add_mock, find_object, get_operation_mock,
                                              is_upsert_supported_mock):
        """upsert_object adds a new object when no matching object exists."""
        params = mock.MagicMock()
        is_upsert_supported_mock.return_value = True
        find_object.return_value = None
        result = self._resource.upsert_object('upsertFoo', params)
        assert result == add_mock.return_value
        self._conn.get_model_spec.assert_called_once_with('Foo')
        is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
        get_operation_mock.assert_called_once_with('Foo')
        find_object.assert_called_once_with('Foo', params)
        add_mock.assert_called_once_with(get_operation_mock.return_value, params)
        edit_mock.assert_not_called()

    @mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.equal_objects')
    @mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
    @mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
    @mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
    @mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
    @mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
    def test_upsert_object_successfully_edited(self, edit_mock, add_mock, find_object, get_operation_mock,
                                               is_upsert_supported_mock, equal_objects_mock):
        """upsert_object edits the existing object when it differs from the desired state."""
        params = mock.MagicMock()
        existing_obj = mock.MagicMock()
        is_upsert_supported_mock.return_value = True
        find_object.return_value = existing_obj
        equal_objects_mock.return_value = False
        result = self._resource.upsert_object('upsertFoo', params)
        assert result == edit_mock.return_value
        self._conn.get_model_spec.assert_called_once_with('Foo')
        get_operation_mock.assert_called_once_with('Foo')
        is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
        add_mock.assert_not_called()
        equal_objects_mock.assert_called_once_with(existing_obj, params[ParamName.DATA])
        edit_mock.assert_called_once_with(get_operation_mock.return_value, existing_obj, params)

    @mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.equal_objects')
    @mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
    @mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
    @mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
    @mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
    @mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
    def test_upsert_object_returned_without_modifications(self, edit_mock, add_mock, find_object, get_operation_mock,
                                                          is_upsert_supported_mock, equal_objects_mock):
        """upsert_object returns the existing object untouched when it already matches."""
        params = mock.MagicMock()
        existing_obj = mock.MagicMock()
        is_upsert_supported_mock.return_value = True
        find_object.return_value = existing_obj
        equal_objects_mock.return_value = True
        result = self._resource.upsert_object('upsertFoo', params)
        assert result == existing_obj
        self._conn.get_model_spec.assert_called_once_with('Foo')
        get_operation_mock.assert_called_once_with('Foo')
        is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
        add_mock.assert_not_called()
        equal_objects_mock.assert_called_once_with(existing_obj, params[ParamName.DATA])
        edit_mock.assert_not_called()

    @mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
    @mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
    @mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
    @mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
    @mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
    def test_upsert_object_not_supported(self, edit_mock, add_mock, find_object, get_operation_mock,
                                         is_upsert_supported_mock):
        """upsert_object raises when the model does not support upsert."""
        params = mock.MagicMock()
        is_upsert_supported_mock.return_value = False
        self.assertRaises(
            FtdInvalidOperationNameError,
            self._resource.upsert_object, 'upsertFoo', params
        )
        self._conn.get_model_spec.assert_called_once_with('Foo')
        get_operation_mock.assert_called_once_with('Foo')
        is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
        find_object.assert_not_called()
        add_mock.assert_not_called()
        edit_mock.assert_not_called()

    @mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
    @mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
    @mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
    @mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
    @mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
    def test_upsert_object_when_model_not_supported(self, edit_mock, add_mock, find_object, get_operation_mock,
                                                    is_upsert_supported_mock):
        """upsert_object raises early when the model itself is unknown to the API."""
        params = mock.MagicMock()
        self._conn.get_model_spec.return_value = None
        self.assertRaises(
            FtdInvalidOperationNameError,
            self._resource.upsert_object, 'upsertNonExisting', params
        )
        self._conn.get_model_spec.assert_called_once_with('NonExisting')
        get_operation_mock.assert_not_called()
        is_upsert_supported_mock.assert_not_called()
        find_object.assert_not_called()
        add_mock.assert_not_called()
        edit_mock.assert_not_called()

    @mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.equal_objects')
    @mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
    @mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
    @mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
    @mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
    @mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
    def test_upsert_object_with_fatal_error_during_edit(self, edit_mock, add_mock, find_object, get_operation_mock,
                                                        is_upsert_supported_mock, equal_objects_mock):
        """A configuration error raised while editing propagates out of upsert_object."""
        params = mock.MagicMock()
        existing_obj = mock.MagicMock()
        is_upsert_supported_mock.return_value = True
        find_object.return_value = existing_obj
        equal_objects_mock.return_value = False
        edit_mock.side_effect = FtdConfigurationError("Some object edit error")
        self.assertRaises(
            FtdConfigurationError,
            self._resource.upsert_object, 'upsertFoo', params
        )
        is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
        self._conn.get_model_spec.assert_called_once_with('Foo')
        get_operation_mock.assert_called_once_with('Foo')
        find_object.assert_called_once_with('Foo', params)
        add_mock.assert_not_called()
        edit_mock.assert_called_once_with(get_operation_mock.return_value, existing_obj, params)

    @mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
    @mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
    @mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
    @mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
    @mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
    def test_upsert_object_with_fatal_error_during_add(self, edit_mock, add_mock, find_object, get_operation_mock,
                                                       is_upsert_supported_mock):
        """A configuration error raised while adding propagates out of upsert_object."""
        params = mock.MagicMock()
        is_upsert_supported_mock.return_value = True
        find_object.return_value = None
        error = FtdConfigurationError("Obj duplication error")
        add_mock.side_effect = error
        self.assertRaises(
            FtdConfigurationError,
            self._resource.upsert_object, 'upsertFoo', params
        )
        is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
        self._conn.get_model_spec.assert_called_once_with('Foo')
        get_operation_mock.assert_called_once_with('Foo')
        find_object.assert_called_once_with('Foo', params)
        add_mock.assert_called_once_with(get_operation_mock.return_value, params)
        edit_mock.assert_not_called()
# functional tests below
class TestUpsertOperationFunctionalTests(object):
@pytest.fixture(autouse=True)
def connection_mock(self, mocker):
    """Patch the Connection class used by the ftd_configuration module and
    yield its instance mock, with every validator stubbed to report success."""
    patched_class = mocker.patch('ansible_collections.community.general.plugins.modules.network.ftd.ftd_configuration.Connection')
    conn = patched_class.return_value
    for validator in ('validate_data', 'validate_query_params', 'validate_path_params'):
        getattr(conn, validator).return_value = True, None
    return conn
def test_module_should_create_object_when_upsert_operation_and_object_does_not_exist(self, connection_mock):
    """Upsert must fall back to the add operation when the filter query finds nothing."""
    url = '/test'
    operations = {
        'getObjectList': {
            'method': HTTPMethod.GET,
            'url': url,
            'modelName': 'Object',
            'returnMultipleItems': True},
        'addObject': {
            'method': HTTPMethod.POST,
            'modelName': 'Object',
            'url': url},
        'editObject': {
            'method': HTTPMethod.PUT,
            'modelName': 'Object',
            'url': '/test/{objId}'},
        'otherObjectOperation': {
            'method': HTTPMethod.GET,
            'modelName': 'Object',
            'url': '/test/{objId}',
            'returnMultipleItems': False
        }
    }

    def get_operation_spec(name):
        return operations[name]

    def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
        # Stands in for send_request: the GET (lookup) finds no items, so the
        # subsequent POST (add) must receive exactly the module params.
        # NOTE: reads `params` via closure; `params` is assigned below, before
        # the resource invokes this handler.
        if http_method == HTTPMethod.POST:
            assert url_path == url
            assert body_params == params['data']
            assert query_params == {}
            assert path_params == params['path_params']
            return {
                ResponseParams.SUCCESS: True,
                ResponseParams.RESPONSE: ADD_RESPONSE
            }
        elif http_method == HTTPMethod.GET:
            return {
                ResponseParams.SUCCESS: True,
                ResponseParams.RESPONSE: {'items': []}
            }
        else:
            assert False
    connection_mock.get_operation_spec = get_operation_spec
    connection_mock.get_operation_specs_by_model_name.return_value = operations
    connection_mock.send_request = request_handler
    params = {
        'operation': 'upsertObject',
        'data': {'id': '123', 'name': 'testObject', 'type': 'object'},
        'path_params': {'objId': '123'},
        'register_as': 'test_var'
    }
    result = self._resource_execute_operation(params, connection=connection_mock)
    assert ADD_RESPONSE == result
def test_module_should_fail_when_no_model(self, connection_mock):
    """Upserting a model unknown to the API must raise FtdInvalidOperationNameError."""
    connection_mock.get_model_spec.return_value = None
    module_params = {
        'operation': 'upsertObject',
        'data': {'id': '123', 'name': 'testObject', 'type': 'object'},
        'path_params': {'objId': '123'},
        'register_as': 'test_var'
    }
    with pytest.raises(FtdInvalidOperationNameError) as exc_info:
        self._resource_execute_operation(module_params, connection=connection_mock)
    assert exc_info.value.operation_name == 'upsertObject'
def test_module_should_fail_when_no_add_operation_and_no_object(self, connection_mock):
    """If the object is absent and the model exposes no add operation, upsert must fail."""
    url = '/test'
    # Note: deliberately no 'addObject' entry.
    operations = {
        'getObjectList': {
            'method': HTTPMethod.GET,
            'url': url,
            'modelName': 'Object',
            'returnMultipleItems': True},
        'editObject': {
            'method': HTTPMethod.PUT,
            'modelName': 'Object',
            'url': '/test/{objId}'},
        'otherObjectOperation': {
            'method': HTTPMethod.GET,
            'modelName': 'Object',
            'url': '/test/{objId}',
            'returnMultipleItems': False
        }}

    def get_operation_spec(name):
        return operations[name]
    connection_mock.get_operation_spec = get_operation_spec
    connection_mock.get_operation_specs_by_model_name.return_value = operations
    # The lookup GET always returns an empty list -> object does not exist.
    connection_mock.send_request.return_value = {
        ResponseParams.SUCCESS: True,
        ResponseParams.RESPONSE: {'items': []}
    }
    params = {
        'operation': 'upsertObject',
        'data': {'id': '123', 'name': 'testObject', 'type': 'object'},
        'path_params': {'objId': '123'},
        'register_as': 'test_var'
    }
    with pytest.raises(FtdConfigurationError) as exc_info:
        self._resource_execute_operation(params, connection=connection_mock)
    assert ADD_OPERATION_NOT_SUPPORTED_ERROR in str(exc_info.value)
# test when object exists but with different fields(except id)
def test_module_should_update_object_when_upsert_operation_and_object_exists(self, connection_mock):
    """Upsert edits the existing object when the add fails with a duplicate-name
    error and a lookup by name finds an object with different field values."""
    url = '/test'
    obj_id = '456'
    version = 'test_version'
    url_with_id_templ = '{0}/{1}'.format(url, '{objId}')
    new_value = '0000'
    old_value = '1111'
    params = {
        'operation': 'upsertObject',
        'data': {'name': 'testObject', 'value': new_value, 'type': 'object'},
        'register_as': 'test_var'
    }

    def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
        # POST: simulate the duplicate-name failure that triggers the edit path.
        if http_method == HTTPMethod.POST:
            assert url_path == url
            assert body_params == params['data']
            assert query_params == {}
            assert path_params == {}
            return {
                ResponseParams.SUCCESS: False,
                ResponseParams.RESPONSE: DUPLICATE_NAME_ERROR_MESSAGE,
                ResponseParams.STATUS_CODE: UNPROCESSABLE_ENTITY_STATUS
            }
        # GET: both the filtered list lookup and the by-id fetch return the
        # existing object with the old value.
        elif http_method == HTTPMethod.GET:
            is_get_list_req = url_path == url
            is_get_req = url_path == url_with_id_templ
            assert is_get_req or is_get_list_req
            if is_get_list_req:
                assert body_params == {}
                assert query_params == {QueryParams.FILTER: 'name:testObject', 'limit': 10, 'offset': 0}
                assert path_params == {}
            elif is_get_req:
                assert body_params == {}
                assert query_params == {}
                assert path_params == {'objId': obj_id}
            return {
                ResponseParams.SUCCESS: True,
                ResponseParams.RESPONSE: {
                    'items': [
                        {'name': 'testObject', 'value': old_value, 'type': 'object', 'id': obj_id,
                         'version': version}
                    ]
                }
            }
        # PUT: the edit echoes back the body it received.
        elif http_method == HTTPMethod.PUT:
            assert url_path == url_with_id_templ
            return {
                ResponseParams.SUCCESS: True,
                ResponseParams.RESPONSE: body_params
            }
        else:
            assert False

    operations = {
        'getObjectList': {'method': HTTPMethod.GET, 'url': url, 'modelName': 'Object', 'returnMultipleItems': True},
        'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': url},
        'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': url_with_id_templ},
        'otherObjectOperation': {
            'method': HTTPMethod.GET,
            'modelName': 'Object',
            'url': url_with_id_templ,
            'returnMultipleItems': False}
    }

    def get_operation_spec(name):
        return operations[name]
    connection_mock.get_operation_spec = get_operation_spec
    connection_mock.get_operation_specs_by_model_name.return_value = operations
    connection_mock.send_request = request_handler
    # Result carries the new value plus the identity fields of the existing object.
    expected_val = {'name': 'testObject', 'value': new_value, 'type': 'object', 'id': obj_id, 'version': version}
    result = self._resource_execute_operation(params, connection=connection_mock)
    assert expected_val == result
# test when object exists and all fields have the same value
def test_module_should_not_update_object_when_upsert_operation_and_object_exists_with_the_same_fields(
        self, connection_mock):
    """Upsert returns the existing object unchanged (no PUT) when the found
    object already matches all requested fields."""
    url = '/test'
    url_with_id_templ = '{0}/{1}'.format(url, '{objId}')
    params = {
        'operation': 'upsertObject',
        'data': {'name': 'testObject', 'value': '3333', 'type': 'object'},
        'register_as': 'test_var'
    }
    # Existing object = requested data + server-side identity fields.
    expected_val = copy.deepcopy(params['data'])
    expected_val['version'] = 'test_version'
    expected_val['id'] = 'test_id'

    def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
        # POST fails with duplicate-name; GET list returns an identical object.
        # Any PUT would fall into the `assert False` branch, i.e. would fail
        # the test -- no update may happen.
        if http_method == HTTPMethod.POST:
            assert url_path == url
            assert body_params == params['data']
            assert query_params == {}
            assert path_params == {}
            return {
                ResponseParams.SUCCESS: False,
                ResponseParams.RESPONSE: DUPLICATE_NAME_ERROR_MESSAGE,
                ResponseParams.STATUS_CODE: UNPROCESSABLE_ENTITY_STATUS
            }
        elif http_method == HTTPMethod.GET:
            assert url_path == url
            assert body_params == {}
            assert query_params == {QueryParams.FILTER: 'name:testObject', 'limit': 10, 'offset': 0}
            assert path_params == {}
            return {
                ResponseParams.SUCCESS: True,
                ResponseParams.RESPONSE: {
                    'items': [expected_val]
                }
            }
        else:
            assert False

    operations = {
        'getObjectList': {'method': HTTPMethod.GET, 'modelName': 'Object', 'url': url, 'returnMultipleItems': True},
        'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': url},
        'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': url_with_id_templ},
        'otherObjectOperation': {
            'method': HTTPMethod.GET,
            'modelName': 'Object',
            'url': url_with_id_templ,
            'returnMultipleItems': False}
    }

    def get_operation_spec(name):
        return operations[name]
    connection_mock.get_operation_spec = get_operation_spec
    connection_mock.get_operation_specs_by_model_name.return_value = operations
    connection_mock.send_request = request_handler
    result = self._resource_execute_operation(params, connection=connection_mock)
    assert expected_val == result
def test_module_should_fail_when_upsert_operation_is_not_supported(self, connection_mock):
    """Without a list (getObjectList-style) operation the model cannot be
    upserted; the module must fail before sending any request."""
    available_operations = {
        'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': '/test'},
        'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': '/test/{objId}'},
        'otherObjectOperation': {
            'method': HTTPMethod.GET,
            'modelName': 'Object',
            'url': '/test/{objId}',
            'returnMultipleItems': False}
    }
    connection_mock.get_operation_specs_by_model_name.return_value = available_operations
    upsert_op_name = 'upsertObject'
    module_params = {
        'operation': upsert_op_name,
        'data': {'id': '123', 'name': 'testObject', 'type': 'object'},
        'path_params': {'objId': '123'},
        'register_as': 'test_var'
    }
    failure = self._resource_execute_operation_with_expected_failure(
        expected_exception_class=FtdInvalidOperationNameError,
        params=module_params, connection=connection_mock)
    connection_mock.send_request.assert_not_called()
    assert failure.operation_name == upsert_op_name
# when create operation raised FtdConfigurationError exception without id and version
def test_module_should_fail_when_upsert_operation_and_failed_create_without_id_and_version(self, connection_mock):
    """If the add fails with a duplicate-name error but the follow-up lookup
    finds nothing, the original server error must be re-raised."""
    url = '/test'
    url_with_id_templ = '{0}/{1}'.format(url, '{objId}')
    params = {
        'operation': 'upsertObject',
        'data': {'name': 'testObject', 'value': '3333', 'type': 'object'},
        'register_as': 'test_var'
    }

    def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
        # POST: duplicate-name failure; GET: empty result, so there is no
        # existing object to fall back to editing.
        if http_method == HTTPMethod.POST:
            assert url_path == url
            assert body_params == params['data']
            assert query_params == {}
            assert path_params == {}
            return {
                ResponseParams.SUCCESS: False,
                ResponseParams.RESPONSE: DUPLICATE_NAME_ERROR_MESSAGE,
                ResponseParams.STATUS_CODE: UNPROCESSABLE_ENTITY_STATUS
            }
        elif http_method == HTTPMethod.GET:
            assert url_path == url
            assert body_params == {}
            assert query_params == {QueryParams.FILTER: 'name:testObject', 'limit': 10, 'offset': 0}
            assert path_params == {}
            return {
                ResponseParams.SUCCESS: True,
                ResponseParams.RESPONSE: {
                    'items': []
                }
            }
        else:
            assert False

    operations = {
        'getObjectList': {'method': HTTPMethod.GET, 'modelName': 'Object', 'url': url, 'returnMultipleItems': True},
        'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': url},
        'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': url_with_id_templ},
        'otherObjectOperation': {
            'method': HTTPMethod.GET,
            'modelName': 'Object',
            'url': url_with_id_templ,
            'returnMultipleItems': False}
    }

    def get_operation_spec(name):
        return operations[name]
    connection_mock.get_operation_spec = get_operation_spec
    connection_mock.get_operation_specs_by_model_name.return_value = operations
    connection_mock.send_request = request_handler
    result = self._resource_execute_operation_with_expected_failure(
        expected_exception_class=FtdServerError,
        params=params, connection=connection_mock)
    # 422 / duplicate-name message from the original failed POST.
    assert result.code == 422
    assert result.response == 'Validation failed due to a duplicate name'
def test_module_should_fail_when_upsert_operation_and_failed_update_operation(self, connection_mock):
    """A server error raised by the edit (PUT) step of an upsert must propagate
    to the caller with its original code and message."""
    url = '/test'
    obj_id = '456'
    version = 'test_version'
    url_with_id_templ = '{0}/{1}'.format(url, '{objId}')
    error_code = 404
    new_value = '0000'
    old_value = '1111'
    params = {
        'operation': 'upsertObject',
        'data': {'name': 'testObject', 'value': new_value, 'type': 'object'},
        'register_as': 'test_var'
    }
    error_msg = 'test error'

    def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
        # POST fails with duplicate-name, GET finds the existing object, and
        # PUT simulates the server-side failure under test.
        if http_method == HTTPMethod.POST:
            assert url_path == url
            assert body_params == params['data']
            assert query_params == {}
            assert path_params == {}
            return {
                ResponseParams.SUCCESS: False,
                ResponseParams.RESPONSE: DUPLICATE_NAME_ERROR_MESSAGE,
                ResponseParams.STATUS_CODE: UNPROCESSABLE_ENTITY_STATUS
            }
        elif http_method == HTTPMethod.GET:
            is_get_list_req = url_path == url
            is_get_req = url_path == url_with_id_templ
            assert is_get_req or is_get_list_req
            if is_get_list_req:
                assert body_params == {}
                assert query_params == {QueryParams.FILTER: 'name:testObject', 'limit': 10, 'offset': 0}
            elif is_get_req:
                assert body_params == {}
                assert query_params == {}
                assert path_params == {'objId': obj_id}
            return {
                ResponseParams.SUCCESS: True,
                ResponseParams.RESPONSE: {
                    'items': [
                        {'name': 'testObject', 'value': old_value, 'type': 'object', 'id': obj_id,
                         'version': version}
                    ]
                }
            }
        elif http_method == HTTPMethod.PUT:
            assert url_path == url_with_id_templ
            raise FtdServerError(error_msg, error_code)
        else:
            assert False

    operations = {
        'getObjectList': {'method': HTTPMethod.GET, 'modelName': 'Object', 'url': url, 'returnMultipleItems': True},
        'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': url},
        'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': url_with_id_templ},
        'otherObjectOperation': {
            'method': HTTPMethod.GET,
            'modelName': 'Object',
            'url': url_with_id_templ,
            'returnMultipleItems': False}
    }

    def get_operation_spec(name):
        return operations[name]
    connection_mock.get_operation_spec = get_operation_spec
    connection_mock.get_operation_specs_by_model_name.return_value = operations
    connection_mock.send_request = request_handler
    result = self._resource_execute_operation_with_expected_failure(
        expected_exception_class=FtdServerError,
        params=params, connection=connection_mock)
    assert result.code == error_code
    assert result.response == error_msg
def test_module_should_fail_when_upsert_operation_and_invalid_data_for_create_operation(self, connection_mock):
    """Upsert must surface a ValidationError when the payload fails the model validation step."""
    new_value = '0000'
    params = {
        'operation': 'upsertObject',
        'data': {'name': 'testObject', 'value': new_value, 'type': 'object'},
        'register_as': 'test_var'
    }

    # Sanity check: nothing has been sent to the device before the operation runs.
    connection_mock.send_request.assert_not_called()
    operations = {
        'getObjectList': {
            'method': HTTPMethod.GET,
            'modelName': 'Object',
            'url': 'sd',
            'returnMultipleItems': True},
        'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': 'sdf'},
        'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': 'sadf'},
        'otherObjectOperation': {
            'method': HTTPMethod.GET,
            'modelName': 'Object',
            'url': 'sdfs',
            'returnMultipleItems': False}
    }

    def get_operation_spec(name):
        # Look up the canned operation spec by name, mimicking the connection plugin.
        return operations[name]
    connection_mock.get_operation_spec = get_operation_spec
    connection_mock.get_operation_specs_by_model_name.return_value = operations

    # Validation report the mocked connection pretends the API model validation produced.
    report = {
        'required': ['objects[0].type'],
        'invalid_type': [
            {
                'path': 'objects[3].id',
                'expected_type': 'string',
                'actually_value': 1
            }
        ]
    }
    connection_mock.validate_data.return_value = (False, json.dumps(report, sort_keys=True, indent=4))
    key = 'Invalid data provided'

    result = self._resource_execute_operation_with_expected_failure(
        expected_exception_class=ValidationError,
        params=params, connection=connection_mock)

    # The exception carries a single dict argument keyed by the error message,
    # whose value is the JSON-encoded validation report.
    assert len(result.args) == 1
    assert key in result.args[0]
    assert json.loads(result.args[0][key]) == {
        'invalid_type': [{'actually_value': 1, 'expected_type': 'string', 'path': 'objects[3].id'}],
        'required': ['objects[0].type']
    }
def test_module_should_fail_when_upsert_operation_and_few_objects_found_by_filter(self, connection_mock):
    """Upsert must fail with FtdConfigurationError when the name filter matches multiple objects."""
    url = '/test'
    url_with_id_templ = '{0}/{1}'.format(url, '{objId}')

    sample_obj = {'name': 'testObject', 'value': '3333', 'type': 'object'}
    params = {
        'operation': 'upsertObject',
        'data': sample_obj,
        'register_as': 'test_var'
    }

    def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
        # Fake send_request: validates every outgoing request and returns canned responses.
        if http_method == HTTPMethod.POST:
            # The create attempt is rejected with a duplicate-name error,
            # which makes the resource fall back to the find-by-filter path.
            assert url_path == url
            assert body_params == params['data']
            assert query_params == {}
            assert path_params == {}
            return {
                ResponseParams.SUCCESS: False,
                ResponseParams.RESPONSE: DUPLICATE_NAME_ERROR_MESSAGE,
                ResponseParams.STATUS_CODE: UNPROCESSABLE_ENTITY_STATUS
            }
        elif http_method == HTTPMethod.GET:
            # The duplicate lookup returns two objects with the same name,
            # which is ambiguous and must abort the upsert.
            assert url_path == url
            assert body_params == {}
            assert query_params == {QueryParams.FILTER: 'name:testObject', 'limit': 10, 'offset': 0}
            assert path_params == {}
            return {
                ResponseParams.SUCCESS: True,
                ResponseParams.RESPONSE: {
                    'items': [sample_obj, sample_obj]
                }
            }
        else:
            # No other HTTP method should ever be issued in this scenario.
            assert False

    operations = {
        'getObjectList': {'method': HTTPMethod.GET, 'modelName': 'Object', 'url': url, 'returnMultipleItems': True},
        'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': url},
        'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': url_with_id_templ},
        'otherObjectOperation': {
            'method': HTTPMethod.GET,
            'modelName': 'Object',
            'url': url_with_id_templ,
            'returnMultipleItems': False}
    }

    def get_operation_spec(name):
        # Look up the canned operation spec by name, mimicking the connection plugin.
        return operations[name]
    connection_mock.get_operation_spec = get_operation_spec
    connection_mock.get_operation_specs_by_model_name.return_value = operations
    connection_mock.send_request = request_handler

    result = self._resource_execute_operation_with_expected_failure(
        expected_exception_class=FtdConfigurationError,
        params=params, connection=connection_mock)

    assert result.msg is MULTIPLE_DUPLICATES_FOUND_ERROR
    assert result.obj is None
@staticmethod
def _resource_execute_operation(params, connection):
    """Run the requested operation on a fresh BaseConfigurationResource.

    The resource's system-info lookup is stubbed out so no real device
    interaction happens; a fixed FTD build version is reported instead.
    """
    with mock.patch.object(BaseConfigurationResource, '_fetch_system_info') as system_info_mock:
        # Pretend the device reports a fixed software version.
        system_info_mock.return_value = {'databaseInfo': {'buildVersion': '6.3.0'}}
        resource = BaseConfigurationResource(connection)
        return resource.execute_operation(params['operation'], params)
def _resource_execute_operation_with_expected_failure(self, expected_exception_class, params, connection):
    """Run the operation expecting it to raise, and return the raised exception instance.

    pytest.raises yields a '_pytest._code.code.ExceptionInfo' wrapper rather than
    the exception itself; callers want the real exception object, which lives in
    the wrapper's 'value' attribute.
    """
    with pytest.raises(expected_exception_class) as exc_info:
        self._resource_execute_operation(params, connection)
    return exc_info.value

View file

@ -0,0 +1,176 @@
# Copyright (c) 2017 Citrix Systems
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.tests.unit.compat.mock import Mock
from ansible_collections.community.general.plugins.module_utils.network.netscaler.netscaler import (ConfigProxy, get_immutables_intersection,
ensure_feature_is_enabled, log, loglines)
class TestNetscalerConfigProxy(unittest.TestCase):
    """Unit tests for ConfigProxy, which copies playbook values onto NITRO config objects.

    Note: leftover debug print() statements from the original tests were removed;
    they produced noise on every run and asserted nothing.
    """

    def test_values_copied_to_actual(self):
        """A value listed in readwrite_attrs is copied onto the wrapped object."""
        actual = Mock()
        client = Mock()
        values = {
            'some_key': 'some_value',
        }
        ConfigProxy(
            actual=actual,
            client=client,
            attribute_values_dict=values,
            readwrite_attrs=['some_key']
        )
        self.assertEqual(actual.some_key, values['some_key'], msg='Failed to pass correct value from values dict')

    def test_none_values_not_copied_to_actual(self):
        """A None value must not overwrite an existing attribute on the wrapped object."""
        actual = Mock()
        client = Mock()
        actual.key_for_none = 'initial'
        values = {
            'key_for_none': None,
        }
        ConfigProxy(
            actual=actual,
            client=client,
            attribute_values_dict=values,
            readwrite_attrs=['key_for_none']
        )
        self.assertEqual(actual.key_for_none, 'initial')

    def test_missing_from_values_dict_not_copied_to_actual(self):
        """An attribute absent from the values dict is left untouched on the wrapped object."""
        actual = Mock()
        client = Mock()
        values = {
            'irrelevant_key': 'irrelevant_value',
        }
        ConfigProxy(
            actual=actual,
            client=client,
            attribute_values_dict=values,
            readwrite_attrs=['key_for_none']
        )
        # Nothing was assigned, so attribute access falls through to the Mock itself.
        self.assertIsInstance(actual.key_for_none, Mock)

    def test_bool_yes_no_transform(self):
        """The bool_yes_no transform maps True/False to the strings 'YES'/'NO'."""
        actual = Mock()
        client = Mock()
        values = {
            'yes_key': True,
            'no_key': False,
        }
        transforms = {
            'yes_key': ['bool_yes_no'],
            'no_key': ['bool_yes_no']
        }
        ConfigProxy(
            actual=actual,
            client=client,
            attribute_values_dict=values,
            readwrite_attrs=['yes_key', 'no_key'],
            transforms=transforms,
        )
        actual_values = [actual.yes_key, actual.no_key]
        self.assertListEqual(actual_values, ['YES', 'NO'])

    def test_bool_on_off_transform(self):
        """The bool_on_off transform maps True/False to the strings 'ON'/'OFF'."""
        actual = Mock()
        client = Mock()
        values = {
            'on_key': True,
            'off_key': False,
        }
        transforms = {
            'on_key': ['bool_on_off'],
            'off_key': ['bool_on_off']
        }
        ConfigProxy(
            actual=actual,
            client=client,
            attribute_values_dict=values,
            readwrite_attrs=['on_key', 'off_key'],
            transforms=transforms,
        )
        actual_values = [actual.on_key, actual.off_key]
        self.assertListEqual(actual_values, ['ON', 'OFF'])

    def test_callable_transform(self):
        """Callable transforms are applied to the value; a list of callables is chained in order."""
        actual = Mock()
        client = Mock()
        values = {
            'transform_key': 'hello',
            'transform_chain': 'hello',
        }
        transforms = {
            'transform_key': [lambda v: v.upper()],
            'transform_chain': [lambda v: v.upper(), lambda v: v[:4]]
        }
        ConfigProxy(
            actual=actual,
            client=client,
            attribute_values_dict=values,
            readwrite_attrs=['transform_key', 'transform_chain'],
            transforms=transforms,
        )
        actual_values = [actual.transform_key, actual.transform_chain]
        self.assertListEqual(actual_values, ['HELLO', 'HELL'])
class TestNetscalerModuleUtils(unittest.TestCase):
    """Tests for the netscaler module_utils helper functions."""

    def test_immutables_intersection(self):
        """Only immutable attributes present in both lists are reported; unknown keys are ignored."""
        actual = Mock()
        client = Mock()
        values = {
            'mutable_key': 'some value',
            'immutable_key': 'some other value',
        }
        proxy = ConfigProxy(
            actual=actual,
            client=client,
            attribute_values_dict=values,
            readwrite_attrs=['mutable_key', 'immutable_key'],
            immutable_attrs=['immutable_key'],
        )
        keys_to_check = ['mutable_key', 'immutable_key', 'non_existant_key']
        result = get_immutables_intersection(proxy, keys_to_check)
        self.assertListEqual(result, ['immutable_key'])

    def test_ensure_feature_is_enabled(self):
        """ensure_feature_is_enabled only enables features not already reported as enabled."""
        client = Mock()
        attrs = {'get_enabled_features.return_value': ['GSLB']}
        client.configure_mock(**attrs)
        ensure_feature_is_enabled(client, 'GSLB')
        ensure_feature_is_enabled(client, 'LB')
        # GSLB was already enabled, so only the LB call triggers enable_features.
        client.enable_features.assert_called_once_with('LB')

    def test_log_function(self):
        """log() appends messages to the module-level loglines buffer in order."""
        messages = [
            'First message',
            'Second message',
        ]
        log(messages[0])
        log(messages[1])
        self.assertListEqual(messages, loglines, msg='Log messages not recorded correctly')

View file

@ -0,0 +1,148 @@
#
# (c) 2018 Extreme Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from mock import MagicMock, patch, call
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.module_utils.network.nos import nos
class TestPluginCLIConfNOS(unittest.TestCase):
    """ Test class for NOS CLI Conf Methods
    """

    def test_get_connection_established(self):
        """ Test get_connection with established connection
        """
        module = MagicMock()
        connection = nos.get_connection(module)
        # An existing module.nos_connection attribute is reused as-is.
        self.assertEqual(connection, module.nos_connection)

    @patch('ansible_collections.community.general.plugins.module_utils.network.nos.nos.Connection')
    def test_get_connection_new(self, connection):
        """ Test get_connection with new connection
        """
        socket_path = "little red riding hood"
        module = MagicMock(spec=[
            'fail_json',
        ])
        module._socket_path = socket_path

        connection().get_capabilities.return_value = '{"network_api": "cliconf"}'
        returned_connection = nos.get_connection(module)
        # The new Connection must be built from the module's socket path.
        connection.assert_called_with(socket_path)
        self.assertEqual(returned_connection, module.nos_connection)

    @patch('ansible_collections.community.general.plugins.module_utils.network.nos.nos.Connection')
    def test_get_connection_incorrect_network_api(self, connection):
        """ Test get_connection with incorrect network_api response
        """
        socket_path = "little red riding hood"
        module = MagicMock(spec=[
            'fail_json',
        ])
        module._socket_path = socket_path
        # fail_json raising TypeError lets us assert that the failure path was taken.
        module.fail_json.side_effect = TypeError

        connection().get_capabilities.return_value = '{"network_api": "nope"}'

        with self.assertRaises(TypeError):
            nos.get_connection(module)

    @patch('ansible_collections.community.general.plugins.module_utils.network.nos.nos.Connection')
    def test_get_capabilities(self, connection):
        """ Test get_capabilities
        """
        socket_path = "little red riding hood"
        module = MagicMock(spec=[
            'fail_json',
        ])
        module._socket_path = socket_path
        module.fail_json.side_effect = TypeError

        capabilities = {'network_api': 'cliconf'}
        connection().get_capabilities.return_value = json.dumps(capabilities)

        capabilities_returned = nos.get_capabilities(module)
        self.assertEqual(capabilities, capabilities_returned)

    @patch('ansible_collections.community.general.plugins.module_utils.network.nos.nos.Connection')
    def test_run_commands(self, connection):
        """ Test run_commands
        """
        module = MagicMock()
        commands = [
            'hello',
            'dolly',
            'well hello',
            'dolly',
            'its so nice to have you back',
            'where you belong',
        ]
        responses = [
            'Dolly, never go away again1',
            'Dolly, never go away again2',
            'Dolly, never go away again3',
            'Dolly, never go away again4',
            'Dolly, never go away again5',
            'Dolly, never go away again6',
        ]
        module.nos_connection.get.side_effect = responses
        run_command_responses = nos.run_commands(module, commands)
        # Each command is fetched with (command, None, None) in order.
        calls = []
        for command in commands:
            calls.append(call(
                command,
                None,
                None
            ))
        module.nos_connection.get.assert_has_calls(calls)
        self.assertEqual(responses, run_command_responses)

    @patch('ansible_collections.community.general.plugins.module_utils.network.nos.nos.Connection')
    def test_load_config(self, connection):
        """ Test load_config
        """
        module = MagicMock()
        commands = [
            'what does it take',
            'to be',
            'number one?',
            'two is not a winner',
            'and three nobody remember',
        ]
        nos.load_config(module, commands)
        module.nos_connection.edit_config.assert_called_once_with(commands)

View file

@ -0,0 +1,659 @@
# Copyright (c) 2017 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
import json
from ansible_collections.community.general.tests.unit.compat.mock import patch
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.module_utils.network.nso import nso
MODULE_PREFIX_MAP = '''
{
"ansible-nso": "an",
"test": "test",
"tailf-ncs": "ncs"
}
'''
SCHEMA_DATA = {
'/an:id-name-leaf': '''
{
"meta": {
"prefix": "an",
"namespace": "http://github.com/ansible/nso",
"types": {
"http://github.com/ansible/nso:id-name-t": [
{
"name": "http://github.com/ansible/nso:id-name-t",
"enumeration": [
{
"label": "id-one"
},
{
"label": "id-two"
}
]
},
{
"name": "identityref"
}
]
},
"keypath": "/an:id-name-leaf"
},
"data": {
"kind": "leaf",
"type": {
"namespace": "http://github.com/ansible/nso",
"name": "id-name-t"
},
"name": "id-name-leaf",
"qname": "an:id-name-leaf"
}
}''',
'/an:id-name-values': '''
{
"meta": {
"prefix": "an",
"namespace": "http://github.com/ansible/nso",
"types": {},
"keypath": "/an:id-name-values"
},
"data": {
"kind": "container",
"name": "id-name-values",
"qname": "an:id-name-values",
"children": [
{
"kind": "list",
"name": "id-name-value",
"qname": "an:id-name-value",
"key": [
"name"
]
}
]
}
}
''',
'/an:id-name-values/id-name-value': '''
{
"meta": {
"prefix": "an",
"namespace": "http://github.com/ansible/nso",
"types": {
"http://github.com/ansible/nso:id-name-t": [
{
"name": "http://github.com/ansible/nso:id-name-t",
"enumeration": [
{
"label": "id-one"
},
{
"label": "id-two"
}
]
},
{
"name": "identityref"
}
]
},
"keypath": "/an:id-name-values/id-name-value"
},
"data": {
"kind": "list",
"name": "id-name-value",
"qname": "an:id-name-value",
"key": [
"name"
],
"children": [
{
"kind": "key",
"name": "name",
"qname": "an:name",
"type": {
"namespace": "http://github.com/ansible/nso",
"name": "id-name-t"
}
},
{
"kind": "leaf",
"type": {
"primitive": true,
"name": "string"
},
"name": "value",
"qname": "an:value"
}
]
}
}
''',
'/test:test': '''
{
"meta": {
"types": {
"http://example.com/test:t15": [
{
"leaf_type":[
{
"name":"string"
}
],
"list_type":[
{
"name":"http://example.com/test:t15",
"leaf-list":true
}
]
}
]
}
},
"data": {
"kind": "list",
"name":"test",
"qname":"test:test",
"key":["name"],
"children": [
{
"kind": "key",
"name": "name",
"qname": "test:name",
"type": {"name":"string","primitive":true}
},
{
"kind": "choice",
"name": "test-choice",
"qname": "test:test-choice",
"cases": [
{
"kind": "case",
"name": "direct-child-case",
"qname":"test:direct-child-case",
"children":[
{
"kind": "leaf",
"name": "direct-child",
"qname": "test:direct-child",
"type": {"name":"string","primitive":true}
}
]
},
{
"kind":"case","name":"nested-child-case","qname":"test:nested-child-case",
"children": [
{
"kind": "choice",
"name": "nested-choice",
"qname": "test:nested-choice",
"cases": [
{
"kind":"case","name":"nested-child","qname":"test:nested-child",
"children": [
{
"kind": "leaf",
"name":"nested-child",
"qname":"test:nested-child",
"type":{"name":"string","primitive":true}}
]
}
]
}
]
}
]
},
{
"kind":"leaf-list",
"name":"device-list",
"qname":"test:device-list",
"type": {
"namespace":"http://example.com/test",
"name":"t15"
}
}
]
}
}
''',
'/test:test/device-list': '''
{
"meta": {
"types": {
"http://example.com/test:t15": [
{
"leaf_type":[
{
"name":"string"
}
],
"list_type":[
{
"name":"http://example.com/test:t15",
"leaf-list":true
}
]
}
]
}
},
"data": {
"kind":"leaf-list",
"name":"device-list",
"qname":"test:device-list",
"type": {
"namespace":"http://example.com/test",
"name":"t15"
}
}
}
''',
'/test:deps': '''
{
"meta": {
},
"data": {
"kind":"container",
"name":"deps",
"qname":"test:deps",
"children": [
{
"kind": "leaf",
"type": {
"primitive": true,
"name": "string"
},
"name": "a",
"qname": "test:a",
"deps": ["/test:deps/c"]
},
{
"kind": "leaf",
"type": {
"primitive": true,
"name": "string"
},
"name": "b",
"qname": "test:b",
"deps": ["/test:deps/a"]
},
{
"kind": "leaf",
"type": {
"primitive": true,
"name": "string"
},
"name": "c",
"qname": "test:c"
}
]
}
}
'''
}
class MockResponse(object):
    """Minimal stand-in for the response object returned by open_url.

    Records the expected JSON-RPC method/params alongside the canned HTTP
    status code and body that the mocked transport should hand back.
    """

    def __init__(self, method, params, code, body, headers=None):
        self.method = method
        self.params = params
        self.code = code
        self.body = body
        # Copy so callers' dicts are never shared or mutated.
        self.headers = {} if headers is None else dict(headers)

    def read(self):
        """Mimic the file-like read() of a urllib response."""
        return self.body
def mock_call(calls, url, timeout, validate_certs, data=None, headers=None, method=None):
    """Pop the next expected response from *calls* and verify the request matches it.

    The JSON-RPC request body in *data* must use the expected method, and every
    expected parameter must be present with the expected value (extra request
    parameters are tolerated). Raises ValueError on any mismatch.
    """
    expected = calls.pop(0)
    request = json.loads(data)
    if expected.method != request['method']:
        raise ValueError('expected method {0}({1}), got {2}({3})'.format(
            expected.method, expected.params,
            request['method'], request['params']))
    for key, value in expected.params.items():
        if key not in request['params']:
            raise ValueError('{0} not in parameters'.format(key))
        if value != request['params'][key]:
            raise ValueError('expected {0} to be {1}, got {2}'.format(
                key, value, request['params'][key]))
    return expected
def get_schema_response(path):
    """Build a canned successful 'get_schema' JSON-RPC response for *path*.

    The response body wraps the schema JSON from SCHEMA_DATA in a
    {"result": ...} envelope, as the real server would.
    """
    body = '{{"result": {0}}}'.format(SCHEMA_DATA[path])
    return MockResponse('get_schema', {'path': path}, 200, body)
class TestJsonRpc(unittest.TestCase):
    """Tests for the low-level nso.JsonRpc client."""

    @patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
    def test_exists(self, open_url_mock):
        """exists() reflects the boolean returned by the 'exists' JSON-RPC method."""
        calls = [
            MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
            MockResponse('exists', {'path': '/exists'}, 200, '{"result": {"exists": true}}'),
            MockResponse('exists', {'path': '/not-exists'}, 200, '{"result": {"exists": false}}')
        ]
        open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)

        client = nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False)
        self.assertEqual(True, client.exists('/exists'))
        self.assertEqual(False, client.exists('/not-exists'))
        # All queued responses must have been consumed.
        self.assertEqual(0, len(calls))

    @patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
    def test_exists_data_not_found(self, open_url_mock):
        """A data.not_found error from the server is treated as 'does not exist'."""
        calls = [
            MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
            MockResponse('exists', {'path': '/list{missing-parent}/list{child}'}, 200, '{"error":{"type":"data.not_found"}}')
        ]
        open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)

        client = nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False)
        self.assertEqual(False, client.exists('/list{missing-parent}/list{child}'))
        self.assertEqual(0, len(calls))
class TestValueBuilder(unittest.TestCase):
    """Tests for nso.ValueBuilder, which flattens Ansible params into NSO values.

    Each test queues the exact JSON-RPC exchange (via MockResponse/mock_call)
    that building the values is expected to perform, and asserts at the end
    that every queued response was consumed.
    """

    @patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
    def test_identityref_leaf(self, open_url_mock):
        """An identityref leaf value gets its module name rewritten to the YANG prefix."""
        calls = [
            MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
            MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
            get_schema_response('/an:id-name-leaf'),
            MockResponse('get_module_prefix_map', {}, 200, '{{"result": {0}}}'.format(MODULE_PREFIX_MAP))
        ]
        open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)

        parent = "/an:id-name-leaf"
        schema_data = json.loads(
            SCHEMA_DATA['/an:id-name-leaf'])
        schema = schema_data['data']

        vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
        vb.build(parent, None, 'ansible-nso:id-two', schema)
        values = list(vb.values)
        self.assertEqual(1, len(values))
        value = values[0]
        self.assertEqual(parent, value.path)
        self.assertEqual('set', value.state)
        # 'ansible-nso:' module name is translated to the 'an:' prefix.
        self.assertEqual('an:id-two', value.value)
        self.assertEqual(0, len(calls))

    @patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
    def test_identityref_key(self, open_url_mock):
        """An identityref used as a list key is prefix-translated inside the keypath."""
        calls = [
            MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
            MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
            get_schema_response('/an:id-name-values/id-name-value'),
            MockResponse('get_module_prefix_map', {}, 200, '{{"result": {0}}}'.format(MODULE_PREFIX_MAP)),
            MockResponse('exists', {'path': '/an:id-name-values/id-name-value{an:id-one}'}, 200, '{"result": {"exists": true}}')
        ]
        open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)

        parent = "/an:id-name-values"
        schema_data = json.loads(
            SCHEMA_DATA['/an:id-name-values/id-name-value'])
        schema = schema_data['data']

        vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
        vb.build(parent, 'id-name-value', [{'name': 'ansible-nso:id-one', 'value': '1'}], schema)
        values = list(vb.values)
        self.assertEqual(1, len(values))
        value = values[0]
        self.assertEqual('{0}/id-name-value{{an:id-one}}/value'.format(parent), value.path)
        self.assertEqual('set', value.state)
        self.assertEqual('1', value.value)
        self.assertEqual(0, len(calls))

    @patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
    def test_nested_choice(self, open_url_mock):
        """Values inside both direct and nested choice/case branches are resolved."""
        calls = [
            MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
            MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
            get_schema_response('/test:test'),
            MockResponse('exists', {'path': '/test:test{direct}'}, 200, '{"result": {"exists": true}}'),
            MockResponse('exists', {'path': '/test:test{nested}'}, 200, '{"result": {"exists": true}}')
        ]
        open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)

        parent = "/test:test"
        schema_data = json.loads(
            SCHEMA_DATA['/test:test'])
        schema = schema_data['data']

        vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
        vb.build(parent, None, [{'name': 'direct', 'direct-child': 'direct-value'},
                                {'name': 'nested', 'nested-child': 'nested-value'}], schema)
        values = list(vb.values)
        self.assertEqual(2, len(values))
        value = values[0]
        self.assertEqual('{0}{{direct}}/direct-child'.format(parent), value.path)
        self.assertEqual('set', value.state)
        self.assertEqual('direct-value', value.value)
        value = values[1]
        self.assertEqual('{0}{{nested}}/nested-child'.format(parent), value.path)
        self.assertEqual('set', value.state)
        self.assertEqual('nested-value', value.value)
        self.assertEqual(0, len(calls))

    @patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
    def test_leaf_list_type(self, open_url_mock):
        """On NSO 4.4 a leaf-list is set as a single value holding the whole list."""
        calls = [
            MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.4"}'),
            MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
            get_schema_response('/test:test')
        ]
        open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)

        parent = "/test:test"
        schema_data = json.loads(
            SCHEMA_DATA['/test:test'])
        schema = schema_data['data']

        vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
        vb.build(parent, None, {'device-list': ['one', 'two']}, schema)
        values = list(vb.values)
        self.assertEqual(1, len(values))
        value = values[0]
        self.assertEqual('{0}/device-list'.format(parent), value.path)
        self.assertEqual(['one', 'two'], value.value)
        self.assertEqual(0, len(calls))

    @patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
    def test_leaf_list_type_45(self, open_url_mock):
        """On NSO 4.5+ a leaf-list is cleared then rebuilt entry by entry."""
        calls = [
            MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
            MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
            get_schema_response('/test:test/device-list')
        ]
        open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)

        parent = "/test:test"
        schema_data = json.loads(
            SCHEMA_DATA['/test:test'])
        schema = schema_data['data']

        vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
        vb.build(parent, None, {'device-list': ['one', 'two']}, schema)
        values = list(vb.values)
        self.assertEqual(3, len(values))
        # First the whole leaf-list is marked absent, then each entry present.
        value = values[0]
        self.assertEqual('{0}/device-list'.format(parent), value.path)
        self.assertEqual(nso.State.ABSENT, value.state)
        value = values[1]
        self.assertEqual('{0}/device-list{{one}}'.format(parent), value.path)
        self.assertEqual(nso.State.PRESENT, value.state)
        value = values[2]
        self.assertEqual('{0}/device-list{{two}}'.format(parent), value.path)
        self.assertEqual(nso.State.PRESENT, value.state)
        self.assertEqual(0, len(calls))

    @patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
    def test_sort_by_deps(self, open_url_mock):
        """Values are emitted in schema dependency order (c before a, a before b)."""
        calls = [
            MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
            MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
            get_schema_response('/test:deps')
        ]
        open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)

        parent = "/test:deps"
        schema_data = json.loads(
            SCHEMA_DATA['/test:deps'])
        schema = schema_data['data']

        values = {
            'a': '1',
            'b': '2',
            'c': '3',
        }

        vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
        vb.build(parent, None, values, schema)
        values = list(vb.values)
        self.assertEqual(3, len(values))
        value = values[0]
        self.assertEqual('{0}/c'.format(parent), value.path)
        self.assertEqual('3', value.value)
        value = values[1]
        self.assertEqual('{0}/a'.format(parent), value.path)
        self.assertEqual('1', value.value)
        value = values[2]
        self.assertEqual('{0}/b'.format(parent), value.path)
        self.assertEqual('2', value.value)
        self.assertEqual(0, len(calls))

    @patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
    def test_sort_by_deps_not_included(self, open_url_mock):
        """A dependency on a value not being set does not disturb ordering of the rest."""
        calls = [
            MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
            MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
            get_schema_response('/test:deps')
        ]
        open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)

        parent = "/test:deps"
        schema_data = json.loads(
            SCHEMA_DATA['/test:deps'])
        schema = schema_data['data']

        values = {
            'a': '1',
            'b': '2'
        }

        vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
        vb.build(parent, None, values, schema)
        values = list(vb.values)
        self.assertEqual(2, len(values))
        value = values[0]
        self.assertEqual('{0}/a'.format(parent), value.path)
        self.assertEqual('1', value.value)
        value = values[1]
        self.assertEqual('{0}/b'.format(parent), value.path)
        self.assertEqual('2', value.value)
        self.assertEqual(0, len(calls))
class TestVerifyVersion(unittest.TestCase):
    """Tests for nso.verify_version_str against the minimum supported version list."""

    def test_valid_versions(self):
        """Versions at or above either minimum (4.6 or 4.5.1) are accepted."""
        minimum_versions = [(4, 6), (4, 5, 1)]
        for version_str in ('5.0', '5.1.1', '5.1.1.2', '4.6', '4.6.2',
                            '4.6.2.1', '4.5.1', '4.5.2', '4.5.1.2'):
            self.assertTrue(nso.verify_version_str(version_str, minimum_versions))

    def test_invalid_versions(self):
        """Versions below both minimums are rejected."""
        minimum_versions = [(4, 6), (4, 5, 1)]
        for version_str in ('4.4', '4.4.1', '4.4.1.2', '4.5.0'):
            self.assertFalse(nso.verify_version_str(version_str, minimum_versions))
class TestValueSort(unittest.TestCase):
    """Tests for ValueBuilder.sort_values dependency ordering."""

    def test_sort_parent_depend(self):
        """A value depending on a list is ordered after the list entry itself."""
        values = [
            nso.ValueBuilder.Value('/test/list{entry}', '/test/list', 'CREATE', ['']),
            nso.ValueBuilder.Value('/test/list{entry}/description', '/test/list/description', 'TEST', ['']),
            nso.ValueBuilder.Value('/test/entry', '/test/entry', 'VALUE', ['/test/list', '/test/list/name'])
        ]
        result = [v.path for v in nso.ValueBuilder.sort_values(values)]
        self.assertEqual(['/test/list{entry}', '/test/entry', '/test/list{entry}/description'], result)

    def test_sort_break_direct_cycle(self):
        """A direct dependency cycle is broken instead of looping forever."""
        values = [
            nso.ValueBuilder.Value('/test/a', '/test/a', 'VALUE', ['/test/c']),
            nso.ValueBuilder.Value('/test/b', '/test/b', 'VALUE', ['/test/a']),
            nso.ValueBuilder.Value('/test/c', '/test/c', 'VALUE', ['/test/a'])
        ]
        result = [v.path for v in nso.ValueBuilder.sort_values(values)]
        self.assertEqual(['/test/a', '/test/b', '/test/c'], result)

    def test_sort_break_indirect_cycle(self):
        """An indirect dependency cycle is broken instead of looping forever."""
        values = [
            nso.ValueBuilder.Value('/test/c', '/test/c', 'VALUE', ['/test/a']),
            nso.ValueBuilder.Value('/test/a', '/test/a', 'VALUE', ['/test/b']),
            nso.ValueBuilder.Value('/test/b', '/test/b', 'VALUE', ['/test/c'])
        ]
        result = [v.path for v in nso.ValueBuilder.sort_values(values)]
        self.assertEqual(['/test/a', '/test/c', '/test/b'], result)

    def test_sort_depend_on_self(self):
        """A value depending on itself does not deadlock the sort."""
        values = [
            nso.ValueBuilder.Value('/test/a', '/test/a', 'VALUE', ['/test/a']),
            nso.ValueBuilder.Value('/test/b', '/test/b', 'VALUE', [])
        ]
        result = [v.path for v in nso.ValueBuilder.sort_values(values)]
        self.assertEqual(['/test/a', '/test/b'], result)

View file

@ -0,0 +1,148 @@
#
# (c) 2018 Extreme Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from mock import MagicMock, patch, call
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.module_utils.network.slxos import slxos
class TestPluginCLIConfSLXOS(unittest.TestCase):
""" Test class for SLX-OS CLI Conf Methods
"""
def test_get_connection_established(self):
    """ Test get_connection with established connection
    """
    module = MagicMock()
    connection = slxos.get_connection(module)
    # An existing module.slxos_connection attribute is reused as-is.
    self.assertEqual(connection, module.slxos_connection)
@patch('ansible_collections.community.general.plugins.module_utils.network.slxos.slxos.Connection')
def test_get_connection_new(self, connection):
    """ Test get_connection with new connection
    """
    socket_path = "little red riding hood"
    module = MagicMock(spec=[
        'fail_json',
    ])
    module._socket_path = socket_path

    connection().get_capabilities.return_value = '{"network_api": "cliconf"}'
    returned_connection = slxos.get_connection(module)
    # The new Connection must be built from the module's socket path.
    connection.assert_called_with(socket_path)
    self.assertEqual(returned_connection, module.slxos_connection)
@patch('ansible_collections.community.general.plugins.module_utils.network.slxos.slxos.Connection')
def test_get_connection_incorrect_network_api(self, connection):
""" Test get_connection with incorrect network_api response
"""
socket_path = "little red riding hood"
module = MagicMock(spec=[
'fail_json',
])
module._socket_path = socket_path
module.fail_json.side_effect = TypeError
connection().get_capabilities.return_value = '{"network_api": "nope"}'
with self.assertRaises(TypeError):
slxos.get_connection(module)
@patch('ansible_collections.community.general.plugins.module_utils.network.slxos.slxos.Connection')
def test_get_capabilities(self, connection):
""" Test get_capabilities
"""
socket_path = "little red riding hood"
module = MagicMock(spec=[
'fail_json',
])
module._socket_path = socket_path
module.fail_json.side_effect = TypeError
capabilities = {'network_api': 'cliconf'}
connection().get_capabilities.return_value = json.dumps(capabilities)
capabilities_returned = slxos.get_capabilities(module)
self.assertEqual(capabilities, capabilities_returned)
@patch('ansible_collections.community.general.plugins.module_utils.network.slxos.slxos.Connection')
def test_run_commands(self, connection):
""" Test get_capabilities
"""
module = MagicMock()
commands = [
'hello',
'dolly',
'well hello',
'dolly',
'its so nice to have you back',
'where you belong',
]
responses = [
'Dolly, never go away again1',
'Dolly, never go away again2',
'Dolly, never go away again3',
'Dolly, never go away again4',
'Dolly, never go away again5',
'Dolly, never go away again6',
]
module.slxos_connection.get.side_effect = responses
run_command_responses = slxos.run_commands(module, commands)
calls = []
for command in commands:
calls.append(call(
command,
None,
None
))
module.slxos_connection.get.assert_has_calls(calls)
self.assertEqual(responses, run_command_responses)
@patch('ansible_collections.community.general.plugins.module_utils.network.slxos.slxos.Connection')
def test_load_config(self, connection):
""" Test load_config
"""
module = MagicMock()
commands = [
'what does it take',
'to be',
'number one?',
'two is not a winner',
'and three nobody remember',
]
slxos.load_config(module, commands)
module.slxos_connection.edit_config.assert_called_once_with(commands)

View file

@ -0,0 +1,322 @@
# Copyright: (c) 2019, Andrew Klychkov (@Andersson007) <aaklychkov@mail.ru>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
import pytest
import ansible_collections.community.general.plugins.module_utils.postgres as pg
# Argument spec in the form a PostgreSQL module declares it (login_* names),
# used as input to get_conn_params() in the tests below.
INPUT_DICT = dict(
    session_role=dict(default=''),
    login_user=dict(default='postgres'),
    login_password=dict(default='test', no_log=True),
    login_host=dict(default='test'),
    login_unix_socket=dict(default=''),
    port=dict(type='int', default=5432, aliases=['login_port']),
    ssl_mode=dict(
        default='prefer',
        choices=['allow', 'disable', 'prefer', 'require', 'verify-ca', 'verify-full']
    ),
    ca_cert=dict(aliases=['ssl_rootcert']),
)

# Expected output of get_conn_params() for INPUT_DICT: the same options
# translated to the keyword names that psycopg2.connect() accepts.
EXPECTED_DICT = dict(
    user=dict(default='postgres'),
    password=dict(default='test', no_log=True),
    host=dict(default='test'),
    port=dict(type='int', default=5432, aliases=['login_port']),
    sslmode=dict(
        default='prefer',
        choices=['allow', 'disable', 'prefer', 'require', 'verify-ca', 'verify-full']
    ),
    sslrootcert=dict(aliases=['ssl_rootcert']),
)
class TestPostgresCommonArgSpec():
    """
    Namespace for testing postgresql_common_arg_spec() function.
    """

    def test_postgres_common_argument_spec(self):
        """
        Test for postgresql_common_arg_spec() function.

        The tested function just returns a dictionary with the default
        parameters and their values for PostgreSQL modules.
        The return and expected dictionaries must be compared.
        """
        expected = {
            'login_user': {'default': 'postgres'},
            'login_password': {'default': '', 'no_log': True},
            'login_host': {'default': ''},
            'login_unix_socket': {'default': ''},
            'port': {'type': 'int', 'default': 5432, 'aliases': ['login_port']},
            'ssl_mode': {
                'default': 'prefer',
                'choices': ['allow', 'disable', 'prefer', 'require', 'verify-ca', 'verify-full'],
            },
            'ca_cert': {'aliases': ['ssl_rootcert']},
        }
        assert pg.postgres_common_argument_spec() == expected
@pytest.fixture
def m_psycopg2():
    """Return mock object for psycopg2 emulation.

    NOTE: Cursor and DbConnection are deliberately published as module-level
    globals because other tests in this file assert
    ``isinstance(db_connection, DbConnection)`` / ``isinstance(cursor, Cursor)``
    against these names. Do not make them fixture-local.
    """
    global Cursor
    Cursor = None

    class Cursor():
        def __init__(self):
            # Last SQL string passed to execute(), for assertions.
            self.passed_query = None

        def execute(self, query):
            self.passed_query = query

        def close(self):
            pass

    global DbConnection
    DbConnection = None

    class DbConnection():
        def __init__(self):
            pass

        def cursor(self, cursor_factory=None):
            return Cursor()

        def set_session(self, autocommit=None):
            pass

        def set_isolation_level(self, isolevel):
            pass

    class Extras():
        def __init__(self):
            self.DictCursor = True

    class Extensions():
        def __init__(self):
            self.ISOLATION_LEVEL_AUTOCOMMIT = True

    class DummyPsycopg2():
        def __init__(self):
            # 2.4.3 is the minimum version the module_utils code accepts;
            # tests lower this attribute to exercise version checks.
            self.__version__ = '2.4.3'
            self.extras = Extras()
            self.extensions = Extensions()

        def connect(self, host=None, port=None, user=None,
                    password=None, sslmode=None, sslrootcert=None):
            # Magic user name used by tests to simulate a connection failure.
            if user == 'Exception':
                raise Exception()

            return DbConnection()

    return DummyPsycopg2()
class TestEnsureReqLibs():
    """
    Namespace for testing ensure_required_libs() function.

    If there is something wrong with libs, the function invokes fail_json()
    method of AnsibleModule object passed as an argument called 'module'.
    Therefore we must check:
    1. value of err_msg attribute of m_ansible_module mock object.
    """

    @pytest.fixture(scope='class')
    def m_ansible_module(self):
        """Return an object of dummy AnsibleModule class.

        Shadows the module-level fixture of the same name; fail_json()
        records the message instead of exiting.
        """
        class Dummym_ansible_module():
            def __init__(self):
                self.params = {'ca_cert': False}
                self.err_msg = ''

            def fail_json(self, msg):
                self.err_msg = msg

        return Dummym_ansible_module()

    def test_ensure_req_libs_has_not_psycopg2(self, m_ansible_module):
        """Test ensure_required_libs() with psycopg2 is None."""
        # HAS_PSYCOPG2 is False by default
        pg.ensure_required_libs(m_ansible_module)

        assert 'Failed to import the required Python library (psycopg2)' in m_ansible_module.err_msg

    def test_ensure_req_libs_has_psycopg2(self, m_ansible_module, monkeypatch):
        """Test ensure_required_libs() with psycopg2 is not None."""
        monkeypatch.setattr(pg, 'HAS_PSYCOPG2', True)

        pg.ensure_required_libs(m_ansible_module)
        assert m_ansible_module.err_msg == ''

    def test_ensure_req_libs_ca_cert(self, m_ansible_module, m_psycopg2, monkeypatch):
        """
        Test with module.params['ca_cert'], psycopg2 version is suitable.
        """
        m_ansible_module.params['ca_cert'] = True
        monkeypatch.setattr(pg, 'HAS_PSYCOPG2', True)
        monkeypatch.setattr(pg, 'psycopg2', m_psycopg2)

        pg.ensure_required_libs(m_ansible_module)
        assert m_ansible_module.err_msg == ''

    def test_ensure_req_libs_ca_cert_low_psycopg2_ver(self, m_ansible_module, m_psycopg2, monkeypatch):
        """
        Test with module.params['ca_cert'], psycopg2 version is wrong.
        """
        m_ansible_module.params['ca_cert'] = True
        monkeypatch.setattr(pg, 'HAS_PSYCOPG2', True)
        # Set wrong psycopg2 version number (below the 2.4.3 minimum):
        psycopg2 = m_psycopg2
        psycopg2.__version__ = '2.4.2'
        monkeypatch.setattr(pg, 'psycopg2', psycopg2)

        pg.ensure_required_libs(m_ansible_module)
        assert 'psycopg2 must be at least 2.4.3' in m_ansible_module.err_msg
@pytest.fixture(scope='class')
def m_ansible_module():
    """Return a dummy AnsibleModule that records fail_json/warn messages."""
    class _DummyAnsibleModule(object):
        def __init__(self):
            self.params = pg.postgres_common_argument_spec()
            self.err_msg = ''
            self.warn_msg = ''

        def fail_json(self, msg):
            # Record the message instead of exiting, so tests can assert on it.
            self.err_msg = msg

        def warn(self, msg):
            self.warn_msg = msg

    return _DummyAnsibleModule()
class TestConnectToDb():
    """
    Namespace for testing connect_to_db() function.

    When some connection errors occur, connect_to_db() catches any of them
    and invokes fail_json() or warn() methods of the AnsibleModule object
    depending on the passed parameters.
    connect_to_db may return a db_connection object or None if errors occurred.
    Therefore we must check:
    1. Values of err_msg and warn_msg attributes of m_ansible_module mock object.
    2. Types of return objects (db_connection and cursor).
    """

    def test_connect_to_db(self, m_ansible_module, monkeypatch, m_psycopg2):
        """Test connect_to_db(), common test."""
        monkeypatch.setattr(pg, 'HAS_PSYCOPG2', True)
        monkeypatch.setattr(pg, 'psycopg2', m_psycopg2)

        conn_params = pg.get_conn_params(m_ansible_module, m_ansible_module.params)
        db_connection = pg.connect_to_db(m_ansible_module, conn_params)
        cursor = db_connection.cursor()
        # if errors, db_connection returned as None:
        # DbConnection/Cursor are module globals published by the m_psycopg2 fixture.
        assert isinstance(db_connection, DbConnection)
        assert isinstance(cursor, Cursor)
        assert m_ansible_module.err_msg == ''
        # The default behaviour, normal in this case:
        assert 'Database name has not been passed' in m_ansible_module.warn_msg

    def test_session_role(self, m_ansible_module, monkeypatch, m_psycopg2):
        """Test connect_to_db(), switch on session_role."""
        monkeypatch.setattr(pg, 'HAS_PSYCOPG2', True)
        monkeypatch.setattr(pg, 'psycopg2', m_psycopg2)

        m_ansible_module.params['session_role'] = 'test_role'

        conn_params = pg.get_conn_params(m_ansible_module, m_ansible_module.params)
        db_connection = pg.connect_to_db(m_ansible_module, conn_params)
        cursor = db_connection.cursor()
        # if errors, db_connection returned as None:
        assert isinstance(db_connection, DbConnection)
        assert isinstance(cursor, Cursor)
        assert m_ansible_module.err_msg == ''
        # The default behaviour, normal in this case:
        assert 'Database name has not been passed' in m_ansible_module.warn_msg

    def test_fail_on_conn_true(self, m_ansible_module, monkeypatch, m_psycopg2):
        """
        Test connect_to_db(), fail_on_conn arg passed as True (the default behavior).
        """
        monkeypatch.setattr(pg, 'HAS_PSYCOPG2', True)
        monkeypatch.setattr(pg, 'psycopg2', m_psycopg2)

        m_ansible_module.params['login_user'] = 'Exception'  # causes Exception

        conn_params = pg.get_conn_params(m_ansible_module, m_ansible_module.params)
        db_connection = pg.connect_to_db(m_ansible_module, conn_params, fail_on_conn=True)

        assert 'unable to connect to database' in m_ansible_module.err_msg
        assert db_connection is None

    def test_fail_on_conn_false(self, m_ansible_module, monkeypatch, m_psycopg2):
        """
        Test connect_to_db(), fail_on_conn arg passed as False.
        """
        monkeypatch.setattr(pg, 'HAS_PSYCOPG2', True)
        monkeypatch.setattr(pg, 'psycopg2', m_psycopg2)

        m_ansible_module.params['login_user'] = 'Exception'  # causes Exception

        conn_params = pg.get_conn_params(m_ansible_module, m_ansible_module.params)
        db_connection = pg.connect_to_db(m_ansible_module, conn_params, fail_on_conn=False)

        # With fail_on_conn=False the failure is only warned about, not fatal.
        assert m_ansible_module.err_msg == ''
        assert 'PostgreSQL server is unavailable' in m_ansible_module.warn_msg
        assert db_connection is None

    def test_autocommit_true(self, m_ansible_module, monkeypatch, m_psycopg2):
        """
        Test connect_to_db(), autocommit arg passed as True (the default is False).
        """
        monkeypatch.setattr(pg, 'HAS_PSYCOPG2', True)

        # case 1: psycopg2 version is high enough (fixture default is 2.4.3)
        monkeypatch.setattr(pg, 'psycopg2', m_psycopg2)

        conn_params = pg.get_conn_params(m_ansible_module, m_ansible_module.params)
        db_connection = pg.connect_to_db(m_ansible_module, conn_params, autocommit=True)
        cursor = db_connection.cursor()

        # if errors, db_connection returned as None:
        assert isinstance(db_connection, DbConnection)
        assert isinstance(cursor, Cursor)
        assert m_ansible_module.err_msg == ''

        # case 2: psycopg2.__version < 2.4.2
        m_psycopg2.__version__ = '2.4.1'
        monkeypatch.setattr(pg, 'psycopg2', m_psycopg2)

        conn_params = pg.get_conn_params(m_ansible_module, m_ansible_module.params)
        db_connection = pg.connect_to_db(m_ansible_module, conn_params, autocommit=True)
        cursor = db_connection.cursor()

        # if errors, db_connection returned as None:
        assert isinstance(db_connection, DbConnection)
        assert isinstance(cursor, Cursor)
        assert 'psycopg2 must be at least 2.4.3' in m_ansible_module.err_msg
class TestGetConnParams():
    """Namespace for testing get_conn_params() function."""

    def test_get_conn_params_def(self, m_ansible_module):
        """Test get_conn_params(), warn_db_default kwarg is default."""
        params = pg.get_conn_params(m_ansible_module, INPUT_DICT)
        assert params == EXPECTED_DICT
        expected_warning = 'Database name has not been passed, used default database to connect to.'
        assert m_ansible_module.warn_msg == expected_warning

    def test_get_conn_params_warn_db_def_false(self, m_ansible_module):
        """Test get_conn_params(), warn_db_default kwarg is False."""
        params = pg.get_conn_params(m_ansible_module, INPUT_DICT, warn_db_default=False)
        assert params == EXPECTED_DICT
        assert m_ansible_module.warn_msg == ''

View file

@ -0,0 +1,78 @@
# -*- coding: utf-8 -*-
#
# Dell EMC OpenManage Ansible Modules
# Version 2.0
# Copyright (C) 2019 Dell Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# All rights reserved. Dell, EMC, and other trademarks are trademarks of Dell Inc. or its subsidiaries.
# Other trademarks may be trademarks of their respective owners.
#
from __future__ import absolute_import
import pytest
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from ansible.module_utils.six.moves.urllib.error import URLError, HTTPError
from ansible_collections.community.general.plugins.module_utils.remote_management.dellemc.ome import RestOME
from ansible_collections.community.general.tests.unit.compat.mock import MagicMock
import json
class TestRestOME(object):
    """Tests for the RestOME HTTP wrapper (invoke_request and error handling)."""

    @pytest.fixture
    def mock_response(self):
        """Return a MagicMock mimicking a successful open_url() response."""
        mock_response = MagicMock()
        mock_response.getcode.return_value = 200
        # Expose the session token both as .headers and via getheaders().
        mock_response.headers = mock_response.getheaders.return_value = {'X-Auth-Token': 'token_id'}
        mock_response.read.return_value = json.dumps({"value": "data"})
        return mock_response

    def test_invoke_request_with_session(self, mock_response, mocker):
        # Session-based use: RestOME is used as a context manager with req_session=True.
        mocker.patch('ansible_collections.community.general.plugins.module_utils.remote_management.dellemc.ome.open_url',
                     return_value=mock_response)
        module_params = {'hostname': '192.168.0.1', 'username': 'username',
                         'password': 'password', "port": 443}
        req_session = True
        with RestOME(module_params, req_session) as obj:
            response = obj.invoke_request("/testpath", "GET")
        assert response.status_code == 200
        assert response.json_data == {"value": "data"}
        assert response.success is True

    def test_invoke_request_without_session(self, mock_response, mocker):
        # Same request without session management (req_session=False).
        mocker.patch('ansible_collections.community.general.plugins.module_utils.remote_management.dellemc.ome.open_url',
                     return_value=mock_response)
        module_params = {'hostname': '192.168.0.1', 'username': 'username',
                         'password': 'password', "port": 443}
        req_session = False
        with RestOME(module_params, req_session) as obj:
            response = obj.invoke_request("/testpath", "GET")
        assert response.status_code == 200
        assert response.json_data == {"value": "data"}
        assert response.success is True

    @pytest.mark.parametrize("exc", [URLError, SSLValidationError, ConnectionError])
    def test_invoke_request_error_case_handling(self, exc, mock_response, mocker):
        """Transport-level errors must propagate out of invoke_request()."""
        open_url_mock = mocker.patch('ansible_collections.community.general.plugins.module_utils.remote_management.dellemc.ome.open_url',
                                     return_value=mock_response)
        open_url_mock.side_effect = exc("test")
        module_params = {'hostname': '192.168.0.1', 'username': 'username',
                         'password': 'password', "port": 443}
        req_session = False
        # The exception context is captured but unused; only the raise matters.
        with pytest.raises(exc) as e:
            with RestOME(module_params, req_session) as obj:
                obj.invoke_request("/testpath", "GET")

    def test_invoke_request_http_error_handling(self, mock_response, mocker):
        """HTTP 4xx responses surface as HTTPError from invoke_request()."""
        open_url_mock = mocker.patch('ansible_collections.community.general.plugins.module_utils.remote_management.dellemc.ome.open_url',
                                     return_value=mock_response)
        open_url_mock.side_effect = HTTPError('http://testhost.com/', 400,
                                              'Bad Request Error', {}, None)
        module_params = {'hostname': '192.168.0.1', 'username': 'username',
                         'password': 'password', "port": 443}
        req_session = False
        with pytest.raises(HTTPError) as e:
            with RestOME(module_params, req_session) as obj:
                obj.invoke_request("/testpath", "GET")

View file

@ -0,0 +1,100 @@
import pytest
from ansible_collections.community.general.plugins.module_utils.database import (
pg_quote_identifier,
SQLParseError,
)
# These are all valid strings
# The results are based on interpreting the identifier as a table name
VALID = {
    # User quoted
    '"public.table"': '"public.table"',
    '"public"."table"': '"public"."table"',
    '"schema test"."table test"': '"schema test"."table test"',

    # We quote part
    'public.table': '"public"."table"',
    '"public".table': '"public"."table"',
    'public."table"': '"public"."table"',
    'schema test.table test': '"schema test"."table test"',
    '"schema test".table test': '"schema test"."table test"',
    'schema test."table test"': '"schema test"."table test"',

    # Embedded double quotes
    'table "test"': '"table ""test"""',
    'public."table ""test"""': '"public"."table ""test"""',
    'public.table "test"': '"public"."table ""test"""',
    'schema "test".table': '"schema ""test"""."table"',
    '"schema ""test""".table': '"schema ""test"""."table"',
    '"""wat"""."""test"""': '"""wat"""."""test"""',

    # Sigh, handle these as well:
    '"no end quote': '"""no end quote"',
    'schema."table': '"schema"."""table"',
    '"schema.table': '"""schema"."table"',
    'schema."table.something': '"schema"."""table"."something"',

    # Embedded dots
    '"schema.test"."table.test"': '"schema.test"."table.test"',
    '"schema.".table': '"schema."."table"',
    '"schema."."table"': '"schema."."table"',
    'schema.".table"': '"schema".".table"',
    '"schema".".table"': '"schema".".table"',
    '"schema.".".table"': '"schema.".".table"',

    # These are valid but maybe not what the user intended
    '."table"': '".""table"""',
    'table.': '"table."',
}

# Mapping of (identifier, id_type) to the error message expected from
# pg_quote_identifier().
INVALID = {
    ('test.too.many.dots', 'table'): 'PostgreSQL does not support table with more than 3 dots',
    ('"test.too".many.dots', 'database'): 'PostgreSQL does not support database with more than 1 dots',
    ('test.too."many.dots"', 'database'): 'PostgreSQL does not support database with more than 1 dots',
    ('"test"."too"."many"."dots"', 'database'): "PostgreSQL does not support database with more than 1 dots",
    ('"test"."too"."many"."dots"', 'schema'): "PostgreSQL does not support schema with more than 2 dots",
    ('"test"."too"."many"."dots"', 'table'): "PostgreSQL does not support table with more than 3 dots",
    ('"test"."too"."many"."dots"."for"."column"', 'column'): "PostgreSQL does not support column with more than 4 dots",
    ('"table "invalid" double quote"', 'table'): 'User escaped identifiers must escape extra quotes',
    ('"schema "invalid"""."table "invalid"', 'table'): 'User escaped identifiers must escape extra quotes',
    ('"schema."table"', 'table'): 'User escaped identifiers must escape extra quotes',
    ('"schema".', 'table'): 'Identifier name unspecified or unquoted trailing dot',
}

# (identifier, id_type, expected quoting, error message once one more
# dotted component is appended) tuples.
HOW_MANY_DOTS = (
    ('role', 'role', '"role"',
     'PostgreSQL does not support role with more than 1 dots'),
    ('db', 'database', '"db"',
     'PostgreSQL does not support database with more than 1 dots'),
    ('db.schema', 'schema', '"db"."schema"',
     'PostgreSQL does not support schema with more than 2 dots'),
    ('db.schema.table', 'table', '"db"."schema"."table"',
     'PostgreSQL does not support table with more than 3 dots'),
    ('db.schema.table.column', 'column', '"db"."schema"."table"."column"',
     'PostgreSQL does not support column with more than 4 dots'),
)

# Generator expressions: each is consumed exactly once by the corresponding
# @pytest.mark.parametrize decorator below, so single-shot iteration is fine.
VALID_QUOTES = ((test, VALID[test]) for test in sorted(VALID))
INVALID_QUOTES = ((test[0], test[1], INVALID[test]) for test in sorted(INVALID))
@pytest.mark.parametrize("identifier, quoted_identifier", VALID_QUOTES)
def test_valid_quotes(identifier, quoted_identifier):
    """Every valid identifier must quote to the expected table name."""
    quoted = pg_quote_identifier(identifier, 'table')
    assert quoted == quoted_identifier
@pytest.mark.parametrize("identifier, id_type, msg", INVALID_QUOTES)
def test_invalid_quotes(identifier, id_type, msg):
    """Invalid identifiers must raise SQLParseError with the expected message."""
    with pytest.raises(SQLParseError) as exc_info:
        pg_quote_identifier(identifier, id_type)
    exc_info.match(msg)
@pytest.mark.parametrize("identifier, id_type, quoted_identifier, msg", HOW_MANY_DOTS)
def test_how_many_dots(identifier, id_type, quoted_identifier, msg):
    """Each identifier type allows a fixed number of dotted parts and no more."""
    # At the limit the identifier still quotes successfully.
    assert pg_quote_identifier(identifier, id_type) == quoted_identifier
    # One extra dotted component must be rejected with the matching message.
    with pytest.raises(SQLParseError) as exc_info:
        pg_quote_identifier('%s.more' % identifier, id_type)
    exc_info.match(msg)

View file

@ -0,0 +1,265 @@
# Copyright: (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
import copy
import json
import pytest
from mock import MagicMock
from ansible_collections.community.general.plugins.module_utils import hetzner
class ModuleFailException(Exception):
    """Raised in place of module.exit on fail_json().

    Captures the failure message and any extra keyword arguments so tests
    can assert on exactly what fail_json() was called with.
    """

    def __init__(self, msg, **kwargs):
        super(ModuleFailException, self).__init__(msg)
        self.fail_msg = msg
        self.fail_kwargs = kwargs
def get_module_mock():
    """Build a MagicMock AnsibleModule whose fail_json raises ModuleFailException."""
    def fail_json(msg, **kwargs):
        raise ModuleFailException(msg, **kwargs)

    module = MagicMock()
    module.fail_json = fail_json
    module.from_json = json.loads
    return module
# ########################################################################################

# (fetch_url return value, accept_errors argument, expected fetch_url_json result)
# triples for the success path.
FETCH_URL_JSON_SUCCESS = [
    (
        (None, dict(
            body=json.dumps(dict(
                a='b'
            )).encode('utf-8'),
        )),
        None,
        (dict(
            a='b'
        ), None)
    ),
    (
        (None, dict(
            body=json.dumps(dict(
                error=dict(
                    code="foo",
                    status=400,
                    message="bar",
                ),
                a='b'
            )).encode('utf-8'),
        )),
        ['foo'],
        (dict(
            error=dict(
                code="foo",
                status=400,
                message="bar",
            ),
            a='b'
        ), 'foo')
    ),
]

# (fetch_url return value, accept_errors argument, expected fail_json message)
# triples for the failure path.
FETCH_URL_JSON_FAIL = [
    (
        (None, dict(
            body=json.dumps(dict(
                error=dict(
                    code="foo",
                    status=400,
                    message="bar",
                ),
            )).encode('utf-8'),
        )),
        None,
        'Request failed: 400 foo (bar)'
    ),
    (
        (None, dict(
            body=json.dumps(dict(
                error=dict(
                    code="foo",
                    status=400,
                    message="bar",
                ),
            )).encode('utf-8'),
        )),
        ['bar'],
        'Request failed: 400 foo (bar)'
    ),
]
@pytest.mark.parametrize("return_value, accept_errors, result", FETCH_URL_JSON_SUCCESS)
def test_fetch_url_json(monkeypatch, return_value, accept_errors, result):
    """fetch_url_json returns (parsed body, accepted error code) on success.

    Fix: the fetch_url stub is installed with monkeypatch.setattr so it is
    automatically restored after the test; the previous direct assignment
    (monkeypatch was accepted but never used) leaked the mock into every
    later test in the process.
    """
    module = get_module_mock()
    monkeypatch.setattr(hetzner, 'fetch_url', MagicMock(return_value=return_value))

    assert hetzner.fetch_url_json(module, 'https://foo/bar', accept_errors=accept_errors) == result
@pytest.mark.parametrize("return_value, accept_errors, result", FETCH_URL_JSON_FAIL)
def test_fetch_url_json_fail(monkeypatch, return_value, accept_errors, result):
    """fetch_url_json calls fail_json with the formatted API error.

    Fix: use monkeypatch.setattr (previously unused) instead of assigning
    hetzner.fetch_url directly, which leaked the mock across tests.
    """
    module = get_module_mock()
    monkeypatch.setattr(hetzner, 'fetch_url', MagicMock(return_value=return_value))

    with pytest.raises(ModuleFailException) as exc:
        hetzner.fetch_url_json(module, 'https://foo/bar', accept_errors=accept_errors)

    assert exc.value.fail_msg == result
    assert exc.value.fail_kwargs == dict()
# ########################################################################################

# (ip, fetch_url return value, expected active_server_ip, expected raw record) tuples.
GET_FAILOVER_SUCCESS = [
    (
        '1.2.3.4',
        (None, dict(
            body=json.dumps(dict(
                failover=dict(
                    active_server_ip='1.1.1.1',
                    ip='1.2.3.4',
                    netmask='255.255.255.255',
                )
            )).encode('utf-8'),
        )),
        '1.1.1.1',
        dict(
            active_server_ip='1.1.1.1',
            ip='1.2.3.4',
            netmask='255.255.255.255',
        )
    ),
]

# (ip, fetch_url return value, expected fail_json message) tuples.
GET_FAILOVER_FAIL = [
    (
        '1.2.3.4',
        (None, dict(
            body=json.dumps(dict(
                error=dict(
                    code="foo",
                    status=400,
                    message="bar",
                ),
            )).encode('utf-8'),
        )),
        'Request failed: 400 foo (bar)'
    ),
]
@pytest.mark.parametrize("ip, return_value, result, record", GET_FAILOVER_SUCCESS)
def test_get_failover_record(monkeypatch, ip, return_value, result, record):
    """get_failover_record returns the raw failover record for an IP.

    Fix: install the fetch_url stub via monkeypatch.setattr (previously the
    monkeypatch fixture was unused and the assignment leaked across tests).
    """
    module = get_module_mock()
    monkeypatch.setattr(hetzner, 'fetch_url', MagicMock(return_value=copy.deepcopy(return_value)))

    assert hetzner.get_failover_record(module, ip) == record
@pytest.mark.parametrize("ip, return_value, result", GET_FAILOVER_FAIL)
def test_get_failover_record_fail(monkeypatch, ip, return_value, result):
    """get_failover_record reports API errors via fail_json.

    Fix: monkeypatch.setattr restores hetzner.fetch_url after the test;
    the previous direct assignment leaked the mock process-wide.
    """
    module = get_module_mock()
    monkeypatch.setattr(hetzner, 'fetch_url', MagicMock(return_value=copy.deepcopy(return_value)))

    with pytest.raises(ModuleFailException) as exc:
        hetzner.get_failover_record(module, ip)

    assert exc.value.fail_msg == result
    assert exc.value.fail_kwargs == dict()
@pytest.mark.parametrize("ip, return_value, result, record", GET_FAILOVER_SUCCESS)
def test_get_failover(monkeypatch, ip, return_value, result, record):
    """get_failover returns the active server IP for a failover IP.

    Fix: use monkeypatch.setattr so the fetch_url stub is undone after the
    test instead of leaking via direct module-attribute assignment.
    """
    module = get_module_mock()
    monkeypatch.setattr(hetzner, 'fetch_url', MagicMock(return_value=copy.deepcopy(return_value)))

    assert hetzner.get_failover(module, ip) == result
@pytest.mark.parametrize("ip, return_value, result", GET_FAILOVER_FAIL)
def test_get_failover_fail(monkeypatch, ip, return_value, result):
    """get_failover reports API errors via fail_json.

    Fix: monkeypatch.setattr (previously unused fixture) replaces the
    leaking direct assignment to hetzner.fetch_url.
    """
    module = get_module_mock()
    monkeypatch.setattr(hetzner, 'fetch_url', MagicMock(return_value=copy.deepcopy(return_value)))

    with pytest.raises(ModuleFailException) as exc:
        hetzner.get_failover(module, ip)

    assert exc.value.fail_msg == result
    assert exc.value.fail_kwargs == dict()
# ########################################################################################

# (ip, requested value, fetch_url return value, expected (ip, changed) result) tuples.
SET_FAILOVER_SUCCESS = [
    (
        '1.2.3.4',
        '1.1.1.1',
        (None, dict(
            body=json.dumps(dict(
                failover=dict(
                    active_server_ip='1.1.1.2',
                )
            )).encode('utf-8'),
        )),
        ('1.1.1.2', True)
    ),
    (
        '1.2.3.4',
        '1.1.1.1',
        (None, dict(
            body=json.dumps(dict(
                error=dict(
                    code="FAILOVER_ALREADY_ROUTED",
                    status=400,
                    message="Failover already routed",
                ),
            )).encode('utf-8'),
        )),
        # "already routed" is treated as success without change.
        ('1.1.1.1', False)
    ),
]

# (ip, requested value, fetch_url return value, expected fail_json message) tuples.
SET_FAILOVER_FAIL = [
    (
        '1.2.3.4',
        '1.1.1.1',
        (None, dict(
            body=json.dumps(dict(
                error=dict(
                    code="foo",
                    status=400,
                    message="bar",
                ),
            )).encode('utf-8'),
        )),
        'Request failed: 400 foo (bar)'
    ),
]
@pytest.mark.parametrize("ip, value, return_value, result", SET_FAILOVER_SUCCESS)
def test_set_failover(monkeypatch, ip, value, return_value, result):
    """set_failover returns the new routing target and a changed flag.

    Fix: monkeypatch.setattr installs the fetch_url stub reversibly; the
    previous direct assignment leaked the mock into later tests.
    """
    module = get_module_mock()
    monkeypatch.setattr(hetzner, 'fetch_url', MagicMock(return_value=copy.deepcopy(return_value)))

    assert hetzner.set_failover(module, ip, value) == result
@pytest.mark.parametrize("ip, value, return_value, result", SET_FAILOVER_FAIL)
def test_set_failover_fail(monkeypatch, ip, value, return_value, result):
    """set_failover reports unexpected API errors via fail_json.

    Fix: monkeypatch.setattr (previously unused fixture) replaces the
    leaking direct assignment to hetzner.fetch_url.
    """
    module = get_module_mock()
    monkeypatch.setattr(hetzner, 'fetch_url', MagicMock(return_value=copy.deepcopy(return_value)))

    with pytest.raises(ModuleFailException) as exc:
        hetzner.set_failover(module, ip, value)

    assert exc.value.fail_msg == result
    assert exc.value.fail_kwargs == dict()

View file

@ -0,0 +1,116 @@
# -*- coding: utf-8 -*-
# (c) 2015, Michael Scherer <mscherer@redhat.com>
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
from ansible_collections.community.general.plugins.module_utils import known_hosts
# Sample repository URLs mapped to their expected classification:
#   is_ssh_url        - whether is_ssh_url() should report the URL as SSH
#   get_fqdn          - host part get_fqdn_and_port() should extract
#   add_host_key_cmd  - ssh-keyscan argument suffix expected in add_host_key()
#   port              - port string get_fqdn_and_port() should extract (or None)
URLS = {
    'ssh://one.example.org/example.git': {
        'is_ssh_url': True,
        'get_fqdn': 'one.example.org',
        'add_host_key_cmd': " -t rsa one.example.org",
        'port': None,
    },
    'ssh+git://two.example.org/example.git': {
        'is_ssh_url': True,
        'get_fqdn': 'two.example.org',
        'add_host_key_cmd': " -t rsa two.example.org",
        'port': None,
    },
    'rsync://three.example.org/user/example.git': {
        'is_ssh_url': False,
        'get_fqdn': 'three.example.org',
        'add_host_key_cmd': None,  # not called for non-ssh urls
        'port': None,
    },
    'git@four.example.org:user/example.git': {
        'is_ssh_url': True,
        'get_fqdn': 'four.example.org',
        'add_host_key_cmd': " -t rsa four.example.org",
        'port': None,
    },
    'git+ssh://five.example.org/example.git': {
        'is_ssh_url': True,
        'get_fqdn': 'five.example.org',
        'add_host_key_cmd': " -t rsa five.example.org",
        'port': None,
    },
    'ssh://six.example.org:21/example.org': {
        # ssh on FTP Port?
        'is_ssh_url': True,
        'get_fqdn': 'six.example.org',
        'add_host_key_cmd': " -t rsa -p 21 six.example.org",
        'port': '21',
    },
    'ssh://[2001:DB8::abcd:abcd]/example.git': {
        'is_ssh_url': True,
        'get_fqdn': '[2001:DB8::abcd:abcd]',
        'add_host_key_cmd': " -t rsa [2001:DB8::abcd:abcd]",
        'port': None,
    },
    'ssh://[2001:DB8::abcd:abcd]:22/example.git': {
        'is_ssh_url': True,
        'get_fqdn': '[2001:DB8::abcd:abcd]',
        'add_host_key_cmd': " -t rsa -p 22 [2001:DB8::abcd:abcd]",
        'port': '22',
    },
    'username@[2001:DB8::abcd:abcd]/example.git': {
        'is_ssh_url': True,
        'get_fqdn': '[2001:DB8::abcd:abcd]',
        'add_host_key_cmd': " -t rsa [2001:DB8::abcd:abcd]",
        'port': None,
    },
    'username@[2001:DB8::abcd:abcd]:path/example.git': {
        'is_ssh_url': True,
        'get_fqdn': '[2001:DB8::abcd:abcd]',
        'add_host_key_cmd': " -t rsa [2001:DB8::abcd:abcd]",
        'port': None,
    },
    'ssh://internal.git.server:7999/repos/repo.git': {
        'is_ssh_url': True,
        'get_fqdn': 'internal.git.server',
        'add_host_key_cmd': " -t rsa -p 7999 internal.git.server",
        'port': '7999',
    },
}
@pytest.mark.parametrize('url, is_ssh_url', ((k, URLS[k]['is_ssh_url']) for k in sorted(URLS)))
def test_is_ssh_url(url, is_ssh_url):
    """Each sample URL must be classified correctly as SSH or not."""
    detected = known_hosts.is_ssh_url(url)
    assert detected == is_ssh_url
@pytest.mark.parametrize('url, fqdn, port', ((k, URLS[k]['get_fqdn'], URLS[k]['port']) for k in sorted(URLS)))
def test_get_fqdn_and_port(url, fqdn, port):
    """Host and port must be extracted correctly from each sample URL."""
    extracted = known_hosts.get_fqdn_and_port(url)
    assert extracted == (fqdn, port)
@pytest.mark.parametrize('fqdn, port, add_host_key_cmd, stdin',
                         ((URLS[k]['get_fqdn'], URLS[k]['port'], URLS[k]['add_host_key_cmd'], {})
                          for k in sorted(URLS) if URLS[k]['is_ssh_url']),
                         indirect=['stdin'])
def test_add_host_key(am, mocker, fqdn, port, add_host_key_cmd):
    """add_host_key must invoke ssh-keyscan with the expected arguments.

    NOTE(review): ``am`` and the indirect ``stdin`` parameter appear to be
    fixtures provided by the collection's test conftest — confirm there.
    """
    get_bin_path = mocker.MagicMock()
    get_bin_path.return_value = keyscan_cmd = "/custom/path/ssh-keyscan"
    am.get_bin_path = get_bin_path

    run_command = mocker.MagicMock()
    # Non-empty stdout is required; see the message in the stubbed output.
    run_command.return_value = (0, "Needs output, otherwise thinks ssh-keyscan timed out'", "")
    am.run_command = run_command

    append_to_file = mocker.MagicMock()
    append_to_file.return_value = (None,)
    am.append_to_file = append_to_file

    # Pretend ~/.ssh and known_hosts already exist so no filesystem setup runs.
    mocker.patch('os.path.isdir', return_value=True)
    mocker.patch('os.path.exists', return_value=True)

    known_hosts.add_host_key(am, fqdn, port=port)
    run_command.assert_called_with(keyscan_cmd + add_host_key_cmd)

View file

@ -0,0 +1,51 @@
import json
import pytest
from ansible_collections.community.general.plugins.module_utils import kubevirt as mymodule
def test_simple_merge_dicts():
    """merge_dicts combines two flat label dictionaries."""
    left = {'labels': {'label1': 'value'}}
    right = {'labels': {'label2': 'value'}}
    expected = json.dumps({'labels': {'label1': 'value', 'label2': 'value'}}, sort_keys=True)
    merged = json.dumps(dict(mymodule.KubeVirtRawModule.merge_dicts(left, right)), sort_keys=True)
    assert merged == expected
def test_simple_multi_merge_dicts():
    """merge_dicts keeps keys unique to either side alongside the merged ones."""
    left = {'labels': {'label1': 'value', 'label3': 'value'}}
    right = {'labels': {'label2': 'value'}}
    expected = json.dumps({'labels': {'label1': 'value', 'label2': 'value', 'label3': 'value'}}, sort_keys=True)
    merged = json.dumps(dict(mymodule.KubeVirtRawModule.merge_dicts(left, right)), sort_keys=True)
    assert merged == expected
def test_double_nested_merge_dicts():
    """merge_dicts merges dictionaries nested two levels deep."""
    left = {'metadata': {'labels': {'label1': 'value', 'label3': 'value'}}}
    right = {'metadata': {'labels': {'label2': 'value'}}}
    expected = json.dumps(
        {'metadata': {'labels': {'label1': 'value', 'label2': 'value', 'label3': 'value'}}},
        sort_keys=True)
    merged = json.dumps(dict(mymodule.KubeVirtRawModule.merge_dicts(left, right)), sort_keys=True)
    assert merged == expected
@pytest.mark.parametrize("lval, operations, rval, result", [
    ('v1', ['<', '<='], 'v2', True),
    ('v1', ['>', '>=', '=='], 'v2', False),
    ('v1', ['>'], 'v1alpha1', True),
    ('v1', ['==', '<', '<='], 'v1alpha1', False),
    ('v1beta5', ['==', '<=', '>='], 'v1beta5', True),
    ('v1beta5', ['<', '>', '!='], 'v1beta5', False),
])
def test_kubeapiversion_comparisons(lval, operations, rval, result):
    """Verify KubeAPIVersion ordering for every comparison operator.

    Fix: the previous version built a source string and ran it through
    eval(); an explicit operator-function dispatch table is safer and
    produces a clear failure message per comparison.
    """
    import operator  # local import keeps the module-level import block untouched

    compare = {
        '<': operator.lt,
        '<=': operator.le,
        '>': operator.gt,
        '>=': operator.ge,
        '==': operator.eq,
        '!=': operator.ne,
    }
    for op in operations:
        outcome = compare[op](mymodule.KubeAPIVersion(lval), mymodule.KubeAPIVersion(rval))
        assert outcome == result, 'KubeAPIVersion(%r) %s KubeAPIVersion(%r) should be %r' % (lval, op, rval, result)
@pytest.mark.parametrize("ver", ('nope', 'v1delta7', '1.5', 'v1beta', 'v'))
def test_kubeapiversion_unsupported_versions(ver):
    """Malformed version strings must be rejected with ValueError.

    Fix: uses pytest.raises instead of the previous manual try/except flag,
    which failed with an uninformative bare ``assert threw``.
    """
    with pytest.raises(ValueError):
        mymodule.KubeAPIVersion(ver)

View file

@ -0,0 +1,61 @@
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright: (c) 2018, Johannes Brunswicker <johannes.brunswicker@gmail.com>
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from ansible_collections.community.general.plugins.module_utils.utm_utils import UTM
class FakeModule:
    """Minimal AnsibleModule stand-in: exposes only the ``params`` mapping."""

    def __init__(self, params):
        # Store the supplied params mapping unmodified.
        self.params = params
def test_combine_headers_returns_only_default():
    """An explicit empty 'headers' param yields only the default headers."""
    params = {
        'utm_protocol': 'utm_protocol',
        'utm_host': 'utm_host',
        'utm_port': 1234,
        'utm_token': 'utm_token',
        'name': 'FakeName',
        'headers': {},
    }
    combined = UTM(FakeModule(params), "endpoint", [])._combine_headers()
    assert combined == {"Accept": "application/json", "Content-type": "application/json"}
def test_combine_headers_returns_only_default2():
    """A missing 'headers' param also yields only the default headers."""
    params = {
        'utm_protocol': 'utm_protocol',
        'utm_host': 'utm_host',
        'utm_port': 1234,
        'utm_token': 'utm_token',
        'name': 'FakeName',
    }
    combined = UTM(FakeModule(params), "endpoint", [])._combine_headers()
    assert combined == {"Accept": "application/json", "Content-type": "application/json"}
def test_combine_headers_returns_combined():
    """Extra entries in the 'headers' param are merged with the defaults."""
    params = {
        'utm_protocol': 'utm_protocol',
        'utm_host': 'utm_host',
        'utm_port': 1234,
        'utm_token': 'utm_token',
        'name': 'FakeName',
        'headers': {'extraHeader': 'extraHeaderValue'},
    }
    combined = UTM(FakeModule(params), "endpoint", [])._combine_headers()
    assert combined == {
        "Accept": "application/json",
        "Content-type": "application/json",
        "extraHeader": "extraHeaderValue",
    }

View file

@ -0,0 +1,30 @@
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class AnsibleModuleException(Exception):
    """Base test exception that records how it was raised.

    Keeps both the positional arguments (on ``args``, as any Exception
    does) and the keyword arguments (on ``kwargs``) so tests can inspect
    exactly what exit_json()/fail_json() were called with.
    """

    def __init__(self, *args, **kwargs):
        super(AnsibleModuleException, self).__init__(*args)
        self.kwargs = kwargs
class ExitJsonException(AnsibleModuleException):
    """Raised by FakeAnsibleModule.exit_json() instead of exiting the process."""
    pass
class FailJsonException(AnsibleModuleException):
    """Raised by FakeAnsibleModule.fail_json() instead of exiting the process."""
    pass
class FakeAnsibleModule:
    """Test double for AnsibleModule.

    exit_json()/fail_json() raise catchable exceptions (carrying their
    arguments) rather than terminating the interpreter, so tests can
    assert on module outcomes.
    """

    def __init__(self, params=None, check_mode=False):
        # Mirror the two AnsibleModule attributes the code under test reads.
        self.params = params
        self.check_mode = check_mode

    def exit_json(self, *args, **kwargs):
        # Success path: surface the exit to the test as an exception.
        raise ExitJsonException(*args, **kwargs)

    def fail_json(self, *args, **kwargs):
        # Failure path: surface the failure to the test as an exception.
        raise FailJsonException(*args, **kwargs)

View file

@ -0,0 +1,66 @@
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# API version string reported by the fake Session below (both as the initial
# value and after a fake login).
FAKE_API_VERSION = "1.1"
class Failure(Exception):
    """Fake XenAPI.Failure: wraps an error `details` payload.

    Stringifies to the string form of the payload so tests can match on
    error text.
    """

    def __init__(self, details):
        # Kept verbatim; tests inspect this attribute directly.
        self.details = details

    def __str__(self):
        return "%s" % (self.details,)
class Session(object):
    """Fake XenAPI.Session that tracks login/logout state for unit tests.

    Records the last login method and params so tests can assert how a
    session was established; `xenapi` attribute access is expected to be
    replaced with mocker.patch() by the tests that need it.
    """

    def __init__(self, uri, transport=None, encoding=None, verbose=0,
                 allow_none=1, ignore_ssl=False):
        # Signature mirrors the real XenAPI.Session; only `transport` is
        # retained, the other arguments are accepted and ignored.
        self.transport = transport
        self._session = None
        self.last_login_method = None
        self.last_login_params = None
        self.API_version = FAKE_API_VERSION

    def _get_api_version(self):
        # Always reports the fixed fake version.
        return FAKE_API_VERSION

    def _login(self, method, params):
        # Record a successful fake login: fixed opaque session ref plus the
        # method/params used, for later assertions.
        self._session = "OpaqueRef:fake-xenapi-session-ref"
        self.last_login_method = method
        self.last_login_params = params
        self.API_version = self._get_api_version()

    def _logout(self):
        # Reset all login state back to the pristine (logged-out) values.
        self._session = None
        self.last_login_method = None
        self.last_login_params = None
        self.API_version = FAKE_API_VERSION

    def xenapi_request(self, methodname, params):
        # Dispatch low-level XAPI requests: only login*/logout are handled;
        # everything else is a stub returning None.
        if methodname.startswith('login'):
            self._login(methodname, params)
            return None
        elif methodname == 'logout' or methodname == 'session.logout':
            self._logout()
            return None
        else:
            # Should be patched with mocker.patch().
            return None

    def __getattr__(self, name):
        # Emulate the real Session's dynamic attributes (handle, xenapi,
        # login_*/slave_local_*, logout).
        if name == 'handle':
            return self._session
        elif name == 'xenapi':
            # Should be patched with mocker.patch().
            return None
        elif name.startswith('login') or name.startswith('slave_local'):
            # Any login variant becomes a callable that records the call.
            return lambda *params: self._login(name, params)
        elif name == 'logout':
            return self._logout
        # NOTE(review): any other attribute name implicitly returns None here
        # instead of raising AttributeError -- confirm this fallthrough is
        # intentional for the fake (it affects hasattr()-style probing).
def xapi_local():
    """Mirror XenAPI.xapi_local(): return a fake Session for the local XAPI URI."""
    return Session("http://_var_lib_xcp_xapi/")

View file

@ -0,0 +1,25 @@
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
def fake_xenapi_ref(xenapi_class):
    """Return a deterministic fake OpaqueRef string for a XenAPI class name."""
    return "OpaqueRef:fake-xenapi-{0}-ref".format(xenapi_class)
# Shared parametrization table of invalid XenAPI reference values; "params"
# holds the bad refs and "ids" the matching human-readable pytest test IDs.
testcase_bad_xenapi_refs = {
    "params": [
        None,
        '',
        'OpaqueRef:NULL',
    ],
    "ids": [
        'none',
        'empty',
        'ref-null',
    ],
}

View file

@ -0,0 +1,118 @@
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import sys
import importlib
import os
import json
import pytest
from .FakeAnsibleModule import FakeAnsibleModule
from ansible.module_utils import six
from mock import MagicMock
@pytest.fixture
def fake_ansible_module(request):
    """Returns fake AnsibleModule with fake module params.

    An indirectly-parametrized test supplies its own params via
    ``request.param``; otherwise a default XenServer connection dict is used.
    """
    default_params = {
        "hostname": "somehost",
        "username": "someuser",
        "password": "somepwd",
        "validate_certs": True,
    }
    return FakeAnsibleModule(getattr(request, 'param', default_params))
@pytest.fixture(autouse=True)
def XenAPI():
    """Imports and returns fake XenAPI module."""
    # Import of fake XenAPI module is wrapped by fixture so that it does not
    # affect other unit tests which could potentially also use XenAPI module.

    # First we use importlib.import_module() to import the module and assign
    # it to a local symbol.
    fake_xenapi = importlib.import_module('ansible_collections.community.general.tests.unit.plugins.module_utils.xenserver.FakeXenAPI')

    # Now we populate Python module cache with imported fake module using the
    # original module name (XenAPI). That way, any 'import XenAPI' statement
    # will just load already imported fake module from the cache.
    sys.modules['XenAPI'] = fake_xenapi

    return fake_xenapi
@pytest.fixture(autouse=True)
def xenserver(XenAPI):
    """Imports and returns xenserver module util."""
    # Since we are wrapping fake XenAPI module inside a fixture, all modules
    # that depend on it have to be imported inside a test function. To make
    # this easier to handle and remove some code repetition, we wrap the import
    # of xenserver module util with a fixture.
    # Depending on the XenAPI fixture guarantees sys.modules['XenAPI'] is
    # already populated with the fake before this import runs.
    from ansible_collections.community.general.plugins.module_utils import xenserver

    return xenserver
@pytest.fixture
def mock_xenapi_failure(XenAPI, mocker):
    """
    Returns mock object that raises XenAPI.Failure on any XenAPI
    method call.

    Yields a (mocked_xenapi, fake_error_msg) pair so tests can both
    trigger the failure and assert on its message.
    """
    fake_error_msg = "Fake XAPI method call error!"

    # We need to use our MagicMock based class that passes side_effect to its
    # children because calls to xenapi methods can generate an arbitrary
    # hierarchy of mock objects. Any such object when called should use the
    # same side_effect as its parent mock object.
    class MagicMockSideEffect(MagicMock):
        def _get_child_mock(self, **kw):
            # Copy side_effect down to every child mock so that e.g.
            # session.xenapi.VM.get_record(...) raises just like
            # session.xenapi itself would.
            child_mock = super(MagicMockSideEffect, self)._get_child_mock(**kw)
            child_mock.side_effect = self.side_effect
            return child_mock

    # create=True because the fake Session exposes 'xenapi' only via
    # __getattr__, not as a real attribute that patch.object could find.
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', new=MagicMockSideEffect(), create=True)
    mocked_xenapi.side_effect = XenAPI.Failure(fake_error_msg)

    return mocked_xenapi, fake_error_msg
@pytest.fixture
def fixture_data_from_file(request):
    """Loads fixture data from files.

    Indirect parametrization supplies one fixture file name (or a list of
    names) via ``request.param``; each file is read from the sibling
    'fixtures' directory and JSON-decoded when possible, otherwise kept as
    raw text. Returns a mapping of file name -> loaded data.
    """
    if not hasattr(request, 'param'):
        return {}

    fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')

    # Accept a single name or a list of names uniformly.
    if isinstance(request.param, six.string_types):
        names = [request.param]
    else:
        names = request.param

    fixture_data = {}
    for fixture_name in names:
        with open(os.path.join(fixture_path, fixture_name)) as fixture_file:
            content = fixture_file.read()

        try:
            content = json.loads(content)
        except Exception:
            # Not JSON: keep the raw file content as a string.
            pass

        fixture_data[fixture_name] = content

    return fixture_data

View file

@ -0,0 +1,73 @@
{
"cdrom": {
"type": "none"
},
"customization_agent": "native",
"disks": [
{
"name": "ansible-test-vm-1-C",
"name_desc": "C:\\",
"os_device": "xvda",
"size": 42949672960,
"sr": "Ansible Test Storage 1",
"sr_uuid": "767b30e4-f8db-a83d-8ba7-f5e6e732e06f",
"vbd_userdevice": "0"
}
],
"domid": "143",
"folder": "/Ansible/Test",
"hardware": {
"memory_mb": 2048,
"num_cpu_cores_per_socket": 2,
"num_cpus": 2
},
"home_server": "",
"is_template": false,
"name": "ansible-test-vm-1",
"name_desc": "Created by Ansible",
"networks": [
{
"gateway": "10.0.0.1",
"gateway6": "",
"ip": "10.0.0.2",
"ip6": [
"fe80:0000:0000:0000:11e1:12c9:ef3b:75a0"
],
"mac": "7a:a6:48:1e:31:46",
"mtu": "1500",
"name": "Host internal management network",
"netmask": "255.255.255.0",
"prefix": "24",
"prefix6": "",
"vif_device": "0"
}
],
"other_config": {
"base_template_name": "Windows Server 2016 (64-bit)",
"folder": "/Ansible/Test",
"import_task": "OpaqueRef:e43eb71c-45d6-5351-09ff-96e4fb7d0fa5",
"install-methods": "cdrom",
"instant": "true",
"mac_seed": "366fe8e0-878b-4320-8731-90d1ed3c0b93"
},
"platform": {
"acpi": "1",
"apic": "true",
"cores-per-socket": "2",
"device_id": "0002",
"hpet": "true",
"nx": "true",
"pae": "true",
"timeoffset": "-28800",
"vga": "std",
"videoram": "8",
"viridian": "true",
"viridian_reference_tsc": "true",
"viridian_time_ref_count": "true"
},
"state": "poweredon",
"uuid": "81c373d7-a407-322f-911b-31386eb5215d",
"xenstore_data": {
"vm-data": ""
}
}

View file

@ -0,0 +1,707 @@
{
"SR": {
"OpaqueRef:f746e964-e0fe-c36d-d60b-6897cfde583f": {
"PBDs": [],
"VDIs": [],
"allowed_operations": [
"unplug",
"plug",
"pbd_create",
"update",
"pbd_destroy",
"vdi_resize",
"vdi_clone",
"scan",
"vdi_snapshot",
"vdi_mirror",
"vdi_create",
"vdi_destroy"
],
"blobs": {},
"clustered": false,
"content_type": "",
"current_operations": {},
"introduced_by": "OpaqueRef:NULL",
"is_tools_sr": false,
"local_cache_enabled": false,
"name_description": "",
"name_label": "Ansible Test Storage 1",
"other_config": {
"auto-scan": "false"
},
"physical_size": "2521133219840",
"physical_utilisation": "1551485632512",
"shared": true,
"sm_config": {
"allocation": "thick",
"devserial": "scsi-3600a098038302d353624495242443848",
"multipathable": "true",
"use_vhd": "true"
},
"tags": [],
"type": "lvmohba",
"uuid": "767b30e4-f8db-a83d-8ba7-f5e6e732e06f",
"virtual_allocation": "1556925644800"
}
},
"VBD": {
"OpaqueRef:1c0a7c6d-09e5-9b2c-bbe3-9a73aadcff9f": {
"VDI": "OpaqueRef:NULL",
"VM": "OpaqueRef:43a1b8d4-da96-cb08-10f5-fb368abed19c",
"allowed_operations": [
"attach",
"unpause",
"insert",
"pause"
],
"bootable": false,
"current_operations": {},
"currently_attached": true,
"device": "xvdd",
"empty": true,
"metrics": "OpaqueRef:1a36eae4-87c8-0945-cee9-c85a71fd843f",
"mode": "RO",
"other_config": {},
"qos_algorithm_params": {},
"qos_algorithm_type": "",
"qos_supported_algorithms": [],
"runtime_properties": {},
"status_code": "0",
"status_detail": "",
"storage_lock": false,
"type": "CD",
"unpluggable": true,
"userdevice": "3",
"uuid": "e6aacd53-a2c8-649f-b405-93fcb811411a"
},
"OpaqueRef:ea4a4088-19c3-6db6-ebdf-c3c0ee4405a3": {
"VDI": "OpaqueRef:fd20510d-e9ca-b966-3b98-4ae547dacf9a",
"VM": "OpaqueRef:43a1b8d4-da96-cb08-10f5-fb368abed19c",
"allowed_operations": [
"attach",
"unpause",
"unplug",
"unplug_force",
"pause"
],
"bootable": true,
"current_operations": {},
"currently_attached": true,
"device": "xvda",
"empty": false,
"metrics": "OpaqueRef:ddbd70d4-7dde-b51e-6208-eb434b300009",
"mode": "RW",
"other_config": {
"owner": "true"
},
"qos_algorithm_params": {},
"qos_algorithm_type": "",
"qos_supported_algorithms": [],
"runtime_properties": {},
"status_code": "0",
"status_detail": "",
"storage_lock": false,
"type": "Disk",
"unpluggable": true,
"userdevice": "0",
"uuid": "ffd6de9c-c416-1d52-3e9d-3bcbf567245e"
}
},
"VDI": {
"OpaqueRef:fd20510d-e9ca-b966-3b98-4ae547dacf9a": {
"SR": "OpaqueRef:f746e964-e0fe-c36d-d60b-6897cfde583f",
"VBDs": [
"OpaqueRef:ea4a4088-19c3-6db6-ebdf-c3c0ee4405a3"
],
"allow_caching": false,
"allowed_operations": [
"clone",
"snapshot"
],
"crash_dumps": [],
"current_operations": {},
"is_a_snapshot": false,
"is_tools_iso": false,
"location": "b807f67b-3f37-4a6e-ad6c-033f812ab093",
"managed": true,
"metadata_latest": false,
"metadata_of_pool": "",
"missing": false,
"name_description": "C:\\",
"name_label": "ansible-test-vm-1-C",
"on_boot": "persist",
"other_config": {},
"parent": "OpaqueRef:NULL",
"physical_utilisation": "43041947648",
"read_only": false,
"sharable": false,
"sm_config": {
"host_OpaqueRef:07a8da76-f1cf-f3b5-a531-6b751384f770": "RW",
"read-caching-enabled-on-92ac8132-276b-4d0f-9d3a-54db51e4a438": "false",
"read-caching-reason-92ac8132-276b-4d0f-9d3a-54db51e4a438": "LICENSE_RESTRICTION",
"vdi_type": "vhd"
},
"snapshot_of": "OpaqueRef:NULL",
"snapshot_time": "19700101T00:00:00Z",
"snapshots": [],
"storage_lock": false,
"tags": [],
"type": "system",
"uuid": "b807f67b-3f37-4a6e-ad6c-033f812ab093",
"virtual_size": "42949672960",
"xenstore_data": {}
}
},
"VIF": {
"OpaqueRef:38da2120-6086-5043-8383-ab0a53ede42a": {
"MAC": "7a:a6:48:1e:31:46",
"MAC_autogenerated": false,
"MTU": "1500",
"VM": "OpaqueRef:43a1b8d4-da96-cb08-10f5-fb368abed19c",
"allowed_operations": [
"attach",
"unplug"
],
"current_operations": {},
"currently_attached": true,
"device": "0",
"ipv4_addresses": [
"10.0.0.2/24"
],
"ipv4_allowed": [],
"ipv4_configuration_mode": "Static",
"ipv4_gateway": "10.0.0.1",
"ipv6_addresses": [
""
],
"ipv6_allowed": [],
"ipv6_configuration_mode": "None",
"ipv6_gateway": "",
"locking_mode": "network_default",
"metrics": "OpaqueRef:15502939-df0f-0095-1ce3-e51367199d27",
"network": "OpaqueRef:8a404c5e-5673-ab69-5d6f-5a35a33b8724",
"other_config": {},
"qos_algorithm_params": {},
"qos_algorithm_type": "",
"qos_supported_algorithms": [],
"runtime_properties": {},
"status_code": "0",
"status_detail": "",
"uuid": "bd108d25-488a-f9b5-4c7b-02d40f1e38a8"
}
},
"VM": {
"OpaqueRef:43a1b8d4-da96-cb08-10f5-fb368abed19c": {
"HVM_boot_params": {
"order": "dc"
},
"HVM_boot_policy": "BIOS order",
"HVM_shadow_multiplier": 1.0,
"PCI_bus": "",
"PV_args": "",
"PV_bootloader": "",
"PV_bootloader_args": "",
"PV_kernel": "",
"PV_legacy_args": "",
"PV_ramdisk": "",
"VBDs": [
"OpaqueRef:1c0a7c6d-09e5-9b2c-bbe3-9a73aadcff9f",
"OpaqueRef:ea4a4088-19c3-6db6-ebdf-c3c0ee4405a3"
],
"VCPUs_at_startup": "2",
"VCPUs_max": "2",
"VCPUs_params": {},
"VGPUs": [],
"VIFs": [
"OpaqueRef:38da2120-6086-5043-8383-ab0a53ede42a"
],
"VTPMs": [],
"actions_after_crash": "restart",
"actions_after_reboot": "restart",
"actions_after_shutdown": "destroy",
"affinity": "OpaqueRef:NULL",
"allowed_operations": [
"changing_dynamic_range",
"migrate_send",
"pool_migrate",
"changing_VCPUs_live",
"suspend",
"hard_reboot",
"hard_shutdown",
"clean_reboot",
"clean_shutdown",
"pause",
"checkpoint",
"snapshot"
],
"appliance": "OpaqueRef:NULL",
"attached_PCIs": [],
"bios_strings": {
"bios-vendor": "Xen",
"bios-version": "",
"hp-rombios": "",
"oem-1": "Xen",
"oem-2": "MS_VM_CERT/SHA1/bdbeb6e0a816d43fa6d3fe8aaef04c2bad9d3e3d",
"system-manufacturer": "Xen",
"system-product-name": "HVM domU",
"system-serial-number": "",
"system-version": ""
},
"blobs": {},
"blocked_operations": {},
"children": [],
"consoles": [
"OpaqueRef:4fa7d34e-1fb6-9e88-1b21-41a3c6550d8b"
],
"crash_dumps": [],
"current_operations": {},
"domarch": "",
"domid": "143",
"generation_id": "3274224479562869847:6952848762503845513",
"guest_metrics": "OpaqueRef:453f21be-954d-2ca8-e38e-09741e91350c",
"ha_always_run": false,
"ha_restart_priority": "",
"hardware_platform_version": "0",
"has_vendor_device": false,
"is_a_snapshot": false,
"is_a_template": false,
"is_control_domain": false,
"is_default_template": false,
"is_snapshot_from_vmpp": false,
"is_vmss_snapshot": false,
"last_boot_CPU_flags": {
"features": "17cbfbff-f7fa3223-2d93fbff-00000023-00000001-000007ab-00000000-00000000-00001000-0c000000",
"vendor": "GenuineIntel"
},
"last_booted_record": "",
"memory_dynamic_max": "2147483648",
"memory_dynamic_min": "2147483648",
"memory_overhead": "20971520",
"memory_static_max": "2147483648",
"memory_static_min": "1073741824",
"memory_target": "2147483648",
"metrics": "OpaqueRef:6eede779-4e55-7cfb-8b8a-e4b9becf770b",
"name_description": "Created by Ansible",
"name_label": "ansible-test-vm-1",
"order": "0",
"other_config": {
"base_template_name": "Windows Server 2016 (64-bit)",
"folder": "/Ansible/Test",
"import_task": "OpaqueRef:e43eb71c-45d6-5351-09ff-96e4fb7d0fa5",
"install-methods": "cdrom",
"instant": "true",
"mac_seed": "366fe8e0-878b-4320-8731-90d1ed3c0b93"
},
"parent": "OpaqueRef:NULL",
"platform": {
"acpi": "1",
"apic": "true",
"cores-per-socket": "2",
"device_id": "0002",
"hpet": "true",
"nx": "true",
"pae": "true",
"timeoffset": "-28800",
"vga": "std",
"videoram": "8",
"viridian": "true",
"viridian_reference_tsc": "true",
"viridian_time_ref_count": "true"
},
"power_state": "Running",
"protection_policy": "OpaqueRef:NULL",
"recommendations": "<restrictions><restriction field=\"memory-static-max\" max=\"1649267441664\"/><restriction field=\"vcpus-max\" max=\"32\"/><restriction field=\"has-vendor-device\" value=\"true\"/><restriction max=\"255\" property=\"number-of-vbds\"/><restriction max=\"7\" property=\"number-of-vifs\"/></restrictions>",
"reference_label": "windows-server-2016-64bit",
"requires_reboot": false,
"resident_on": "OpaqueRef:07a8da76-f1cf-f3b5-a531-6b751384f770",
"shutdown_delay": "0",
"snapshot_info": {},
"snapshot_metadata": "",
"snapshot_of": "OpaqueRef:NULL",
"snapshot_schedule": "OpaqueRef:NULL",
"snapshot_time": "19700101T00:00:00Z",
"snapshots": [],
"start_delay": "0",
"suspend_SR": "OpaqueRef:NULL",
"suspend_VDI": "OpaqueRef:NULL",
"tags": [],
"transportable_snapshot_id": "",
"user_version": "1",
"uuid": "81c373d7-a407-322f-911b-31386eb5215d",
"version": "0",
"xenstore_data": {
"vm-data": ""
}
}
},
"VM_guest_metrics": {
"OpaqueRef:453f21be-954d-2ca8-e38e-09741e91350c": {
"PV_drivers_detected": true,
"PV_drivers_up_to_date": true,
"PV_drivers_version": {
"build": "1020",
"major": "7",
"micro": "0",
"minor": "1"
},
"can_use_hotplug_vbd": "yes",
"can_use_hotplug_vif": "yes",
"disks": {},
"last_updated": "20190113T19:40:34Z",
"live": true,
"memory": {},
"networks": {
"0/ip": "10.0.0.2",
"0/ipv6/0": "fe80:0000:0000:0000:11e1:12c9:ef3b:75a0"
},
"os_version": {
"distro": "windows",
"major": "6",
"minor": "2",
"name": "Microsoft Windows Server 2016 Standard|C:\\Windows|\\Device\\Harddisk0\\Partition2",
"spmajor": "0",
"spminor": "0"
},
"other": {
"data-ts": "1",
"error": "WTSQueryUserToken : 1008 failed.",
"feature-balloon": "1",
"feature-poweroff": "1",
"feature-reboot": "1",
"feature-s3": "1",
"feature-s4": "1",
"feature-setcomputername": "1",
"feature-static-ip-setting": "1",
"feature-suspend": "1",
"feature-ts": "1",
"feature-ts2": "1",
"feature-xs-batcmd": "1",
"has-vendor-device": "0",
"platform-feature-multiprocessor-suspend": "1"
},
"other_config": {},
"uuid": "9ea6803f-12ca-3d6a-47b7-c90a33b67b98"
}
},
"VM_metrics": {
"OpaqueRef:6eede779-4e55-7cfb-8b8a-e4b9becf770b": {
"VCPUs_CPU": {},
"VCPUs_flags": {},
"VCPUs_number": "2",
"VCPUs_params": {},
"VCPUs_utilisation": {},
"hvm": true,
"install_time": "20190113T19:31:47Z",
"last_updated": "19700101T00:00:00Z",
"memory_actual": "2147475456",
"nested_virt": false,
"nomigrate": false,
"other_config": {},
"start_time": "20190113T19:38:59Z",
"state": [],
"uuid": "c67fadf7-8143-0c92-c772-cd3901c18e70"
}
},
"host": {
"OpaqueRef:07a8da76-f1cf-f3b5-a531-6b751384f770": {
"API_version_major": "2",
"API_version_minor": "7",
"API_version_vendor": "XenSource",
"API_version_vendor_implementation": {},
"PBDs": [],
"PCIs": [],
"PGPUs": [],
"PIFs": [],
"address": "10.0.0.1",
"allowed_operations": [
"vm_migrate",
"provision",
"vm_resume",
"evacuate",
"vm_start"
],
"bios_strings": {},
"blobs": {},
"capabilities": [
"xen-3.0-x86_64",
"xen-3.0-x86_32p",
"hvm-3.0-x86_32",
"hvm-3.0-x86_32p",
"hvm-3.0-x86_64",
""
],
"chipset_info": {
"iommu": "true"
},
"control_domain": "OpaqueRef:a2a31555-f232-822b-8f36-10d75d44b79c",
"cpu_configuration": {},
"cpu_info": {
"cpu_count": "40",
"family": "6",
"features": "7ffefbff-bfebfbff-00000021-2c100800",
"features_hvm": "17cbfbff-f7fa3223-2d93fbff-00000023-00000001-000007ab-00000000-00000000-00001000-0c000000",
"features_pv": "17c9cbf5-f6f83203-2191cbf5-00000023-00000001-00000329-00000000-00000000-00001000-0c000000",
"flags": "fpu de tsc msr pae mce cx8 apic sep mca cmov pat clflush acpi mmx fxsr sse sse2 ht syscall nx lm constant_tsc arch_perfmon rep_good nopl nonstop_tsc eagerfpu pni pclmulqdq monitor est ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm ida arat epb pln pts dtherm fsgsbase bmi1 avx2 bmi2 erms xsaveopt cqm_llc cqm_occup_llc",
"model": "63",
"modelname": "Intel(R) Xeon(R) CPU E5-2660 v3 @ 2.60GHz",
"socket_count": "2",
"speed": "2597.064",
"stepping": "2",
"vendor": "GenuineIntel"
},
"crash_dump_sr": "OpaqueRef:ed72d7bf-4e53-67fc-17f5-e27b203042ba",
"crashdumps": [],
"current_operations": {},
"display": "enabled",
"edition": "free",
"enabled": true,
"external_auth_configuration": {},
"external_auth_service_name": "",
"external_auth_type": "",
"features": [],
"guest_VCPUs_params": {},
"ha_network_peers": [],
"ha_statefiles": [],
"host_CPUs": [
"OpaqueRef:f7e744f6-a6f9-c460-999a-c27e1395e2e0",
"OpaqueRef:f6e5dcf0-0453-8f3f-88c1-7ad6e2ef3dd1",
"OpaqueRef:f27a52fb-5feb-173d-1a07-d1735a83c2cc",
"OpaqueRef:ed65327a-508a-ccfc-dba6-2a0175cb2432",
"OpaqueRef:e41d2f2a-fe9e-72cb-8104-b22d6d314b13",
"OpaqueRef:e1988469-b814-5d10-17a6-bfd7c62d2b5f",
"OpaqueRef:d73967dc-b8d8-b47b-39f4-d599fdcabf55",
"OpaqueRef:cba9ebd9-40dc-0611-d1bb-aa661bd0bf70",
"OpaqueRef:c53d3110-4085-60af-8300-d879818789f7",
"OpaqueRef:bee0cf87-7df6-79a6-94e8-36f98e69ad20",
"OpaqueRef:bde28e83-213f-0e65-b6ad-0ae1ecebb98d",
"OpaqueRef:bbfefe67-f65f-98cb-c3fc-cb8ea0588006",
"OpaqueRef:b38ac595-afea-0ca0-49a0-9f5ef2368e3b",
"OpaqueRef:b14ef333-78b1-193d-02da-dc9bfed36912",
"OpaqueRef:afd478bf-57b9-0c79-f257-50aeb81504f1",
"OpaqueRef:a307cd3a-2132-2e42-4ebc-cc1c7780736d",
"OpaqueRef:a1a9df7d-88ba-64fd-a55c-0f6472e1753f",
"OpaqueRef:a0e39c9c-3e0b-fa03-e5d0-93a09aa77393",
"OpaqueRef:9fd5719b-36ab-8e25-7756-20a496ccb331",
"OpaqueRef:9ac4195d-ac07-cfe2-bc19-27ee54cf91fb",
"OpaqueRef:98c5c00c-1e2d-e22b-842e-79e85ce07873",
"OpaqueRef:961129bf-e695-f206-7297-64f9007a64f3",
"OpaqueRef:64368b4c-3488-2808-f0b3-42f2a656df2b",
"OpaqueRef:620dabc0-d7c5-0dc8-52df-3be25194c2fb",
"OpaqueRef:5cee2759-dd8e-7e1a-0727-21e196584030",
"OpaqueRef:58f70163-863d-5787-ffbb-2416cb16ca1e",
"OpaqueRef:4462f848-f396-653d-67f9-2bed13be2c58",
"OpaqueRef:40e800c2-19db-7cd8-c045-5ae93f908cae",
"OpaqueRef:3f84278b-dec6-ded0-1a33-4daa0ce75a2f",
"OpaqueRef:3ef14992-62f6-e1f0-5715-0ee02a834a9c",
"OpaqueRef:3e274c24-c55b-06f5-2c8f-415421043ab2",
"OpaqueRef:35ff27da-f286-7b70-adc1-a200880bb79f",
"OpaqueRef:2511aa53-8660-e442-3cd2-305982d1f751",
"OpaqueRef:21d234e3-138c-81ca-9ed8-febc81b874e9",
"OpaqueRef:1f9b4ee3-dcc7-114e-b401-dc3e94c07efa",
"OpaqueRef:1b94a981-d340-dd07-41c2-b3ff3c545fed",
"OpaqueRef:197ad104-64a8-5af3-8c7a-95f3d301aadd",
"OpaqueRef:1672e747-dc4b-737b-ddcf-0a373f966012",
"OpaqueRef:12ced494-a225-7584-456b-739331bb5114",
"OpaqueRef:0139ff72-62ac-1a6a-8f6f-cb01d8a4ee92"
],
"hostname": "ansible-test-host-1",
"license_params": {
"address1": "",
"address2": "",
"city": "",
"company": "",
"country": "",
"enable_xha": "true",
"expiry": "20291231T23:00:00Z",
"grace": "no",
"license_type": "",
"name": "",
"platform_filter": "false",
"postalcode": "",
"productcode": "",
"regular_nag_dialog": "false",
"restrict_ad": "false",
"restrict_batch_hotfix_apply": "true",
"restrict_checkpoint": "false",
"restrict_cifs": "true",
"restrict_connection": "false",
"restrict_cpu_masking": "false",
"restrict_dmc": "false",
"restrict_dr": "false",
"restrict_email_alerting": "false",
"restrict_equalogic": "false",
"restrict_export_resource_data": "true",
"restrict_gpu": "false",
"restrict_guest_agent_auto_update": "true",
"restrict_guest_ip_setting": "false",
"restrict_health_check": "false",
"restrict_historical_performance": "false",
"restrict_hotfix_apply": "false",
"restrict_integrated_gpu_passthrough": "false",
"restrict_intellicache": "false",
"restrict_lab": "false",
"restrict_live_patching": "true",
"restrict_marathon": "false",
"restrict_nested_virt": "true",
"restrict_netapp": "false",
"restrict_pci_device_for_auto_update": "true",
"restrict_pool_attached_storage": "false",
"restrict_pooling": "false",
"restrict_pvs_proxy": "true",
"restrict_qos": "false",
"restrict_rbac": "false",
"restrict_read_caching": "true",
"restrict_set_vcpus_number_live": "true",
"restrict_ssl_legacy_switch": "false",
"restrict_stage": "false",
"restrict_storage_xen_motion": "false",
"restrict_storagelink": "false",
"restrict_storagelink_site_recovery": "false",
"restrict_vgpu": "true",
"restrict_vif_locking": "false",
"restrict_vlan": "false",
"restrict_vm_memory_introspection": "true",
"restrict_vmpr": "false",
"restrict_vmss": "false",
"restrict_vss": "false",
"restrict_vswitch_controller": "false",
"restrict_web_selfservice": "true",
"restrict_web_selfservice_manager": "true",
"restrict_wlb": "true",
"restrict_xcm": "true",
"restrict_xen_motion": "false",
"serialnumber": "",
"sku_marketing_name": "Citrix XenServer",
"sku_type": "free",
"sockets": "2",
"state": "",
"version": ""
},
"license_server": {
"address": "localhost",
"port": "27000"
},
"local_cache_sr": "OpaqueRef:ed72d7bf-4e53-67fc-17f5-e27b203042ba",
"logging": {},
"memory_overhead": "4606619648",
"metrics": "OpaqueRef:82b6937a-60c2-96d8-4e78-9f9a1143033f",
"name_description": "",
"name_label": "ansible-test-host-1",
"other_config": {
"agent_start_time": "1532019557.",
"boot_time": "1530023264.",
"iscsi_iqn": "iqn.2018-06.com.example:c8bac750",
"last_blob_sync_time": "1547394076.36",
"multipathhandle": "dmp",
"multipathing": "true"
},
"patches": [
"OpaqueRef:f74ca18d-cfb7-e4fe-e5c4-819843de11e2",
"OpaqueRef:f53ff05e-8dd8-3a15-d3b0-8dcf6004fbe2",
"OpaqueRef:ed7f38da-1a50-a48b-60bf-933cabe8d7bc",
"OpaqueRef:e7bb1462-51a5-1aaf-3b56-11b8ebd83a94",
"OpaqueRef:d87b343b-6ba3-db8b-b80e-e02319ba5924",
"OpaqueRef:ccb00450-ed04-4eaa-e6d7-130ef3722374",
"OpaqueRef:b79b8864-11d9-1d5f-09e5-a66d7b64b9e2",
"OpaqueRef:9bebcc7d-61ae-126b-3be0-9156026e586f",
"OpaqueRef:740a1156-b991-00b8-ef50-fdbb22a4d911",
"OpaqueRef:71def430-754b-2bfb-6c93-ec3b67b754e4",
"OpaqueRef:6c73b00d-df66-1740-9578-2b14e46297ba",
"OpaqueRef:6a53d2ae-3d6b-32ed-705f-fd53f1304470",
"OpaqueRef:35a67684-b094-1c77-beff-8237d87c7a27",
"OpaqueRef:33da42c2-c421-9859-79b7-ce9b6c394a1b",
"OpaqueRef:2baa6b4b-9bbe-c1b2-23ce-c8c831ac581d",
"OpaqueRef:2ac3beea-dee2-44e7-9f67-5fd216e593a0",
"OpaqueRef:1bd8f24b-3190-6e7a-b36e-e2998197d062",
"OpaqueRef:1694ea26-4930-6ca1-036e-273438375de9",
"OpaqueRef:09813f03-0c6f-a6af-768f-ef4cdde2c641"
],
"power_on_config": {},
"power_on_mode": "",
"resident_VMs": [],
"sched_policy": "credit",
"software_version": {
"build_number": "release/falcon/master/8",
"date": "2017-05-11",
"db_schema": "5.120",
"dbv": "2017.0517",
"hostname": "f7d02093adae",
"linux": "4.4.0+10",
"network_backend": "openvswitch",
"platform_name": "XCP",
"platform_version": "2.3.0",
"product_brand": "XenServer",
"product_version": "7.2.0",
"product_version_text": "7.2",
"product_version_text_short": "7.2",
"xapi": "1.9",
"xen": "4.7.5-2.12",
"xencenter_max": "2.7",
"xencenter_min": "2.7"
},
"ssl_legacy": true,
"supported_bootloaders": [
"pygrub",
"eliloader"
],
"suspend_image_sr": "OpaqueRef:ed72d7bf-4e53-67fc-17f5-e27b203042ba",
"tags": [],
"updates": [
"OpaqueRef:b71938bf-4c4f-eb17-7e78-588e71297a74",
"OpaqueRef:91cfa47b-52f9-a4e3-4e78-52e3eb3e5141",
"OpaqueRef:e2209ae9-5362-3a20-f691-9294144e49f2",
"OpaqueRef:6ac77a0f-f079-8067-85cc-c9ae2f8dcca9",
"OpaqueRef:a17e721d-faf4-6ad1-c617-dd4899279534",
"OpaqueRef:6c9b814c-e1c2-b8be-198f-de358686b10a",
"OpaqueRef:fbaabbfe-88d5-d89b-5b3f-d6374601ca71",
"OpaqueRef:9eccc765-9726-d220-96b1-2e85adf77ecc",
"OpaqueRef:204558d7-dce0-2304-bdc5-80ec5fd7e3c3",
"OpaqueRef:65b14ae7-f440-0c4d-4af9-c7946b90fd2f",
"OpaqueRef:0760c608-b02e-743a-18a1-fa8f205374d6",
"OpaqueRef:1ced32ca-fec4-8b44-0e8f-753c97f2d93f",
"OpaqueRef:3fffd7c7-f4d1-6b03-a5b8-d75211bb7b8f",
"OpaqueRef:01befb95-412e-e9dd-5b5d-edd50df61cb1",
"OpaqueRef:a3f9481e-fe3d-1f00-235f-44d404f51128",
"OpaqueRef:507ee5fc-59d3-e635-21d5-98a5cace4bf2",
"OpaqueRef:7b4b5da1-54af-d0c4-3fea-394b4257bffe",
"OpaqueRef:f61edc83-91d9-a161-113f-00c110196238",
"OpaqueRef:7efce157-9b93-d116-f3f8-7eb0c6fb1a79"
],
"updates_requiring_reboot": [],
"uuid": "92ac8132-276b-4d0f-9d3a-54db51e4a438",
"virtual_hardware_platform_versions": [
"0",
"1",
"2"
]
}
},
"network": {
"OpaqueRef:8a404c5e-5673-ab69-5d6f-5a35a33b8724": {
"MTU": "1500",
"PIFs": [],
"VIFs": [],
"allowed_operations": [],
"assigned_ips": {
"OpaqueRef:8171dad1-f902-ec00-7ba2-9f92d8aa75ab": "169.254.0.3",
"OpaqueRef:9754a0ed-e100-d224-6a70-a55a9c2cedf9": "169.254.0.2"
},
"blobs": {},
"bridge": "xenapi",
"current_operations": {},
"default_locking_mode": "unlocked",
"managed": true,
"name_description": "Network on which guests will be assigned a private link-local IP address which can be used to talk XenAPI",
"name_label": "Host internal management network",
"other_config": {
"ip_begin": "169.254.0.1",
"ip_end": "169.254.255.254",
"is_guest_installer_network": "true",
"is_host_internal_management_network": "true",
"netmask": "255.255.0.0"
},
"tags": [],
"uuid": "dbb96525-944f-0d1a-54ed-e65cb6d07450"
}
}
}

View file

@ -0,0 +1,87 @@
{
"cdrom": {
"type": "none"
},
"customization_agent": "custom",
"disks": [
{
"name": "ansible-test-vm-2-root",
"name_desc": "/",
"os_device": "xvda",
"size": 10737418240,
"sr": "Ansible Test Storage 1",
"sr_uuid": "767b30e4-f8db-a83d-8ba7-f5e6e732e06f",
"vbd_userdevice": "0"
},
{
"name": "ansible-test-vm-2-mysql",
"name_desc": "/var/lib/mysql",
"os_device": "xvdb",
"size": 1073741824,
"sr": "Ansible Test Storage 1",
"sr_uuid": "767b30e4-f8db-a83d-8ba7-f5e6e732e06f",
"vbd_userdevice": "1"
}
],
"domid": "140",
"folder": "/Ansible/Test",
"hardware": {
"memory_mb": 1024,
"num_cpu_cores_per_socket": 1,
"num_cpus": 1
},
"home_server": "ansible-test-host-2",
"is_template": false,
"name": "ansible-test-vm-2",
"name_desc": "Created by Ansible",
"networks": [
{
"gateway": "10.0.0.1",
"gateway6": "",
"ip": "169.254.0.2",
"ip6": [],
"mac": "16:87:31:70:d6:31",
"mtu": "1500",
"name": "Host internal management network",
"netmask": "255.255.255.0",
"prefix": "24",
"prefix6": "",
"vif_device": "0"
}
],
"other_config": {
"base_template_name": "CentOS 7",
"folder": "/Ansible/Test",
"import_task": "OpaqueRef:cf1402d3-b6c1-d908-fe62-06502e3b311a",
"install-methods": "cdrom,nfs,http,ftp",
"instant": "true",
"linux_template": "true",
"mac_seed": "0ab46664-f519-5383-166e-e4ea485ede7d"
},
"platform": {
"acpi": "1",
"apic": "true",
"cores-per-socket": "1",
"device_id": "0001",
"nx": "true",
"pae": "true",
"timeoffset": "0",
"vga": "std",
"videoram": "8",
"viridian": "false"
},
"state": "poweredon",
"uuid": "0a05d5ad-3e4b-f0dc-6101-8c56623958bc",
"xenstore_data": {
"vm-data": "",
"vm-data/networks": "",
"vm-data/networks/0": "",
"vm-data/networks/0/gateway": "10.0.0.1",
"vm-data/networks/0/ip": "10.0.0.3",
"vm-data/networks/0/mac": "16:87:31:70:d6:31",
"vm-data/networks/0/name": "Host internal management network",
"vm-data/networks/0/netmask": "255.255.255.0",
"vm-data/networks/0/prefix": "24",
"vm-data/networks/0/type": "static"
}
}

View file

@ -0,0 +1,771 @@
{
"SR": {
"OpaqueRef:f746e964-e0fe-c36d-d60b-6897cfde583f": {
"PBDs": [],
"VDIs": [],
"allowed_operations": [
"unplug",
"plug",
"pbd_create",
"update",
"pbd_destroy",
"vdi_resize",
"vdi_clone",
"scan",
"vdi_snapshot",
"vdi_mirror",
"vdi_create",
"vdi_destroy"
],
"blobs": {},
"clustered": false,
"content_type": "",
"current_operations": {},
"introduced_by": "OpaqueRef:NULL",
"is_tools_sr": false,
"local_cache_enabled": false,
"name_description": "",
"name_label": "Ansible Test Storage 1",
"other_config": {
"auto-scan": "false"
},
"physical_size": "2521133219840",
"physical_utilisation": "1551485632512",
"shared": true,
"sm_config": {
"allocation": "thick",
"devserial": "scsi-3600a098038302d353624495242443848",
"multipathable": "true",
"use_vhd": "true"
},
"tags": [],
"type": "lvmohba",
"uuid": "767b30e4-f8db-a83d-8ba7-f5e6e732e06f",
"virtual_allocation": "1556925644800"
}
},
"VBD": {
"OpaqueRef:510e214e-f0ba-3bc9-7834-a4f4d3fa33ef": {
"VDI": "OpaqueRef:NULL",
"VM": "OpaqueRef:08632af0-473e-5106-f400-7910229e49be",
"allowed_operations": [
"attach",
"unpause",
"insert",
"pause"
],
"bootable": false,
"current_operations": {},
"currently_attached": true,
"device": "xvdd",
"empty": true,
"metrics": "OpaqueRef:1075bebe-ba71-66ef-ba30-8afbc83bc6b5",
"mode": "RO",
"other_config": {},
"qos_algorithm_params": {},
"qos_algorithm_type": "",
"qos_supported_algorithms": [],
"runtime_properties": {},
"status_code": "0",
"status_detail": "",
"storage_lock": false,
"type": "CD",
"unpluggable": true,
"userdevice": "3",
"uuid": "79ee1d8e-944b-3bfd-ba4c-a0c165d84f3d"
},
"OpaqueRef:6bc2c353-f132-926d-6e9b-e4d1d55a3760": {
"VDI": "OpaqueRef:102bef39-b134-d23a-9a50-490e1dbca8f7",
"VM": "OpaqueRef:08632af0-473e-5106-f400-7910229e49be",
"allowed_operations": [
"attach",
"unpause",
"pause"
],
"bootable": true,
"current_operations": {},
"currently_attached": true,
"device": "xvda",
"empty": false,
"metrics": "OpaqueRef:1c71ccde-d7e9-10fb-569c-993b880fa790",
"mode": "RW",
"other_config": {
"owner": ""
},
"qos_algorithm_params": {},
"qos_algorithm_type": "",
"qos_supported_algorithms": [],
"runtime_properties": {},
"status_code": "0",
"status_detail": "",
"storage_lock": false,
"type": "Disk",
"unpluggable": false,
"userdevice": "0",
"uuid": "932fdf6d-7ac5-45e8-a48e-694af75726f1"
},
"OpaqueRef:9bd6decd-2e55-b55e-387d-c40aa67ff151": {
"VDI": "OpaqueRef:87b45ac6-af36-f4fd-6ebd-a08bed9001e4",
"VM": "OpaqueRef:08632af0-473e-5106-f400-7910229e49be",
"allowed_operations": [
"attach",
"unpause",
"unplug",
"unplug_force",
"pause"
],
"bootable": false,
"current_operations": {},
"currently_attached": true,
"device": "xvdb",
"empty": false,
"metrics": "OpaqueRef:b8424146-d3ea-4850-db9a-47f0059c10ac",
"mode": "RW",
"other_config": {},
"qos_algorithm_params": {},
"qos_algorithm_type": "",
"qos_supported_algorithms": [],
"runtime_properties": {},
"status_code": "0",
"status_detail": "",
"storage_lock": false,
"type": "Disk",
"unpluggable": true,
"userdevice": "1",
"uuid": "c0c1e648-3690-e1fb-9f47-24b4df0cb458"
}
},
"VDI": {
"OpaqueRef:102bef39-b134-d23a-9a50-490e1dbca8f7": {
"SR": "OpaqueRef:f746e964-e0fe-c36d-d60b-6897cfde583f",
"VBDs": [
"OpaqueRef:6bc2c353-f132-926d-6e9b-e4d1d55a3760"
],
"allow_caching": false,
"allowed_operations": [
"clone",
"snapshot"
],
"crash_dumps": [],
"current_operations": {},
"is_a_snapshot": false,
"is_tools_iso": false,
"location": "fa1202b8-326f-4235-802e-fafbed66b26b",
"managed": true,
"metadata_latest": false,
"metadata_of_pool": "",
"missing": false,
"name_description": "/",
"name_label": "ansible-test-vm-2-root",
"on_boot": "persist",
"other_config": {},
"parent": "OpaqueRef:NULL",
"physical_utilisation": "10766778368",
"read_only": false,
"sharable": false,
"sm_config": {
"host_OpaqueRef:e87be804-57a1-532e-56ac-6c4910957be0": "RW",
"read-caching-enabled-on-dff6702e-bcb6-4704-8dd4-952e8c883365": "false",
"read-caching-reason-dff6702e-bcb6-4704-8dd4-952e8c883365": "LICENSE_RESTRICTION",
"vdi_type": "vhd"
},
"snapshot_of": "OpaqueRef:NULL",
"snapshot_time": "19700101T00:00:00Z",
"snapshots": [],
"storage_lock": false,
"tags": [],
"type": "system",
"uuid": "fa1202b8-326f-4235-802e-fafbed66b26b",
"virtual_size": "10737418240",
"xenstore_data": {}
},
"OpaqueRef:87b45ac6-af36-f4fd-6ebd-a08bed9001e4": {
"SR": "OpaqueRef:f746e964-e0fe-c36d-d60b-6897cfde583f",
"VBDs": [
"OpaqueRef:9bd6decd-2e55-b55e-387d-c40aa67ff151"
],
"allow_caching": false,
"allowed_operations": [
"clone",
"snapshot"
],
"crash_dumps": [],
"current_operations": {},
"is_a_snapshot": false,
"is_tools_iso": false,
"location": "ab3a4d72-f498-4687-86ce-ca937046db76",
"managed": true,
"metadata_latest": false,
"metadata_of_pool": "",
"missing": false,
"name_description": "/var/lib/mysql",
"name_label": "ansible-test-vm-2-mysql",
"on_boot": "persist",
"other_config": {},
"parent": "OpaqueRef:NULL",
"physical_utilisation": "1082130432",
"read_only": false,
"sharable": false,
"sm_config": {
"host_OpaqueRef:e87be804-57a1-532e-56ac-6c4910957be0": "RW",
"read-caching-enabled-on-dff6702e-bcb6-4704-8dd4-952e8c883365": "false",
"read-caching-reason-dff6702e-bcb6-4704-8dd4-952e8c883365": "LICENSE_RESTRICTION",
"vdi_type": "vhd"
},
"snapshot_of": "OpaqueRef:NULL",
"snapshot_time": "19700101T00:00:00Z",
"snapshots": [],
"storage_lock": false,
"tags": [],
"type": "user",
"uuid": "ab3a4d72-f498-4687-86ce-ca937046db76",
"virtual_size": "1073741824",
"xenstore_data": {}
}
},
"VIF": {
"OpaqueRef:9754a0ed-e100-d224-6a70-a55a9c2cedf9": {
"MAC": "16:87:31:70:d6:31",
"MAC_autogenerated": false,
"MTU": "1500",
"VM": "OpaqueRef:08632af0-473e-5106-f400-7910229e49be",
"allowed_operations": [
"attach",
"unplug"
],
"current_operations": {},
"currently_attached": true,
"device": "0",
"ipv4_addresses": [],
"ipv4_allowed": [],
"ipv4_configuration_mode": "None",
"ipv4_gateway": "",
"ipv6_addresses": [],
"ipv6_allowed": [],
"ipv6_configuration_mode": "None",
"ipv6_gateway": "",
"locking_mode": "network_default",
"metrics": "OpaqueRef:d74d5f20-f0ab-ee36-9a74-496ffb994232",
"network": "OpaqueRef:8a404c5e-5673-ab69-5d6f-5a35a33b8724",
"other_config": {},
"qos_algorithm_params": {},
"qos_algorithm_type": "",
"qos_supported_algorithms": [],
"runtime_properties": {},
"status_code": "0",
"status_detail": "",
"uuid": "07b70134-9396-94fc-5105-179b430ce4f8"
}
},
"VM": {
"OpaqueRef:08632af0-473e-5106-f400-7910229e49be": {
"HVM_boot_params": {
"order": "cdn"
},
"HVM_boot_policy": "BIOS order",
"HVM_shadow_multiplier": 1.0,
"PCI_bus": "",
"PV_args": "",
"PV_bootloader": "",
"PV_bootloader_args": "",
"PV_kernel": "",
"PV_legacy_args": "",
"PV_ramdisk": "",
"VBDs": [
"OpaqueRef:510e214e-f0ba-3bc9-7834-a4f4d3fa33ef",
"OpaqueRef:9bd6decd-2e55-b55e-387d-c40aa67ff151",
"OpaqueRef:6bc2c353-f132-926d-6e9b-e4d1d55a3760"
],
"VCPUs_at_startup": "1",
"VCPUs_max": "1",
"VCPUs_params": {},
"VGPUs": [],
"VIFs": [
"OpaqueRef:9754a0ed-e100-d224-6a70-a55a9c2cedf9"
],
"VTPMs": [],
"actions_after_crash": "restart",
"actions_after_reboot": "restart",
"actions_after_shutdown": "destroy",
"affinity": "OpaqueRef:e87be804-57a1-532e-56ac-6c4910957be0",
"allowed_operations": [
"changing_dynamic_range",
"migrate_send",
"pool_migrate",
"changing_VCPUs_live",
"suspend",
"hard_reboot",
"hard_shutdown",
"clean_reboot",
"clean_shutdown",
"pause",
"checkpoint",
"snapshot"
],
"appliance": "OpaqueRef:NULL",
"attached_PCIs": [],
"bios_strings": {
"bios-vendor": "Xen",
"bios-version": "",
"hp-rombios": "",
"oem-1": "Xen",
"oem-2": "MS_VM_CERT/SHA1/bdbeb6e0a816d43fa6d3fe8aaef04c2bad9d3e3d",
"system-manufacturer": "Xen",
"system-product-name": "HVM domU",
"system-serial-number": "",
"system-version": ""
},
"blobs": {},
"blocked_operations": {},
"children": [],
"consoles": [
"OpaqueRef:2a24e023-a856-de30-aea3-2024bacdc71f"
],
"crash_dumps": [],
"current_operations": {},
"domarch": "",
"domid": "140",
"generation_id": "",
"guest_metrics": "OpaqueRef:150d2dfa-b634-7965-92ab-31fc26382683",
"ha_always_run": false,
"ha_restart_priority": "",
"hardware_platform_version": "0",
"has_vendor_device": false,
"is_a_snapshot": false,
"is_a_template": false,
"is_control_domain": false,
"is_default_template": false,
"is_snapshot_from_vmpp": false,
"is_vmss_snapshot": false,
"last_boot_CPU_flags": {
"features": "17cbfbff-f7fa3223-2d93fbff-00000023-00000001-000007ab-00000000-00000000-00001000-0c000000",
"vendor": "GenuineIntel"
},
"last_booted_record": "",
"memory_dynamic_max": "1073741824",
"memory_dynamic_min": "1073741824",
"memory_overhead": "11534336",
"memory_static_max": "1073741824",
"memory_static_min": "1073741824",
"memory_target": "1073741824",
"metrics": "OpaqueRef:b56b460b-6476-304d-b143-ce543ffab828",
"name_description": "Created by Ansible",
"name_label": "ansible-test-vm-2",
"order": "0",
"other_config": {
"base_template_name": "CentOS 7",
"folder": "/Ansible/Test",
"import_task": "OpaqueRef:cf1402d3-b6c1-d908-fe62-06502e3b311a",
"install-methods": "cdrom,nfs,http,ftp",
"instant": "true",
"linux_template": "true",
"mac_seed": "0ab46664-f519-5383-166e-e4ea485ede7d"
},
"parent": "OpaqueRef:NULL",
"platform": {
"acpi": "1",
"apic": "true",
"cores-per-socket": "1",
"device_id": "0001",
"nx": "true",
"pae": "true",
"timeoffset": "0",
"vga": "std",
"videoram": "8",
"viridian": "false"
},
"power_state": "Running",
"protection_policy": "OpaqueRef:NULL",
"recommendations": "<restrictions><restriction field=\"memory-static-max\" max=\"549755813888\" /><restriction field=\"vcpus-max\" max=\"16\" /><restriction property=\"number-of-vbds\" max=\"16\" /><restriction property=\"number-of-vifs\" max=\"7\" /><restriction field=\"allow-gpu-passthrough\" value=\"0\" /></restrictions>",
"reference_label": "",
"requires_reboot": false,
"resident_on": "OpaqueRef:e87be804-57a1-532e-56ac-6c4910957be0",
"shutdown_delay": "0",
"snapshot_info": {},
"snapshot_metadata": "",
"snapshot_of": "OpaqueRef:NULL",
"snapshot_schedule": "OpaqueRef:NULL",
"snapshot_time": "19700101T00:00:00Z",
"snapshots": [],
"start_delay": "0",
"suspend_SR": "OpaqueRef:NULL",
"suspend_VDI": "OpaqueRef:NULL",
"tags": [],
"transportable_snapshot_id": "",
"user_version": "1",
"uuid": "0a05d5ad-3e4b-f0dc-6101-8c56623958bc",
"version": "0",
"xenstore_data": {
"vm-data": "",
"vm-data/networks": "",
"vm-data/networks/0": "",
"vm-data/networks/0/gateway": "10.0.0.1",
"vm-data/networks/0/ip": "10.0.0.3",
"vm-data/networks/0/mac": "16:87:31:70:d6:31",
"vm-data/networks/0/name": "Host internal management network",
"vm-data/networks/0/netmask": "255.255.255.0",
"vm-data/networks/0/prefix": "24",
"vm-data/networks/0/type": "static"
}
}
},
"VM_guest_metrics": {
"OpaqueRef:150d2dfa-b634-7965-92ab-31fc26382683": {
"PV_drivers_detected": true,
"PV_drivers_up_to_date": true,
"PV_drivers_version": {
"build": "90977",
"major": "6",
"micro": "0",
"minor": "5"
},
"can_use_hotplug_vbd": "unspecified",
"can_use_hotplug_vif": "unspecified",
"disks": {},
"last_updated": "20190113T19:36:26Z",
"live": true,
"memory": {},
"networks": {
"0/ip": "169.254.0.2"
},
"os_version": {
"distro": "centos",
"major": "7",
"minor": "2",
"name": "CentOS Linux release 7.2.1511 (Core)",
"uname": "3.10.0-327.22.2.el7.x86_64"
},
"other": {
"feature-balloon": "1",
"feature-shutdown": "1",
"feature-suspend": "1",
"feature-vcpu-hotplug": "1",
"has-vendor-device": "0",
"platform-feature-multiprocessor-suspend": "1"
},
"other_config": {},
"uuid": "5c9d1be5-7eee-88f2-46c3-df1d44f9cdb5"
}
},
"VM_metrics": {
"OpaqueRef:b56b460b-6476-304d-b143-ce543ffab828": {
"VCPUs_CPU": {},
"VCPUs_flags": {},
"VCPUs_number": "1",
"VCPUs_params": {},
"VCPUs_utilisation": {},
"hvm": true,
"install_time": "20190113T19:32:46Z",
"last_updated": "19700101T00:00:00Z",
"memory_actual": "1073729536",
"nested_virt": false,
"nomigrate": false,
"other_config": {},
"start_time": "20190113T19:35:15Z",
"state": [],
"uuid": "876dd44c-aad1-97bf-9ee5-4cd58eac7163"
}
},
"host": {
"OpaqueRef:e87be804-57a1-532e-56ac-6c4910957be0": {
"API_version_major": "2",
"API_version_minor": "7",
"API_version_vendor": "XenSource",
"API_version_vendor_implementation": {},
"PBDs": [],
"PCIs": [],
"PGPUs": [],
"PIFs": [],
"address": "10.0.0.1",
"allowed_operations": [
"vm_migrate",
"provision",
"vm_resume",
"evacuate",
"vm_start"
],
"bios_strings": {},
"blobs": {},
"capabilities": [
"xen-3.0-x86_64",
"xen-3.0-x86_32p",
"hvm-3.0-x86_32",
"hvm-3.0-x86_32p",
"hvm-3.0-x86_64",
""
],
"chipset_info": {
"iommu": "true"
},
"control_domain": "OpaqueRef:ffcc92a1-8fde-df6f-a501-44b37811286b",
"cpu_configuration": {},
"cpu_info": {
"cpu_count": "40",
"family": "6",
"features": "7ffefbff-bfebfbff-00000021-2c100800",
"features_hvm": "17cbfbff-f7fa3223-2d93fbff-00000023-00000001-000007ab-00000000-00000000-00001000-0c000000",
"features_pv": "17c9cbf5-f6f83203-2191cbf5-00000023-00000001-00000329-00000000-00000000-00001000-0c000000",
"flags": "fpu de tsc msr pae mce cx8 apic sep mca cmov pat clflush acpi mmx fxsr sse sse2 ht syscall nx lm constant_tsc arch_perfmon rep_good nopl nonstop_tsc eagerfpu pni pclmulqdq monitor est ssse3 fma cx16 sse4_1 sse4_2 movbe popcnt aes xsave avx f16c rdrand hypervisor lahf_lm abm ida arat epb pln pts dtherm fsgsbase bmi1 avx2 bmi2 erms xsaveopt cqm_llc cqm_occup_llc",
"model": "63",
"modelname": "Intel(R) Xeon(R) CPU E5-2660 v3 @ 2.60GHz",
"socket_count": "2",
"speed": "2597.070",
"stepping": "2",
"vendor": "GenuineIntel"
},
"crash_dump_sr": "OpaqueRef:0b984cec-a36c-ce84-7b34-9f0088352d55",
"crashdumps": [],
"current_operations": {},
"display": "enabled",
"edition": "free",
"enabled": true,
"external_auth_configuration": {},
"external_auth_service_name": "",
"external_auth_type": "",
"features": [],
"guest_VCPUs_params": {},
"ha_network_peers": [],
"ha_statefiles": [],
"host_CPUs": [
"OpaqueRef:ec3ba9c4-9b57-236b-3eaa-b157affc1621",
"OpaqueRef:e6de7ab3-f4ad-f271-e51b-e3d8c041d3fb",
"OpaqueRef:e519ef88-bf41-86ac-16b3-c178cb4b78b1",
"OpaqueRef:e48f1bc1-98ba-89e5-ab69-821c625f7f82",
"OpaqueRef:e2659936-3de6-dbca-cc44-4af50960b2b7",
"OpaqueRef:d0da1e31-20ac-4aff-8897-e80df8200648",
"OpaqueRef:cec473ba-41a8-439d-b397-be0c60467b5d",
"OpaqueRef:ce88014d-b06c-c959-0624-04d79b791885",
"OpaqueRef:c656ca58-41fe-3689-d322-174aa5798beb",
"OpaqueRef:c0a21f14-8f46-19de-1cf4-530a34c4aa17",
"OpaqueRef:bf70c061-7b45-0497-7ef6-65a236e898e8",
"OpaqueRef:b7a2ba0f-f11b-3633-ad47-4f5f76a600a8",
"OpaqueRef:b4fef1fa-3aae-9790-f47e-6a17f645339c",
"OpaqueRef:b4594721-f8f4-4475-61c5-4efeec1733f1",
"OpaqueRef:9dcba36f-c29f-478f-f578-d1ea347410a6",
"OpaqueRef:987897e8-1184-917e-6a5f-e205d0c739e5",
"OpaqueRef:90f06d64-be18-7fdf-36ba-bbd696a26cf3",
"OpaqueRef:90150bc1-e604-4cd4-35ad-9cfa8e985de3",
"OpaqueRef:838f4ad4-8ad2-0d6c-a74e-26baa461de3d",
"OpaqueRef:736fb523-d347-e8c0-089b-c9811d3c1195",
"OpaqueRef:7137b479-87d4-9097-a684-e54cc4de5d09",
"OpaqueRef:6e08fa1d-7d7b-d9be-1574-ffe95bd515fd",
"OpaqueRef:6b9e6ecd-54e5-4248-5aea-ee5b99248818",
"OpaqueRef:65d56b24-3445-b444-5125-c91e6966fd29",
"OpaqueRef:60908eca-1e5c-c938-5b76-e8ff9d8899ab",
"OpaqueRef:46e96878-c076-2164-2373-6cdd108c2436",
"OpaqueRef:40ccdaf4-6008-2b83-92cb-ca197f73433f",
"OpaqueRef:3bc8133a-ccb2-6790-152f-b3f577517751",
"OpaqueRef:38c8edd8-0621-76de-53f6-86bef2a9e05c",
"OpaqueRef:342c1bab-a211-a0eb-79a5-780bd5ad1f23",
"OpaqueRef:1e20e6d0-5502-0dff-4f17-5d35eb833af1",
"OpaqueRef:176baafa-0e63-7000-f754-25e2a6b74959",
"OpaqueRef:16cab1a2-0111-b2af-6dfe-3724b79e6b6b",
"OpaqueRef:0f213647-8362-9c5e-e99b-0ebaefc609ce",
"OpaqueRef:0e019819-b41f-0bfb-d4ee-dd5484fea9b6",
"OpaqueRef:0d39212f-82ba-190c-b304-19b3fa491fff",
"OpaqueRef:087ce3ad-3b66-ae1e-3130-3ae640dcc638",
"OpaqueRef:0730f24c-87ed-8296-8f14-3036e5ad2357",
"OpaqueRef:04c27426-4895-39a7-9ade-ef33d3721c26",
"OpaqueRef:017b27bf-0270-19e7-049a-5a9b3bb54898"
],
"hostname": "ansible-test-host-2",
"license_params": {
"address1": "",
"address2": "",
"city": "",
"company": "",
"country": "",
"enable_xha": "true",
"expiry": "20291231T23:00:00Z",
"grace": "no",
"license_type": "",
"name": "",
"platform_filter": "false",
"postalcode": "",
"productcode": "",
"regular_nag_dialog": "false",
"restrict_ad": "false",
"restrict_batch_hotfix_apply": "true",
"restrict_checkpoint": "false",
"restrict_cifs": "true",
"restrict_connection": "false",
"restrict_cpu_masking": "false",
"restrict_dmc": "false",
"restrict_dr": "false",
"restrict_email_alerting": "false",
"restrict_equalogic": "false",
"restrict_export_resource_data": "true",
"restrict_gpu": "false",
"restrict_guest_agent_auto_update": "true",
"restrict_guest_ip_setting": "false",
"restrict_health_check": "false",
"restrict_historical_performance": "false",
"restrict_hotfix_apply": "false",
"restrict_integrated_gpu_passthrough": "false",
"restrict_intellicache": "false",
"restrict_lab": "false",
"restrict_live_patching": "true",
"restrict_marathon": "false",
"restrict_nested_virt": "true",
"restrict_netapp": "false",
"restrict_pci_device_for_auto_update": "true",
"restrict_pool_attached_storage": "false",
"restrict_pooling": "false",
"restrict_pvs_proxy": "true",
"restrict_qos": "false",
"restrict_rbac": "false",
"restrict_read_caching": "true",
"restrict_set_vcpus_number_live": "true",
"restrict_ssl_legacy_switch": "false",
"restrict_stage": "false",
"restrict_storage_xen_motion": "false",
"restrict_storagelink": "false",
"restrict_storagelink_site_recovery": "false",
"restrict_vgpu": "true",
"restrict_vif_locking": "false",
"restrict_vlan": "false",
"restrict_vm_memory_introspection": "true",
"restrict_vmpr": "false",
"restrict_vmss": "false",
"restrict_vss": "false",
"restrict_vswitch_controller": "false",
"restrict_web_selfservice": "true",
"restrict_web_selfservice_manager": "true",
"restrict_wlb": "true",
"restrict_xcm": "true",
"restrict_xen_motion": "false",
"serialnumber": "",
"sku_marketing_name": "Citrix XenServer",
"sku_type": "free",
"sockets": "2",
"state": "",
"version": ""
},
"license_server": {
"address": "localhost",
"port": "27000"
},
"local_cache_sr": "OpaqueRef:0b984cec-a36c-ce84-7b34-9f0088352d55",
"logging": {},
"memory_overhead": "4865126400",
"metrics": "OpaqueRef:f55653cb-92eb-8257-f2ee-7a2d1c2d6aef",
"name_description": "",
"name_label": "ansible-test-host-2",
"other_config": {
"agent_start_time": "1532019582.",
"boot_time": "1528986759.",
"iscsi_iqn": "iqn.2018-06.com.example:87b7637d",
"last_blob_sync_time": "1547394065.41",
"multipathhandle": "dmp",
"multipathing": "true"
},
"patches": [
"OpaqueRef:f5bd18b6-1423-893a-5d7f-7095338e6a2d",
"OpaqueRef:eecb0b95-87fb-a53e-651c-9741efd18bb6",
"OpaqueRef:e92c9ef3-2e51-1a36-d400-9e237982b782",
"OpaqueRef:cc98226c-2c08-799e-5f15-7761a398e4a0",
"OpaqueRef:c4f35e66-d064-55a7-6946-7f4b145275a6",
"OpaqueRef:c3794494-f894-6141-b811-f37a8fe60094",
"OpaqueRef:bcf61af7-63a9-e430-5b7c-a740ba470596",
"OpaqueRef:b58ac71e-797e-6f66-71ad-fe298c94fd10",
"OpaqueRef:a2ea18fd-5343-f8db-718d-f059c2a8cce0",
"OpaqueRef:929db459-6861-c588-158f-70f763331d6d",
"OpaqueRef:92962d94-2205-f6e1-12f9-b55a99fd824d",
"OpaqueRef:65dfb07a-f90d-dad9-9ab8-1cc2b1e79afb",
"OpaqueRef:537a87c4-3bf4-969f-f06a-2dd8d3a018a2",
"OpaqueRef:32dd1de3-c9c8-bcbb-27a0-83d4a930876d",
"OpaqueRef:30a8ccc8-74a9-b31f-0403-66b117e281b6",
"OpaqueRef:24545c44-ffd1-8a28-18c6-3d008bf4d63e",
"OpaqueRef:1fcef81b-7c44-a4db-f59a-c4a147da9c49",
"OpaqueRef:1e98a240-514b-1863-5518-c771d0ebf579",
"OpaqueRef:1632cab2-b268-6ce8-4f7b-ce7fd4bfa1eb"
],
"power_on_config": {},
"power_on_mode": "",
"resident_VMs": [],
"sched_policy": "credit",
"software_version": {
"build_number": "release/falcon/master/8",
"date": "2017-05-11",
"db_schema": "5.120",
"dbv": "2017.0517",
"hostname": "f7d02093adae",
"linux": "4.4.0+10",
"network_backend": "openvswitch",
"platform_name": "XCP",
"platform_version": "2.3.0",
"product_brand": "XenServer",
"product_version": "7.2.0",
"product_version_text": "7.2",
"product_version_text_short": "7.2",
"xapi": "1.9",
"xen": "4.7.5-2.12",
"xencenter_max": "2.7",
"xencenter_min": "2.7"
},
"ssl_legacy": true,
"supported_bootloaders": [
"pygrub",
"eliloader"
],
"suspend_image_sr": "OpaqueRef:0b984cec-a36c-ce84-7b34-9f0088352d55",
"tags": [],
"updates": [
"OpaqueRef:7b4b5da1-54af-d0c4-3fea-394b4257bffe",
"OpaqueRef:fbaabbfe-88d5-d89b-5b3f-d6374601ca71",
"OpaqueRef:507ee5fc-59d3-e635-21d5-98a5cace4bf2",
"OpaqueRef:6c9b814c-e1c2-b8be-198f-de358686b10a",
"OpaqueRef:a17e721d-faf4-6ad1-c617-dd4899279534",
"OpaqueRef:6ac77a0f-f079-8067-85cc-c9ae2f8dcca9",
"OpaqueRef:f61edc83-91d9-a161-113f-00c110196238",
"OpaqueRef:b71938bf-4c4f-eb17-7e78-588e71297a74",
"OpaqueRef:01befb95-412e-e9dd-5b5d-edd50df61cb1",
"OpaqueRef:a3f9481e-fe3d-1f00-235f-44d404f51128",
"OpaqueRef:0760c608-b02e-743a-18a1-fa8f205374d6",
"OpaqueRef:204558d7-dce0-2304-bdc5-80ec5fd7e3c3",
"OpaqueRef:9eccc765-9726-d220-96b1-2e85adf77ecc",
"OpaqueRef:91cfa47b-52f9-a4e3-4e78-52e3eb3e5141",
"OpaqueRef:3fffd7c7-f4d1-6b03-a5b8-d75211bb7b8f",
"OpaqueRef:7efce157-9b93-d116-f3f8-7eb0c6fb1a79",
"OpaqueRef:e2209ae9-5362-3a20-f691-9294144e49f2",
"OpaqueRef:1ced32ca-fec4-8b44-0e8f-753c97f2d93f",
"OpaqueRef:65b14ae7-f440-0c4d-4af9-c7946b90fd2f"
],
"updates_requiring_reboot": [],
"uuid": "dff6702e-bcb6-4704-8dd4-952e8c883365",
"virtual_hardware_platform_versions": [
"0",
"1",
"2"
]
}
},
"network": {
"OpaqueRef:8a404c5e-5673-ab69-5d6f-5a35a33b8724": {
"MTU": "1500",
"PIFs": [],
"VIFs": [],
"allowed_operations": [],
"assigned_ips": {
"OpaqueRef:8171dad1-f902-ec00-7ba2-9f92d8aa75ab": "169.254.0.3",
"OpaqueRef:9754a0ed-e100-d224-6a70-a55a9c2cedf9": "169.254.0.2"
},
"blobs": {},
"bridge": "xenapi",
"current_operations": {},
"default_locking_mode": "unlocked",
"managed": true,
"name_description": "Network on which guests will be assigned a private link-local IP address which can be used to talk XenAPI",
"name_label": "Host internal management network",
"other_config": {
"ip_begin": "169.254.0.1",
"ip_end": "169.254.255.254",
"is_guest_installer_network": "true",
"is_host_internal_management_network": "true",
"netmask": "255.255.0.0"
},
"tags": [],
"uuid": "dbb96525-944f-0d1a-54ed-e65cb6d07450"
}
}
}

View file

@ -0,0 +1,75 @@
{
"cdrom": {
"type": "none"
},
"customization_agent": "custom",
"disks": [
{
"name": "ansible-test-vm-3-root",
"name_desc": "/",
"os_device": "xvda",
"size": 8589934592,
"sr": "Ansible Test Storage 1",
"sr_uuid": "767b30e4-f8db-a83d-8ba7-f5e6e732e06f",
"vbd_userdevice": "0"
}
],
"domid": "-1",
"folder": "",
"hardware": {
"memory_mb": 1024,
"num_cpu_cores_per_socket": 1,
"num_cpus": 1
},
"home_server": "",
"is_template": false,
"name": "ansible-test-vm-3",
"name_desc": "Created by Ansible",
"networks": [
{
"gateway": "",
"gateway6": "",
"ip": "169.254.0.3",
"ip6": [],
"mac": "72:fb:c7:ac:b9:97",
"mtu": "1500",
"name": "Host internal management network",
"netmask": "",
"prefix": "",
"prefix6": "",
"vif_device": "0"
}
],
"other_config": {
"auto_poweron": "true",
"base_template_name": "zatemplate",
"import_task": "OpaqueRef:9948fd82-6d79-8882-2f01-4edc8795e361",
"install-methods": "cdrom,nfs,http,ftp",
"install-repository": "http://mirror.centos.org/centos-6/6.2/os/x86_64/",
"instant": "true",
"last_shutdown_action": "Destroy",
"last_shutdown_initiator": "external",
"last_shutdown_reason": "halted",
"last_shutdown_time": "20140314T21:16:41Z",
"linux_template": "true",
"mac_seed": "06e27068-70c2-4c69-614b-7c54b5a4a781",
"rhel6": "true"
},
"platform": {
"acpi": "true",
"apic": "true",
"cores-per-socket": "1",
"nx": "false",
"pae": "true",
"viridian": "true"
},
"state": "poweredoff",
"uuid": "8f5bc97c-42fa-d619-aba4-d25eced735e0",
"xenstore_data": {
"vm-data": "",
"vm-data/networks": "",
"vm-data/networks/0": "",
"vm-data/networks/0/mac": "72:fb:c7:ac:b9:97",
"vm-data/networks/0/name": "Host internal management network"
}
}

View file

@ -0,0 +1,420 @@
{
"SR": {
"OpaqueRef:f746e964-e0fe-c36d-d60b-6897cfde583f": {
"PBDs": [],
"VDIs": [],
"allowed_operations": [
"unplug",
"plug",
"pbd_create",
"update",
"pbd_destroy",
"vdi_resize",
"vdi_clone",
"scan",
"vdi_snapshot",
"vdi_mirror",
"vdi_create",
"vdi_destroy"
],
"blobs": {},
"clustered": false,
"content_type": "",
"current_operations": {},
"introduced_by": "OpaqueRef:NULL",
"is_tools_sr": false,
"local_cache_enabled": false,
"name_description": "",
"name_label": "Ansible Test Storage 1",
"other_config": {
"auto-scan": "false"
},
"physical_size": "2521133219840",
"physical_utilisation": "1551485632512",
"shared": true,
"sm_config": {
"allocation": "thick",
"devserial": "scsi-3600a098038302d353624495242443848",
"multipathable": "true",
"use_vhd": "true"
},
"tags": [],
"type": "lvmohba",
"uuid": "767b30e4-f8db-a83d-8ba7-f5e6e732e06f",
"virtual_allocation": "1556925644800"
}
},
"VBD": {
"OpaqueRef:024b722e-8d0f-65e6-359e-f301a009b683": {
"VDI": "OpaqueRef:NULL",
"VM": "OpaqueRef:957f576a-2347-1789-80db-4beb50466bc2",
"allowed_operations": [
"attach",
"insert"
],
"bootable": false,
"current_operations": {},
"currently_attached": false,
"device": "",
"empty": true,
"metrics": "OpaqueRef:81509584-b22f-bc71-3c4e-e6c3bdca71f0",
"mode": "RO",
"other_config": {},
"qos_algorithm_params": {},
"qos_algorithm_type": "",
"qos_supported_algorithms": [],
"runtime_properties": {},
"status_code": "0",
"status_detail": "",
"storage_lock": false,
"type": "CD",
"unpluggable": true,
"userdevice": "3",
"uuid": "38d850d0-c402-490e-6b97-1d23558c4e0e"
},
"OpaqueRef:235f4f04-1dc9-9fa5-c229-a1df187ba48c": {
"VDI": "OpaqueRef:4d3e9fc7-ae61-b312-e0a8-b53bee06282e",
"VM": "OpaqueRef:957f576a-2347-1789-80db-4beb50466bc2",
"allowed_operations": [
"attach"
],
"bootable": true,
"current_operations": {},
"currently_attached": false,
"device": "xvda",
"empty": false,
"metrics": "OpaqueRef:529f6071-5627-28c5-1f41-ee8c0733f1da",
"mode": "RW",
"other_config": {
"owner": ""
},
"qos_algorithm_params": {},
"qos_algorithm_type": "",
"qos_supported_algorithms": [],
"runtime_properties": {},
"status_code": "0",
"status_detail": "",
"storage_lock": false,
"type": "Disk",
"unpluggable": false,
"userdevice": "0",
"uuid": "3fd7d35c-cb9d-f0c4-726b-e188ef0dc446"
}
},
"VDI": {
"OpaqueRef:4d3e9fc7-ae61-b312-e0a8-b53bee06282e": {
"SR": "OpaqueRef:f746e964-e0fe-c36d-d60b-6897cfde583f",
"VBDs": [
"OpaqueRef:235f4f04-1dc9-9fa5-c229-a1df187ba48c"
],
"allow_caching": false,
"allowed_operations": [
"forget",
"generate_config",
"update",
"resize",
"destroy",
"clone",
"copy",
"snapshot"
],
"crash_dumps": [],
"current_operations": {},
"is_a_snapshot": false,
"is_tools_iso": false,
"location": "bdd0baeb-5447-4963-9e71-a5ff6e85fa59",
"managed": true,
"metadata_latest": false,
"metadata_of_pool": "",
"missing": false,
"name_description": "/",
"name_label": "ansible-test-vm-3-root",
"on_boot": "persist",
"other_config": {
"content_id": "cd8e8b2b-f158-c519-02f0-81d130fe83c5"
},
"parent": "OpaqueRef:NULL",
"physical_utilisation": "8615100416",
"read_only": false,
"sharable": false,
"sm_config": {
"vdi_type": "vhd"
},
"snapshot_of": "OpaqueRef:NULL",
"snapshot_time": "19700101T00:00:00Z",
"snapshots": [],
"storage_lock": false,
"tags": [],
"type": "system",
"uuid": "bdd0baeb-5447-4963-9e71-a5ff6e85fa59",
"virtual_size": "8589934592",
"xenstore_data": {}
}
},
"VIF": {
"OpaqueRef:8171dad1-f902-ec00-7ba2-9f92d8aa75ab": {
"MAC": "72:fb:c7:ac:b9:97",
"MAC_autogenerated": true,
"MTU": "1500",
"VM": "OpaqueRef:957f576a-2347-1789-80db-4beb50466bc2",
"allowed_operations": [
"attach"
],
"current_operations": {},
"currently_attached": false,
"device": "0",
"ipv4_addresses": [],
"ipv4_allowed": [],
"ipv4_configuration_mode": "None",
"ipv4_gateway": "",
"ipv6_addresses": [],
"ipv6_allowed": [],
"ipv6_configuration_mode": "None",
"ipv6_gateway": "",
"locking_mode": "network_default",
"metrics": "OpaqueRef:e5b53fb1-3e99-4bf5-6b00-95fdba1f2610",
"network": "OpaqueRef:8a404c5e-5673-ab69-5d6f-5a35a33b8724",
"other_config": {},
"qos_algorithm_params": {},
"qos_algorithm_type": "",
"qos_supported_algorithms": [],
"runtime_properties": {},
"status_code": "0",
"status_detail": "",
"uuid": "94bd4913-4940-437c-a1c3-50f7eb354c55"
}
},
"VM": {
"OpaqueRef:957f576a-2347-1789-80db-4beb50466bc2": {
"HVM_boot_params": {
"order": ""
},
"HVM_boot_policy": "",
"HVM_shadow_multiplier": 1.0,
"PCI_bus": "",
"PV_args": "graphical utf8",
"PV_bootloader": "pygrub",
"PV_bootloader_args": "",
"PV_kernel": "",
"PV_legacy_args": "",
"PV_ramdisk": "",
"VBDs": [
"OpaqueRef:235f4f04-1dc9-9fa5-c229-a1df187ba48c",
"OpaqueRef:024b722e-8d0f-65e6-359e-f301a009b683"
],
"VCPUs_at_startup": "1",
"VCPUs_max": "1",
"VCPUs_params": {},
"VGPUs": [],
"VIFs": [
"OpaqueRef:8171dad1-f902-ec00-7ba2-9f92d8aa75ab"
],
"VTPMs": [],
"actions_after_crash": "restart",
"actions_after_reboot": "restart",
"actions_after_shutdown": "destroy",
"affinity": "OpaqueRef:NULL",
"allowed_operations": [
"changing_dynamic_range",
"changing_shadow_memory",
"changing_static_range",
"make_into_template",
"migrate_send",
"destroy",
"export",
"start_on",
"start",
"clone",
"copy",
"snapshot"
],
"appliance": "OpaqueRef:NULL",
"attached_PCIs": [],
"bios_strings": {
"bios-vendor": "Xen",
"bios-version": "",
"hp-rombios": "",
"oem-1": "Xen",
"oem-2": "MS_VM_CERT/SHA1/bdbeb6e0a816d43fa6d3fe8aaef04c2bad9d3e3d",
"system-manufacturer": "Xen",
"system-product-name": "HVM domU",
"system-serial-number": "",
"system-version": ""
},
"blobs": {},
"blocked_operations": {},
"children": [],
"consoles": [],
"crash_dumps": [],
"current_operations": {},
"domarch": "",
"domid": "-1",
"generation_id": "",
"guest_metrics": "OpaqueRef:6a8acd85-4cab-4e52-27d5-5f4a51c1bf69",
"ha_always_run": false,
"ha_restart_priority": "",
"hardware_platform_version": "0",
"has_vendor_device": false,
"is_a_snapshot": false,
"is_a_template": false,
"is_control_domain": false,
"is_default_template": false,
"is_snapshot_from_vmpp": false,
"is_vmss_snapshot": false,
"last_boot_CPU_flags": {
"features": "17c9cbf5-f6f83203-2191cbf5-00000023-00000001-00000329-00000000-00000000-00001000-0c000000",
"vendor": "GenuineIntel"
},
"last_booted_record": "",
"memory_dynamic_max": "1073741824",
"memory_dynamic_min": "1073741824",
"memory_overhead": "10485760",
"memory_static_max": "1073741824",
"memory_static_min": "536870912",
"memory_target": "0",
"metrics": "OpaqueRef:87fc5829-478b-1dcd-989f-50e8ba58a87d",
"name_description": "Created by Ansible",
"name_label": "ansible-test-vm-3",
"order": "0",
"other_config": {
"auto_poweron": "true",
"base_template_name": "zatemplate",
"import_task": "OpaqueRef:9948fd82-6d79-8882-2f01-4edc8795e361",
"install-methods": "cdrom,nfs,http,ftp",
"install-repository": "http://mirror.centos.org/centos-6/6.2/os/x86_64/",
"instant": "true",
"last_shutdown_action": "Destroy",
"last_shutdown_initiator": "external",
"last_shutdown_reason": "halted",
"last_shutdown_time": "20140314T21:16:41Z",
"linux_template": "true",
"mac_seed": "06e27068-70c2-4c69-614b-7c54b5a4a781",
"rhel6": "true"
},
"parent": "OpaqueRef:NULL",
"platform": {
"acpi": "true",
"apic": "true",
"cores-per-socket": "1",
"nx": "false",
"pae": "true",
"viridian": "true"
},
"power_state": "Halted",
"protection_policy": "OpaqueRef:NULL",
"recommendations": "<restrictions><restriction field=\"memory-static-max\" max=\"17179869184\" /><restriction field=\"vcpus-max\" max=\"8\" /><restriction property=\"number-of-vbds\" max=\"7\" /><restriction property=\"number-of-vifs\" max=\"7\" /></restrictions>",
"reference_label": "",
"requires_reboot": false,
"resident_on": "OpaqueRef:NULL",
"shutdown_delay": "0",
"snapshot_info": {},
"snapshot_metadata": "",
"snapshot_of": "OpaqueRef:NULL",
"snapshot_schedule": "OpaqueRef:NULL",
"snapshot_time": "19700101T00:00:00Z",
"snapshots": [],
"start_delay": "0",
"suspend_SR": "OpaqueRef:NULL",
"suspend_VDI": "OpaqueRef:NULL",
"tags": [
"web-frontend"
],
"transportable_snapshot_id": "",
"user_version": "1",
"uuid": "8f5bc97c-42fa-d619-aba4-d25eced735e0",
"version": "0",
"xenstore_data": {
"vm-data": "",
"vm-data/networks": "",
"vm-data/networks/0": "",
"vm-data/networks/0/mac": "72:fb:c7:ac:b9:97",
"vm-data/networks/0/name": "Host internal management network"
}
}
},
"VM_guest_metrics": {
"OpaqueRef:6a8acd85-4cab-4e52-27d5-5f4a51c1bf69": {
"PV_drivers_detected": true,
"PV_drivers_up_to_date": true,
"PV_drivers_version": {
"build": "46676",
"major": "5",
"micro": "100",
"minor": "6"
},
"can_use_hotplug_vbd": "unspecified",
"can_use_hotplug_vif": "unspecified",
"disks": {},
"last_updated": "20190113T19:36:07Z",
"live": true,
"memory": {},
"networks": {
"0/ip": "169.254.0.3"
},
"os_version": {
"distro": "centos",
"major": "6",
"minor": "10",
"name": "CentOS release 6.10 (Final)",
"uname": "2.6.32-754.6.3.el6.x86_64"
},
"other": {
"feature-balloon": "1",
"has-vendor-device": "0",
"platform-feature-multiprocessor-suspend": "1"
},
"other_config": {},
"uuid": "3928a6a4-1acd-c134-ed35-eb0ccfaed65c"
}
},
"VM_metrics": {
"OpaqueRef:87fc5829-478b-1dcd-989f-50e8ba58a87d": {
"VCPUs_CPU": {},
"VCPUs_flags": {},
"VCPUs_number": "0",
"VCPUs_params": {},
"VCPUs_utilisation": {
"0": 0.0
},
"hvm": false,
"install_time": "20190113T19:35:05Z",
"last_updated": "19700101T00:00:00Z",
"memory_actual": "1073741824",
"nested_virt": false,
"nomigrate": false,
"other_config": {},
"start_time": "19700101T00:00:00Z",
"state": [],
"uuid": "6cb05fe9-b83e-34c8-29e0-3b793e1da661"
}
},
"host": {},
"network": {
"OpaqueRef:8a404c5e-5673-ab69-5d6f-5a35a33b8724": {
"MTU": "1500",
"PIFs": [],
"VIFs": [],
"allowed_operations": [],
"assigned_ips": {
"OpaqueRef:8171dad1-f902-ec00-7ba2-9f92d8aa75ab": "169.254.0.3",
"OpaqueRef:9754a0ed-e100-d224-6a70-a55a9c2cedf9": "169.254.0.2"
},
"blobs": {},
"bridge": "xenapi",
"current_operations": {},
"default_locking_mode": "unlocked",
"managed": true,
"name_description": "Network on which guests will be assigned a private link-local IP address which can be used to talk XenAPI",
"name_label": "Host internal management network",
"other_config": {
"ip_begin": "169.254.0.1",
"ip_end": "169.254.255.254",
"is_guest_installer_network": "true",
"is_host_internal_management_network": "true",
"netmask": "255.255.0.0"
},
"tags": [],
"uuid": "dbb96525-944f-0d1a-54ed-e65cb6d07450"
}
}
}

View file

@ -0,0 +1,74 @@
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from .common import testcase_bad_xenapi_refs
# Fixture-file pairs for the three reference VMs: each entry names the JSON
# file with raw XAPI VM params and the JSON file with the expected parsed facts.
testcase_gather_vm_params_and_facts = {
    "params": [
        ["ansible-test-vm-1-params.json", "ansible-test-vm-1-facts.json"],
        ["ansible-test-vm-2-params.json", "ansible-test-vm-2-facts.json"],
        ["ansible-test-vm-3-params.json", "ansible-test-vm-3-facts.json"],
    ],
    "ids": [
        "ansible-test-vm-1",
        "ansible-test-vm-2",
        "ansible-test-vm-3",
    ],
}
@pytest.mark.parametrize('vm_ref', testcase_bad_xenapi_refs['params'], ids=testcase_bad_xenapi_refs['ids'])
def test_gather_vm_params_bad_vm_ref(fake_ansible_module, xenserver, vm_ref):
    """gather_vm_params() should return an empty dict for any invalid VM reference."""
    vm_params = xenserver.gather_vm_params(fake_ansible_module, vm_ref)
    assert vm_params == {}
def test_gather_vm_facts_no_vm_params(fake_ansible_module, xenserver):
    """gather_vm_facts() should return an empty dict when no VM params are supplied."""
    for empty_vm_params in (None, {}):
        assert xenserver.gather_vm_facts(fake_ansible_module, empty_vm_params) == {}
@pytest.mark.parametrize('fixture_data_from_file',
                         testcase_gather_vm_params_and_facts['params'],
                         ids=testcase_gather_vm_params_and_facts['ids'],
                         indirect=True)
def test_gather_vm_params_and_facts(mocker, fake_ansible_module, XenAPI, xenserver, fixture_data_from_file):
    """Tests proper parsing of VM parameters and facts."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    # The fixture loads both JSON files into one dict keyed by file name, but
    # its key order is not guaranteed, so detect which key is the params file
    # by the "params" substring in its name.
    if "params" in list(fixture_data_from_file.keys())[0]:
        params_file = list(fixture_data_from_file.keys())[0]
        facts_file = list(fixture_data_from_file.keys())[1]
    else:
        params_file = list(fixture_data_from_file.keys())[1]
        facts_file = list(fixture_data_from_file.keys())[0]

    # Route every XAPI *.get_record() call to the matching object class/ref
    # section of the loaded params fixture.
    mocked_returns = {
        "VM.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['VM'][obj_ref],
        "VM_metrics.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['VM_metrics'][obj_ref],
        "VM_guest_metrics.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['VM_guest_metrics'][obj_ref],
        "VBD.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['VBD'][obj_ref],
        "VDI.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['VDI'][obj_ref],
        "SR.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['SR'][obj_ref],
        "VIF.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['VIF'][obj_ref],
        "network.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['network'][obj_ref],
        "host.get_record.side_effect": lambda obj_ref: fixture_data_from_file[params_file]['host'][obj_ref],
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    # Pin the XenServer version so version-dependent fact parsing is stable.
    mocker.patch('ansible_collections.community.general.plugins.module_utils.xenserver.get_xenserver_version', return_value=[7, 2, 0])

    # The fixture contains exactly one VM record; use its ref as the entry point.
    vm_ref = list(fixture_data_from_file[params_file]['VM'].keys())[0]

    assert xenserver.gather_vm_facts(fake_ansible_module, xenserver.gather_vm_params(fake_ansible_module, vm_ref)) == fixture_data_from_file[facts_file]

View file

@ -0,0 +1,73 @@
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from .FakeAnsibleModule import FailJsonException
from .common import fake_xenapi_ref
def test_get_object_ref_xenapi_failure(mocker, fake_ansible_module, XenAPI, xenserver):
    """A XenAPI.Failure raised by the session must surface as a fail_json() error."""
    mocker.patch.object(XenAPI.Session, 'xenapi_request', side_effect=XenAPI.Failure('Fake XAPI method call error!'))

    with pytest.raises(FailJsonException) as exc:
        xenserver.get_object_ref(fake_ansible_module, "name")

    assert exc.value.kwargs['msg'] == "XAPI ERROR: Fake XAPI method call error!"
def test_get_object_ref_bad_uuid_and_name(mocker, fake_ansible_module, XenAPI, xenserver):
    """Tests failure on bad object uuid and/or name.

    get_object_ref() must fail before ever talking to XAPI when neither a
    valid name nor a UUID is supplied.
    """
    # patch.object() returns the mock that REPLACES xenapi_request itself.
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi_request')

    with pytest.raises(FailJsonException) as exc_info:
        xenserver.get_object_ref(fake_ansible_module, None, msg_prefix="Test: ")

    # Fixed: the original asserted on mocked_xenapi.xenapi_request, which is
    # an auto-created child attribute that is never called, making the
    # assertion vacuously true. The patch mock itself is xenapi_request.
    mocked_xenapi.assert_not_called()

    assert exc_info.value.kwargs['msg'] == "Test: no valid name or UUID supplied for VM!"
def test_get_object_ref_uuid_not_found(mocker, fake_ansible_module, XenAPI, xenserver):
    """Looking up a nonexistent UUID must fail loudly, or return None with fail=False."""
    mocker.patch.object(XenAPI.Session, 'xenapi_request', side_effect=XenAPI.Failure('Fake XAPI not found error!'))

    with pytest.raises(FailJsonException) as exc:
        xenserver.get_object_ref(fake_ansible_module, "name", uuid="fake-uuid", msg_prefix="Test: ")

    assert exc.value.kwargs['msg'] == "Test: VM with UUID 'fake-uuid' not found!"

    # With fail=False the same lookup should quietly return None instead.
    ref = xenserver.get_object_ref(fake_ansible_module, "name", uuid="fake-uuid", fail=False, msg_prefix="Test: ")
    assert ref is None
def test_get_object_ref_name_not_found(mocker, fake_ansible_module, XenAPI, xenserver):
    """Looking up a name with no matches must fail loudly, or return None with fail=False."""
    mocker.patch.object(XenAPI.Session, 'xenapi_request', return_value=[])

    with pytest.raises(FailJsonException) as exc:
        xenserver.get_object_ref(fake_ansible_module, "name", msg_prefix="Test: ")

    assert exc.value.kwargs['msg'] == "Test: VM with name 'name' not found!"

    # With fail=False the same lookup should quietly return None instead.
    ref = xenserver.get_object_ref(fake_ansible_module, "name", fail=False, msg_prefix="Test: ")
    assert ref is None
def test_get_object_ref_name_multiple_found(mocker, fake_ansible_module, XenAPI, xenserver):
    """An ambiguous name (multiple matches) must fail even when fail=False is passed."""
    mocker.patch.object(XenAPI.Session, 'xenapi_request', return_value=[fake_xenapi_ref('VM'), fake_xenapi_ref('VM')])

    error_msg = "Test: multiple VMs with name 'name' found! Please use UUID."

    # Ambiguity is an error regardless of the fail flag.
    for fail_flag in (True, False):
        with pytest.raises(FailJsonException) as exc:
            xenserver.get_object_ref(fake_ansible_module, "name", fail=fail_flag, msg_prefix="Test: ")
        assert exc.value.kwargs['msg'] == error_msg

View file

@ -0,0 +1,17 @@
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
def test_xapi_to_module_vm_power_state_bad_power_state(xenserver):
    """An unknown XAPI power state must map to None."""
    mapped_state = xenserver.xapi_to_module_vm_power_state("bad")
    assert mapped_state is None
def test_module_to_xapi_vm_power_state_bad_power_state(xenserver):
    """An unknown module power state must map to None."""
    mapped_state = xenserver.module_to_xapi_vm_power_state("bad")
    assert mapped_state is None

View file

@ -0,0 +1,182 @@
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible.module_utils.common.network import is_mac
# MAC address samples as (value, expected validity). Both dash- and
# colon-separated notation and both hex-digit cases are considered valid.
testcase_is_valid_mac_addr = [
    ('A4-23-8D-F8-C9-E5', True),
    ('35:71:F4:11:0B:D8', True),
    ('b3-bd-20-59-0c-cf', True),
    ('32:61:ca:65:f1:f4', True),
    ('asdf', False),
    ('A4-23-8D-G8-C9-E5', False),
    ('A4-3-8D-F8-C9-E5', False),
    ('A4-23-88D-F8-C9-E5', False),
    ('A4-23-8D-F8-C9_E5', False),
    ('A4-23--8D-F8-C9-E5', False),
]

# IPv4 address samples as (value, expected validity).
testcase_is_valid_ip_addr = [
    ('0.0.0.0', True),
    ('10.0.0.1', True),
    ('192.168.0.1', True),
    ('255.255.255.255', True),
    ('asdf', False),
    ('a.b.c.d', False),
    ('345.345.345.345', False),
    ('-10.0.0.1', False),
]

# IPv4 netmask samples as (value, expected validity); a valid netmask has
# contiguous leading one-bits (e.g. '255.0.248.0' is rejected).
testcase_is_valid_ip_netmask = [
    ('240.0.0.0', True),
    ('255.224.0.0', True),
    ('255.255.248.0', True),
    ('255.255.255.255', True),
    ('asdf', False),
    ('a.b.c.d', False),
    ('192.168.0.1', False),
    ('255.0.248.0', False),
]

# IPv4 prefix-length samples as (value, expected validity); valid range 0-32.
testcase_is_valid_ip_prefix = [
    ('0', True),
    ('16', True),
    ('24', True),
    ('32', True),
    ('asdf', False),
    ('-10', False),
    ('60', False),
    ('60s', False),
]

# Prefix-length -> netmask conversion samples; invalid input yields ''.
testcase_ip_prefix_to_netmask = {
    "params": [
        ('0', '0.0.0.0'),
        ('8', '255.0.0.0'),
        ('11', '255.224.0.0'),
        ('16', '255.255.0.0'),
        ('21', '255.255.248.0'),
        ('24', '255.255.255.0'),
        ('26', '255.255.255.192'),
        ('32', '255.255.255.255'),
        ('a', ''),
        ('60', ''),
    ],
    "ids": [
        '0',
        '8',
        '11',
        '16',
        '21',
        '24',
        '26',
        '32',
        'a',
        '60',
    ],
}

# Netmask -> prefix-length conversion samples; invalid input yields ''.
testcase_ip_netmask_to_prefix = {
    "params": [
        ('0.0.0.0', '0'),
        ('255.0.0.0', '8'),
        ('255.224.0.0', '11'),
        ('255.255.0.0', '16'),
        ('255.255.248.0', '21'),
        ('255.255.255.0', '24'),
        ('255.255.255.192', '26'),
        ('255.255.255.255', '32'),
        ('a', ''),
        ('60', ''),
    ],
    "ids": [
        '0.0.0.0',
        '255.0.0.0',
        '255.224.0.0',
        '255.255.0.0',
        '255.255.248.0',
        '255.255.255.0',
        '255.255.255.192',
        '255.255.255.255',
        'a',
        '60',
    ],
}

# IPv6 address samples as (value, expected validity).
testcase_is_valid_ip6_addr = [
    ('::1', True),
    ('2001:DB8:0:0:8:800:200C:417A', True),
    ('2001:DB8::8:800:200C:417A', True),
    ('FF01::101', True),
    ('asdf', False),
    ('2001:DB8:0:0:8:800:200C:417A:221', False),
    ('FF01::101::2', False),
    ('2001:db8:85a3::8a2e:370k:7334', False),
]

# IPv6 prefix-length samples as (value, expected validity); valid range 0-128.
testcase_is_valid_ip6_prefix = [
    ('0', True),
    ('56', True),
    ('78', True),
    ('128', True),
    ('asdf', False),
    ('-10', False),
    ('345', False),
    ('60s', False),
]
@pytest.mark.parametrize('mac_addr, result', testcase_is_valid_mac_addr)
def test_is_valid_mac_addr(xenserver, mac_addr, result):
    """Checks MAC address validation against known-good and known-bad samples."""
    verdict = is_mac(mac_addr)
    assert verdict is result
@pytest.mark.parametrize('ip_addr, result', testcase_is_valid_ip_addr)
def test_is_valid_ip_addr(xenserver, ip_addr, result):
    """Checks IPv4 address validation against known-good and known-bad samples."""
    verdict = xenserver.is_valid_ip_addr(ip_addr)
    assert verdict is result
@pytest.mark.parametrize('ip_netmask, result', testcase_is_valid_ip_netmask)
def test_is_valid_ip_netmask(xenserver, ip_netmask, result):
    """Checks IPv4 netmask validation against known-good and known-bad samples."""
    verdict = xenserver.is_valid_ip_netmask(ip_netmask)
    assert verdict is result
@pytest.mark.parametrize('ip_prefix, result', testcase_is_valid_ip_prefix)
def test_is_valid_ip_prefix(xenserver, ip_prefix, result):
    """Checks IPv4 prefix validation against known-good and known-bad samples."""
    verdict = xenserver.is_valid_ip_prefix(ip_prefix)
    assert verdict is result
@pytest.mark.parametrize('ip_prefix, ip_netmask', testcase_ip_prefix_to_netmask['params'], ids=testcase_ip_prefix_to_netmask['ids'])
def test_ip_prefix_to_netmask(xenserver, ip_prefix, ip_netmask):
    """Checks that each prefix length converts to the expected netmask."""
    converted = xenserver.ip_prefix_to_netmask(ip_prefix)
    assert converted == ip_netmask
@pytest.mark.parametrize('ip_netmask, ip_prefix', testcase_ip_netmask_to_prefix['params'], ids=testcase_ip_netmask_to_prefix['ids'])
def test_ip_netmask_to_prefix(xenserver, ip_netmask, ip_prefix):
    """Checks that each netmask converts to the expected prefix length."""
    converted = xenserver.ip_netmask_to_prefix(ip_netmask)
    assert converted == ip_prefix
@pytest.mark.parametrize('ip6_addr, result', testcase_is_valid_ip6_addr)
def test_is_valid_ip6_addr(xenserver, ip6_addr, result):
    """Checks IPv6 address validation against known-good and known-bad samples."""
    verdict = xenserver.is_valid_ip6_addr(ip6_addr)
    assert verdict is result
@pytest.mark.parametrize('ip6_prefix, result', testcase_is_valid_ip6_prefix)
def test_is_valid_ip6_prefix(xenserver, ip6_prefix, result):
    """Checks IPv6 prefix validation against known-good and known-bad samples."""
    verdict = xenserver.is_valid_ip6_prefix(ip6_prefix)
    assert verdict is result

View file

@ -0,0 +1,413 @@
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from .FakeAnsibleModule import FailJsonException
from .common import fake_xenapi_ref, testcase_bad_xenapi_refs
# Disallowed power state transitions as (desired module state, current XAPI
# state, expected error message).
testcase_set_vm_power_state_bad_transitions = {
    "params": [
        ('restarted', 'Halted', "Cannot restart VM in state 'poweredoff'!"),
        ('restarted', 'Suspended', "Cannot restart VM in state 'suspended'!"),
        ('suspended', 'Halted', "Cannot suspend VM in state 'poweredoff'!"),
        ('suspended', 'Paused', "Cannot suspend VM in state 'paused'!"),
        ('shutdownguest', 'Halted', "Cannot shutdown guest when VM is in state 'poweredoff'!"),
        ('shutdownguest', 'Suspended', "Cannot shutdown guest when VM is in state 'suspended'!"),
        ('shutdownguest', 'Paused', "Cannot shutdown guest when VM is in state 'paused'!"),
        ('rebootguest', 'Halted', "Cannot reboot guest when VM is in state 'poweredoff'!"),
        ('rebootguest', 'Suspended', "Cannot reboot guest when VM is in state 'suspended'!"),
        ('rebootguest', 'Paused', "Cannot reboot guest when VM is in state 'paused'!"),
    ],
    "ids": [
        "poweredoff->restarted",
        "suspended->restarted",
        "poweredoff->suspended",
        "paused->suspended",
        "poweredoff->shutdownguest",
        "suspended->shutdownguest",
        "paused->shutdownguest",
        "poweredoff->rebootguest",
        "suspended->rebootguest",
        "paused->rebootguest",
    ],
}
# Guest operations that run as async XAPI tasks, as (desired module state,
# expected error message when the task times out).
testcase_set_vm_power_state_task_timeout = {
    "params": [
        ('shutdownguest', "Guest shutdown task failed: 'timeout'!"),
        ('rebootguest', "Guest reboot task failed: 'timeout'!"),
    ],
    "ids": [
        "shutdownguest-timeout",
        "rebootguest-timeout",
    ],
}
# Requests that are no-ops because the VM is already in the desired state, as
# (desired module state in various accepted spellings, current XAPI state).
testcase_set_vm_power_state_no_transitions = {
    "params": [
        ('poweredon', "Running"),
        ('Poweredon', "Running"),
        ('powered-on', "Running"),
        ('Powered_on', "Running"),
        ('poweredoff', "Halted"),
        ('Poweredoff', "Halted"),
        ('powered-off', "Halted"),
        ('powered_off', "Halted"),
        ('suspended', "Suspended"),
        ('Suspended', "Suspended"),
    ],
    "ids": [
        "poweredon",
        "poweredon-cap",
        "poweredon-dash",
        "poweredon-under",
        "poweredoff",
        "poweredoff-cap",
        "poweredoff-dash",
        "poweredoff-under",
        "suspended",
        "suspended-cap",
    ],
}
# Allowed synchronous power state transitions as (desired module state in
# various accepted spellings, current XAPI state, resulting module state,
# XAPI method expected to perform the transition).
testcase_set_vm_power_state_transitions = {
    "params": [
        ('poweredon', 'Halted', 'running', 'VM.start'),
        ('Poweredon', 'Halted', 'running', 'VM.start'),
        ('powered-on', 'Halted', 'running', 'VM.start'),
        ('Powered_on', 'Halted', 'running', 'VM.start'),
        ('poweredon', 'Suspended', 'running', 'VM.resume'),
        ('Poweredon', 'Suspended', 'running', 'VM.resume'),
        ('powered-on', 'Suspended', 'running', 'VM.resume'),
        ('Powered_on', 'Suspended', 'running', 'VM.resume'),
        ('poweredon', 'Paused', 'running', 'VM.unpause'),
        ('Poweredon', 'Paused', 'running', 'VM.unpause'),
        ('powered-on', 'Paused', 'running', 'VM.unpause'),
        ('Powered_on', 'Paused', 'running', 'VM.unpause'),
        ('poweredoff', 'Running', 'halted', 'VM.hard_shutdown'),
        ('Poweredoff', 'Running', 'halted', 'VM.hard_shutdown'),
        ('powered-off', 'Running', 'halted', 'VM.hard_shutdown'),
        ('powered_off', 'Running', 'halted', 'VM.hard_shutdown'),
        ('poweredoff', 'Suspended', 'halted', 'VM.hard_shutdown'),
        ('Poweredoff', 'Suspended', 'halted', 'VM.hard_shutdown'),
        ('powered-off', 'Suspended', 'halted', 'VM.hard_shutdown'),
        ('powered_off', 'Suspended', 'halted', 'VM.hard_shutdown'),
        ('poweredoff', 'Paused', 'halted', 'VM.hard_shutdown'),
        ('Poweredoff', 'Paused', 'halted', 'VM.hard_shutdown'),
        ('powered-off', 'Paused', 'halted', 'VM.hard_shutdown'),
        ('powered_off', 'Paused', 'halted', 'VM.hard_shutdown'),
        ('restarted', 'Running', 'running', 'VM.hard_reboot'),
        ('Restarted', 'Running', 'running', 'VM.hard_reboot'),
        ('restarted', 'Paused', 'running', 'VM.hard_reboot'),
        ('Restarted', 'Paused', 'running', 'VM.hard_reboot'),
        ('suspended', 'Running', 'suspended', 'VM.suspend'),
        ('Suspended', 'Running', 'suspended', 'VM.suspend'),
        ('shutdownguest', 'Running', 'halted', 'VM.clean_shutdown'),
        ('Shutdownguest', 'Running', 'halted', 'VM.clean_shutdown'),
        ('shutdown-guest', 'Running', 'halted', 'VM.clean_shutdown'),
        ('shutdown_guest', 'Running', 'halted', 'VM.clean_shutdown'),
        ('rebootguest', 'Running', 'running', 'VM.clean_reboot'),
        # Fixed: was a duplicate of the lowercase 'rebootguest' entry, so the
        # "-cap" test id never actually exercised the capitalized spelling.
        ('Rebootguest', 'Running', 'running', 'VM.clean_reboot'),
        ('reboot-guest', 'Running', 'running', 'VM.clean_reboot'),
        ('reboot_guest', 'Running', 'running', 'VM.clean_reboot'),
    ],
    "ids": [
        "poweredoff->poweredon",
        "poweredoff->poweredon-cap",
        "poweredoff->poweredon-dash",
        "poweredoff->poweredon-under",
        "suspended->poweredon",
        "suspended->poweredon-cap",
        "suspended->poweredon-dash",
        "suspended->poweredon-under",
        "paused->poweredon",
        "paused->poweredon-cap",
        "paused->poweredon-dash",
        "paused->poweredon-under",
        "poweredon->poweredoff",
        "poweredon->poweredoff-cap",
        "poweredon->poweredoff-dash",
        "poweredon->poweredoff-under",
        "suspended->poweredoff",
        "suspended->poweredoff-cap",
        "suspended->poweredoff-dash",
        "suspended->poweredoff-under",
        "paused->poweredoff",
        "paused->poweredoff-cap",
        "paused->poweredoff-dash",
        "paused->poweredoff-under",
        "poweredon->restarted",
        "poweredon->restarted-cap",
        "paused->restarted",
        "paused->restarted-cap",
        "poweredon->suspended",
        "poweredon->suspended-cap",
        "poweredon->shutdownguest",
        "poweredon->shutdownguest-cap",
        "poweredon->shutdownguest-dash",
        "poweredon->shutdownguest-under",
        "poweredon->rebootguest",
        "poweredon->rebootguest-cap",
        "poweredon->rebootguest-dash",
        "poweredon->rebootguest-under",
    ],
}
# Guest operations executed as async XAPI tasks, as (desired module state in
# various accepted spellings, current XAPI state, resulting module state,
# async XAPI method expected to perform the transition).
testcase_set_vm_power_state_transitions_async = {
    "params": [
        ('shutdownguest', 'Running', 'halted', 'Async.VM.clean_shutdown'),
        ('Shutdownguest', 'Running', 'halted', 'Async.VM.clean_shutdown'),
        ('shutdown-guest', 'Running', 'halted', 'Async.VM.clean_shutdown'),
        ('shutdown_guest', 'Running', 'halted', 'Async.VM.clean_shutdown'),
        ('rebootguest', 'Running', 'running', 'Async.VM.clean_reboot'),
        # Fixed: was a duplicate of the lowercase 'rebootguest' entry, so the
        # "-cap" test id never actually exercised the capitalized spelling.
        ('Rebootguest', 'Running', 'running', 'Async.VM.clean_reboot'),
        ('reboot-guest', 'Running', 'running', 'Async.VM.clean_reboot'),
        ('reboot_guest', 'Running', 'running', 'Async.VM.clean_reboot'),
    ],
    "ids": [
        "poweredon->shutdownguest",
        "poweredon->shutdownguest-cap",
        "poweredon->shutdownguest-dash",
        "poweredon->shutdownguest-under",
        "poweredon->rebootguest",
        "poweredon->rebootguest-cap",
        "poweredon->rebootguest-dash",
        "poweredon->rebootguest-under",
    ],
}
@pytest.mark.parametrize('vm_ref', testcase_bad_xenapi_refs['params'], ids=testcase_bad_xenapi_refs['ids'])
def test_set_vm_power_state_bad_vm_ref(fake_ansible_module, xenserver, vm_ref):
    """set_vm_power_state() must fail for any invalid VM reference."""
    with pytest.raises(FailJsonException) as exc:
        xenserver.set_vm_power_state(fake_ansible_module, vm_ref, None)

    assert exc.value.kwargs['msg'] == "Cannot set VM power state. Invalid VM reference supplied!"
def test_set_vm_power_state_xenapi_failure(mock_xenapi_failure, fake_ansible_module, xenserver):
    """A XenAPI.Failure during the transition must surface as a fail_json() error."""
    with pytest.raises(FailJsonException) as exc:
        xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), "poweredon")

    expected_msg = "XAPI ERROR: %s" % mock_xenapi_failure[1]
    assert exc.value.kwargs['msg'] == expected_msg
def test_set_vm_power_state_bad_power_state(mocker, fake_ansible_module, XenAPI, xenserver):
    """Tests failure on unsupported power state."""
    # create=True because the stubbed Session presumably lacks a 'xenapi'
    # attribute to patch -- TODO confirm against the XenAPI stub.
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    mocked_returns = {
        "VM.get_power_state.return_value": "Running",
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    with pytest.raises(FailJsonException) as exc_info:
        xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), "bad")

    # Beside VM.get_power_state() no other method should have been
    # called additionally.
    assert len(mocked_xenapi.method_calls) == 1

    assert exc_info.value.kwargs['msg'] == "Requested VM power state 'bad' is unsupported!"
@pytest.mark.parametrize('power_state_desired, power_state_current, error_msg',
                         testcase_set_vm_power_state_bad_transitions['params'],
                         ids=testcase_set_vm_power_state_bad_transitions['ids'])
def test_set_vm_power_state_bad_transition(mocker, fake_ansible_module, XenAPI, xenserver, power_state_desired, power_state_current, error_msg):
    """Tests failure on bad power state transition."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    # Only the current power state query needs a canned answer; the disallowed
    # transition must fail before any transition method is invoked.
    mocked_returns = {
        "VM.get_power_state.return_value": power_state_current,
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    with pytest.raises(FailJsonException) as exc_info:
        xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), power_state_desired)

    # Beside VM.get_power_state() no other method should have been
    # called additionally.
    assert len(mocked_xenapi.method_calls) == 1

    assert exc_info.value.kwargs['msg'] == error_msg
@pytest.mark.parametrize('power_state, error_msg',
                         testcase_set_vm_power_state_task_timeout['params'],
                         ids=testcase_set_vm_power_state_task_timeout['ids'])
def test_set_vm_power_state_task_timeout(mocker, fake_ansible_module, XenAPI, xenserver, power_state, error_msg):
    """Tests failure on async task timeout."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    # Both guest operations return a fake task ref; wait_for_task is patched
    # below to report that the task timed out.
    mocked_returns = {
        "VM.get_power_state.return_value": "Running",
        "Async.VM.clean_shutdown.return_value": fake_xenapi_ref('task'),
        "Async.VM.clean_reboot.return_value": fake_xenapi_ref('task'),
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    mocker.patch('ansible_collections.community.general.plugins.module_utils.xenserver.wait_for_task', return_value="timeout")

    with pytest.raises(FailJsonException) as exc_info:
        xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), power_state, timeout=1)

    # Beside VM.get_power_state() only one of Async.VM.clean_shutdown or
    # Async.VM.clean_reboot should have been called additionally.
    assert len(mocked_xenapi.method_calls) == 2

    assert exc_info.value.kwargs['msg'] == error_msg
@pytest.mark.parametrize('power_state_desired, power_state_current',
                         testcase_set_vm_power_state_no_transitions['params'],
                         ids=testcase_set_vm_power_state_no_transitions['ids'])
def test_set_vm_power_state_no_transition(mocker, fake_ansible_module, XenAPI, xenserver, power_state_desired, power_state_current):
    """Tests regular invocation without power state transition."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    mocked_returns = {
        "VM.get_power_state.return_value": power_state_current,
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    result = xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), power_state_desired)

    # Beside VM.get_power_state() no other method should have been
    # called additionally.
    assert len(mocked_xenapi.method_calls) == 1

    # Return value is (changed, resulting power state); nothing changed here.
    assert result[0] is False
    assert result[1] == power_state_current.lower()
@pytest.mark.parametrize('power_state_desired, power_state_current, power_state_resulting, activated_xenapi_method',
                         testcase_set_vm_power_state_transitions['params'],
                         ids=testcase_set_vm_power_state_transitions['ids'])
def test_set_vm_power_state_transition(mocker,
                                       fake_ansible_module,
                                       XenAPI,
                                       xenserver,
                                       power_state_desired,
                                       power_state_current,
                                       power_state_resulting,
                                       activated_xenapi_method):
    """Tests regular invocation with power state transition."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    mocked_returns = {
        "VM.get_power_state.return_value": power_state_current,
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    result = xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), power_state_desired, timeout=0)

    # Walk the dotted method path (e.g. "VM.start") down the mock hierarchy
    # to reach the mock for the expected transition method.
    mocked_xenapi_method = mocked_xenapi

    for activated_xenapi_class in activated_xenapi_method.split('.'):
        mocked_xenapi_method = getattr(mocked_xenapi_method, activated_xenapi_class)

    mocked_xenapi_method.assert_called_once()

    # Beside VM.get_power_state() only activated_xenapi_method should have
    # been called additionally.
    assert len(mocked_xenapi.method_calls) == 2

    # Return value is (changed, resulting power state).
    assert result[0] is True
    assert result[1] == power_state_resulting
@pytest.mark.parametrize('power_state_desired, power_state_current, power_state_resulting, activated_xenapi_method',
                         testcase_set_vm_power_state_transitions_async['params'],
                         ids=testcase_set_vm_power_state_transitions_async['ids'])
def test_set_vm_power_state_transition_async(mocker,
                                             fake_ansible_module,
                                             XenAPI,
                                             xenserver,
                                             power_state_desired,
                                             power_state_current,
                                             power_state_resulting,
                                             activated_xenapi_method):
    """
    Tests regular invocation with async power state transition
    (shutdownguest and rebootguest only).
    """
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    # The expected async method must return a task ref; wait_for_task is
    # patched below to report immediate success (empty string).
    mocked_returns = {
        "VM.get_power_state.return_value": power_state_current,
        "%s.return_value" % activated_xenapi_method: fake_xenapi_ref('task'),
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    mocker.patch('ansible_collections.community.general.plugins.module_utils.xenserver.wait_for_task', return_value="")

    result = xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), power_state_desired, timeout=1)

    # Walk the dotted method path (e.g. "Async.VM.clean_reboot") down the
    # mock hierarchy to reach the mock for the expected transition method.
    mocked_xenapi_method = mocked_xenapi

    for activated_xenapi_class in activated_xenapi_method.split('.'):
        mocked_xenapi_method = getattr(mocked_xenapi_method, activated_xenapi_class)

    mocked_xenapi_method.assert_called_once()

    # Beside VM.get_power_state() only activated_xenapi_method should have
    # been called additionally.
    assert len(mocked_xenapi.method_calls) == 2

    # Return value is (changed, resulting power state).
    assert result[0] is True
    assert result[1] == power_state_resulting
@pytest.mark.parametrize('power_state_desired, power_state_current, power_state_resulting, activated_xenapi_method',
                         testcase_set_vm_power_state_transitions['params'],
                         ids=testcase_set_vm_power_state_transitions['ids'])
def test_set_vm_power_state_transition_check_mode(mocker,
                                                  fake_ansible_module,
                                                  XenAPI,
                                                  xenserver,
                                                  power_state_desired,
                                                  power_state_current,
                                                  power_state_resulting,
                                                  activated_xenapi_method):
    """Tests regular invocation with power state transition in check mode."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    mocked_returns = {
        "VM.get_power_state.return_value": power_state_current,
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    # In check mode the transition must be reported but not executed.
    fake_ansible_module.check_mode = True

    result = xenserver.set_vm_power_state(fake_ansible_module, fake_xenapi_ref('VM'), power_state_desired, timeout=0)

    # Walk the dotted method path down the mock hierarchy to reach the mock
    # for the transition method that must NOT have been invoked.
    mocked_xenapi_method = mocked_xenapi

    for activated_xenapi_class in activated_xenapi_method.split('.'):
        mocked_xenapi_method = getattr(mocked_xenapi_method, activated_xenapi_class)

    mocked_xenapi_method.assert_not_called()

    # Beside VM.get_power_state() no other method should have been
    # called additionally.
    assert len(mocked_xenapi.method_calls) == 1

    # Changed is still reported, with the would-be resulting state.
    assert result[0] is True
    assert result[1] == power_state_resulting

View file

@ -0,0 +1,220 @@
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from .FakeAnsibleModule import FailJsonException
from .common import fake_xenapi_ref, testcase_bad_xenapi_refs
# XAPI power states in which waiting for a VM IP address must fail.
testcase_wait_for_vm_ip_address_bad_power_states = {
    "params": [
        'Halted',
        'Paused',
        'Suspended',
        'Other',
    ],
    "ids": [
        'state-halted',
        'state-paused',
        'state-suspended',
        'state-other',
    ]
}

# Guest-metrics states that never yield an IP address, as
# (guest metrics ref, guest metrics record).
testcase_wait_for_vm_ip_address_bad_guest_metrics = {
    "params": [
        ('OpaqueRef:NULL', {"networks": {}}),
        (fake_xenapi_ref('VM_guest_metrics'), {"networks": {}}),
    ],
    "ids": [
        'vm_guest_metrics_ref-null, no-ip',
        'vm_guest_metrics_ref-ok, no-ip',
    ],
}

# Final XAPI task statuses as (status, expected wait_for_task() result);
# success is signalled by an empty string.
testcase_wait_for_task_all_statuses = {
    "params": [
        ('Success', ''),
        ('Failure', 'failure'),
        ('Cancelling', 'cancelling'),
        ('Cancelled', 'cancelled'),
        ('Other', 'other'),
    ],
    "ids": [
        'task-success',
        'task-failure',
        'task-cancelling',
        'task-cancelled',
        'task-other',
    ]
}
@pytest.mark.parametrize('vm_ref', testcase_bad_xenapi_refs['params'], ids=testcase_bad_xenapi_refs['ids'])
def test_wait_for_vm_ip_address_bad_vm_ref(fake_ansible_module, xenserver, vm_ref):
    """wait_for_vm_ip_address() must fail for any invalid VM reference."""
    with pytest.raises(FailJsonException) as exc:
        xenserver.wait_for_vm_ip_address(fake_ansible_module, vm_ref)

    assert exc.value.kwargs['msg'] == "Cannot wait for VM IP address. Invalid VM reference supplied!"
def test_wait_for_vm_ip_address_xenapi_failure(mock_xenapi_failure, xenserver, fake_ansible_module):
    """A XenAPI.Failure while waiting must surface as a fail_json() error."""
    with pytest.raises(FailJsonException) as exc:
        xenserver.wait_for_vm_ip_address(fake_ansible_module, fake_xenapi_ref('VM'))

    expected_msg = "XAPI ERROR: %s" % mock_xenapi_failure[1]
    assert exc.value.kwargs['msg'] == expected_msg
@pytest.mark.parametrize('bad_power_state',
                         testcase_wait_for_vm_ip_address_bad_power_states['params'],
                         ids=testcase_wait_for_vm_ip_address_bad_power_states['ids'])
def test_wait_for_vm_ip_address_bad_power_state(mocker, fake_ansible_module, XenAPI, xenserver, bad_power_state):
    """Tests failure on bad power state."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    mocked_returns = {
        "VM.get_power_state.return_value": bad_power_state,
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    with pytest.raises(FailJsonException) as exc_info:
        xenserver.wait_for_vm_ip_address(fake_ansible_module, fake_xenapi_ref('VM'))

    # The error message embeds the module-notation name of the bad state.
    assert exc_info.value.kwargs['msg'] == ("Cannot wait for VM IP address when VM is in state '%s'!" %
                                            xenserver.xapi_to_module_vm_power_state(bad_power_state.lower()))
@pytest.mark.parametrize('bad_guest_metrics_ref, bad_guest_metrics',
                         testcase_wait_for_vm_ip_address_bad_guest_metrics['params'],
                         ids=testcase_wait_for_vm_ip_address_bad_guest_metrics['ids'])
def test_wait_for_vm_ip_address_timeout(mocker, fake_ansible_module, XenAPI, xenserver, bad_guest_metrics_ref, bad_guest_metrics):
    """Tests timeout."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    # Guest metrics never contain an IP address, so the wait can only time out.
    mocked_returns = {
        "VM.get_power_state.return_value": "Running",
        "VM.get_guest_metrics.return_value": bad_guest_metrics_ref,
        "VM_guest_metrics.get_record.return_value": bad_guest_metrics,
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    # Patch out time.sleep so the polling loop finishes instantly.
    mocker.patch('time.sleep')

    with pytest.raises(FailJsonException) as exc_info:
        xenserver.wait_for_vm_ip_address(fake_ansible_module, fake_xenapi_ref('VM'), timeout=1)

    assert exc_info.value.kwargs['msg'] == "Timed out waiting for VM IP address!"
def test_wait_for_vm_ip_address(mocker, fake_ansible_module, XenAPI, xenserver):
    """Tests regular invocation.

    The function should poll until guest metrics with an IP address appear
    and then return the full guest metrics record.
    """
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    # This mock simulates regular VM IP acquirement lifecycle:
    #
    # 1) First, no guest metrics are available because VM is not yet fully
    #    booted and guest agent is not yet started.
    # 2) Next, guest agent is started and guest metrics are available but
    #    IP address is still not acquired.
    # 3) Lastly, IP address is acquired by VM on its primary VIF.
    mocked_returns = {
        "VM.get_power_state.return_value": "Running",
        "VM.get_guest_metrics.side_effect": [
            'OpaqueRef:NULL',
            fake_xenapi_ref('VM_guest_metrics'),
            fake_xenapi_ref('VM_guest_metrics'),
        ],
        "VM_guest_metrics.get_record.side_effect": [
            {
                "networks": {},
            },
            {
                "networks": {
                    "0/ip": "192.168.0.1",
                    "1/ip": "10.0.0.1",
                },
            },
        ],
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    # Patch out time.sleep so the polling loop runs instantly.
    mocker.patch('time.sleep')

    fake_guest_metrics = xenserver.wait_for_vm_ip_address(fake_ansible_module, fake_xenapi_ref('VM'))

    # The second get_record() payload (with IPs) is what must be returned.
    assert fake_guest_metrics == mocked_returns['VM_guest_metrics.get_record.side_effect'][1]
@pytest.mark.parametrize('task_ref', testcase_bad_xenapi_refs['params'], ids=testcase_bad_xenapi_refs['ids'])
def test_wait_for_task_bad_task_ref(fake_ansible_module, xenserver, task_ref):
    """wait_for_task() must fail for any invalid task reference."""
    with pytest.raises(FailJsonException) as exc:
        xenserver.wait_for_task(fake_ansible_module, task_ref)

    assert exc.value.kwargs['msg'] == "Cannot wait for task. Invalid task reference supplied!"
def test_wait_for_task_xenapi_failure(mock_xenapi_failure, fake_ansible_module, xenserver):
    """A XenAPI.Failure while waiting for a task must surface as a fail_json() error."""
    with pytest.raises(FailJsonException) as exc:
        xenserver.wait_for_task(fake_ansible_module, fake_xenapi_ref('task'))

    expected_msg = "XAPI ERROR: %s" % mock_xenapi_failure[1]
    assert exc.value.kwargs['msg'] == expected_msg
def test_wait_for_task_timeout(mocker, fake_ansible_module, XenAPI, xenserver):
    """A task that never leaves 'Pending' must time out and still be destroyed."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)
    mocked_xenapi.configure_mock(**{
        "task.get_status.return_value": "Pending",
        "task.destroy.return_value": None,
    })

    # Avoid real delays while polling.
    mocker.patch('time.sleep')

    wait_result = xenserver.wait_for_task(fake_ansible_module, fake_xenapi_ref('task'), timeout=1)

    # Even on timeout the task reference must be cleaned up.
    mocked_xenapi.task.destroy.assert_called_once()
    assert wait_result == "timeout"
@pytest.mark.parametrize('task_status, result',
                         testcase_wait_for_task_all_statuses['params'],
                         ids=testcase_wait_for_task_all_statuses['ids'])
def test_wait_for_task(mocker, fake_ansible_module, XenAPI, xenserver, task_status, result):
    """Tests regular invocation."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    # Mock will first return Pending status and on second invocation it will
    # return one of possible final statuses.
    mocked_returns = {
        "task.get_status.side_effect": [
            'Pending',
            task_status,
        ],
        "task.destroy.return_value": None,
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    # Avoid real delays while polling the task status.
    mocker.patch('time.sleep')

    fake_result = xenserver.wait_for_task(fake_ansible_module, fake_xenapi_ref('task'))

    # A finished task must be destroyed and its final status mapped to the
    # parametrized `result` value.
    mocked_xenapi.task.destroy.assert_called_once()
    assert fake_result == result

View file

@ -0,0 +1,175 @@
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
import atexit
from .FakeAnsibleModule import FailJsonException
from ansible.module_utils.ansible_release import __version__ as ANSIBLE_VERSION
# Module params simulating a connection to the local XenServer host.
# Hostname "localhost" is the case exercised by the local-session tests.
testcase_module_local_conn = {
    "params": [
        {
            "hostname": "localhost",
            "username": "someuser",
            "password": "somepwd",
            "validate_certs": True,
        },
    ],
    "ids": [
        "local-conn",
    ],
}

# Module params simulating a remote connection whose hostname carries no
# explicit URL scheme.
testcase_module_remote_conn = {
    "params": [
        {
            "hostname": "somehost",
            "username": "someuser",
            "password": "somepwd",
            "validate_certs": True,
        },
    ],
    "ids": [
        "remote-conn",
    ],
}

# Module params with an explicit http:// or https:// scheme in the hostname,
# used to verify the scheme is preserved as-is.
testcase_module_remote_conn_scheme = {
    "params": [
        {
            "hostname": "http://somehost",
            "username": "someuser",
            "password": "somepwd",
            "validate_certs": True,
        },
        {
            "hostname": "https://somehost",
            "username": "someuser",
            "password": "somepwd",
            "validate_certs": True,
        },
    ],
    "ids": [
        "remote-conn-http",
        "remote-conn-https",
    ],
}
@pytest.mark.parametrize('fake_ansible_module', testcase_module_local_conn['params'], ids=testcase_module_local_conn['ids'], indirect=True)
def test_xapi_connect_local_session(mocker, fake_ansible_module, XenAPI, xenserver):
    """Connecting to localhost must go through XenAPI.xapi_local()."""
    mocker.patch('XenAPI.xapi_local')

    xenserver.XAPI.connect(fake_ansible_module)

    XenAPI.xapi_local.assert_called_once()
@pytest.mark.parametrize('fake_ansible_module', testcase_module_local_conn['params'], ids=testcase_module_local_conn['ids'], indirect=True)
def test_xapi_connect_local_login(mocker, fake_ansible_module, XenAPI, xenserver):
    """Local sessions must authenticate with blank username and password."""
    mocker.patch.object(XenAPI.Session, 'login_with_password', create=True)

    xenserver.XAPI.connect(fake_ansible_module)

    XenAPI.Session.login_with_password.assert_called_once_with('', '', ANSIBLE_VERSION, 'Ansible')
def test_xapi_connect_login(mocker, fake_ansible_module, XenAPI, xenserver):
    """
    Module-supplied credentials must be propagated verbatim to
    XenAPI.Session.login_with_password().
    """
    mocker.patch.object(XenAPI.Session, 'login_with_password', create=True)

    params = fake_ansible_module.params

    xenserver.XAPI.connect(fake_ansible_module)

    XenAPI.Session.login_with_password.assert_called_once_with(
        params['username'], params['password'], ANSIBLE_VERSION, 'Ansible')
def test_xapi_connect_login_failure(mocker, fake_ansible_module, XenAPI, xenserver):
    """A XenAPI login failure must be reported through fail_json."""
    fake_error_msg = "Fake XAPI login error!"

    mocked_login = mocker.patch.object(XenAPI.Session, 'login_with_password', create=True)
    mocked_login.side_effect = XenAPI.Failure(fake_error_msg)

    params = fake_ansible_module.params
    expected_msg = "Unable to log on to XenServer at http://%s as %s: %s" % (
        params['hostname'], params['username'], fake_error_msg)

    with pytest.raises(FailJsonException) as exc_info:
        xenserver.XAPI.connect(fake_ansible_module)

    assert exc_info.value.kwargs['msg'] == expected_msg
@pytest.mark.parametrize('fake_ansible_module', testcase_module_remote_conn_scheme['params'], ids=testcase_module_remote_conn_scheme['ids'], indirect=True)
def test_xapi_connect_remote_scheme(mocker, fake_ansible_module, XenAPI, xenserver):
    """A hostname that already carries a scheme must be passed through as-is."""
    mocker.patch('XenAPI.Session')

    xenserver.XAPI.connect(fake_ansible_module)

    params = fake_ansible_module.params
    XenAPI.Session.assert_called_once_with(params['hostname'], ignore_ssl=not params['validate_certs'])
@pytest.mark.parametrize('fake_ansible_module', testcase_module_remote_conn['params'], ids=testcase_module_remote_conn['ids'], indirect=True)
def test_xapi_connect_remote_no_scheme(mocker, fake_ansible_module, XenAPI, xenserver):
    """A scheme-less hostname must be prefixed with http:// before connecting."""
    mocker.patch('XenAPI.Session')

    xenserver.XAPI.connect(fake_ansible_module)

    params = fake_ansible_module.params
    XenAPI.Session.assert_called_once_with("http://%s" % params['hostname'],
                                           ignore_ssl=not params['validate_certs'])
def test_xapi_connect_support_ignore_ssl(mocker, fake_ansible_module, XenAPI, xenserver):
    """Tests proper handling of ignore_ssl support."""
    mocked_session = mocker.patch('XenAPI.Session')
    # Simulate an older XenAPI.Session that rejects the ignore_ssl keyword
    # argument by raising TypeError. Since the mock raises on every call,
    # the fallback attempt fails too and the TypeError propagates.
    mocked_session.side_effect = TypeError()

    with pytest.raises(TypeError) as exc_info:
        xapi_session = xenserver.XAPI.connect(fake_ansible_module)

    hostname = fake_ansible_module.params['hostname']
    ignore_ssl = not fake_ansible_module.params['validate_certs']

    # The last call must have been the retry WITHOUT ignore_ssl, showing
    # that XAPI.connect() falls back for older XenAPI versions.
    XenAPI.Session.assert_called_with("http://%s" % hostname)
def test_xapi_connect_no_disconnect_atexit(mocker, fake_ansible_module, XenAPI, xenserver):
    """With disconnect_atexit=False, no atexit cleanup handler is registered."""
    mocker.patch('atexit.register')

    xenserver.XAPI.connect(fake_ansible_module, disconnect_atexit=False)

    atexit.register.assert_not_called()
def test_xapi_connect_singleton(mocker, fake_ansible_module, XenAPI, xenserver):
    """Repeated XAPI.connect() calls must share one underlying session."""
    mocker.patch('XenAPI.Session')

    first_session = xenserver.XAPI.connect(fake_ansible_module)
    second_session = xenserver.XAPI.connect(fake_ansible_module)

    # Only one session object is ever created; both calls return it.
    XenAPI.Session.assert_called_once()
    assert first_session == second_session

View file

@ -0,0 +1,50 @@
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from .FakeAnsibleModule import FailJsonException
from .common import fake_xenapi_ref
def test_xenserverobject_xenapi_lib_detection(mocker, fake_ansible_module, xenserver):
    """XenServerObject must fail the module when the XenAPI lib is absent."""
    # Pretend the XenAPI library could not be imported.
    mocker.patch('ansible_collections.community.general.plugins.module_utils.xenserver.HAS_XENAPI', new=False)

    with pytest.raises(FailJsonException) as exc_info:
        xenserver.XenServerObject(fake_ansible_module)

    expected_fragment = 'Failed to import the required Python library (XenAPI) on'
    assert expected_fragment in exc_info.value.kwargs['msg']
def test_xenserverobject_xenapi_failure(mock_xenapi_failure, fake_ansible_module, xenserver):
    """XenAPI failures during construction must surface through fail_json."""
    expected_msg = "XAPI ERROR: %s" % mock_xenapi_failure[1]

    with pytest.raises(FailJsonException) as exc_info:
        xenserver.XenServerObject(fake_ansible_module)

    assert exc_info.value.kwargs['msg'] == expected_msg
def test_xenserverobject(mocker, fake_ansible_module, XenAPI, xenserver):
    """Tests successful creation of XenServerObject."""
    mocked_xenapi = mocker.patch.object(XenAPI.Session, 'xenapi', create=True)

    mocked_returns = {
        # A single pool with a default SR; the constructor queries these.
        "pool.get_all.return_value": [fake_xenapi_ref('pool')],
        "pool.get_default_SR.return_value": fake_xenapi_ref('SR'),
        "session.get_this_host.return_value": fake_xenapi_ref('host'),
        "host.get_software_version.return_value": {"product_version": "7.2.0"},
    }

    mocked_xenapi.configure_mock(**mocked_returns)

    xso = xenserver.XenServerObject(fake_ansible_module)

    assert xso.pool_ref == fake_xenapi_ref('pool')
    # The "7.2.0" version string is parsed into a list of ints.
    assert xso.xenserver_version == [7, 2, 0]

View file

View file

@ -0,0 +1,137 @@
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import sys
import pytest
import ansible_collections.community.general.tests.unit.compat.unittest as unittest
from ansible_collections.community.general.tests.unit.compat.mock import MagicMock
from ansible_collections.community.general.tests.unit.compat.unittest import TestCase
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args
# Exoscale's cs doesn't support Python 2.6
# Skip markers collected here apply to the whole module (pytest convention).
pytestmark = []
if sys.version_info[:2] != (2, 6):
    # Import the module under test only on supported Python versions, since
    # importing it pulls in the "cs" library.
    from ansible_collections.community.general.plugins.modules.cloud.cloudstack.cs_traffic_type import AnsibleCloudStackTrafficType, setup_module_object
    from ansible_collections.community.general.plugins.module_utils.cloudstack import HAS_LIB_CS

    if not HAS_LIB_CS:
        pytestmark.append(pytest.mark.skip('The cloudstack library, "cs", is needed to test cs_traffic_type'))
else:
    pytestmark.append(pytest.mark.skip('Exoscale\'s cs doesn\'t support Python 2.6'))
# Canned CloudStack API response: traffic types already configured on the
# physical network (Management/Guest/Storage).
EXISTING_TRAFFIC_TYPES_RESPONSE = {
    "count": 3,
    "traffictype": [
        {
            "id": "9801cf73-5a73-4883-97e4-fa20c129226f",
            "kvmnetworklabel": "cloudbr0",
            "physicalnetworkid": "659c1840-9374-440d-a412-55ca360c9d3c",
            "traffictype": "Management"
        },
        {
            "id": "28ed70b7-9a1f-41bf-94c3-53a9f22da8b6",
            "kvmnetworklabel": "cloudbr0",
            "physicalnetworkid": "659c1840-9374-440d-a412-55ca360c9d3c",
            "traffictype": "Guest"
        },
        {
            "id": "9c05c802-84c0-4eda-8f0a-f681364ffb46",
            "kvmnetworklabel": "cloudbr0",
            "physicalnetworkid": "659c1840-9374-440d-a412-55ca360c9d3c",
            "traffictype": "Storage"
        }
    ]
}

# Canned listPhysicalNetworks response with the single network "eth1".
VALID_LIST_NETWORKS_RESPONSE = {
    "count": 1,
    "physicalnetwork": [
        {
            "broadcastdomainrange": "ZONE",
            "id": "659c1840-9374-440d-a412-55ca360c9d3c",
            "name": "eth1",
            "state": "Enabled",
            "vlan": "3900-4000",
            "zoneid": "49acf813-a8dd-4da0-aa53-1d826d6003e7"
        }
    ]
}

# Canned listZones response with the single zone "DevCloud-01".
VALID_LIST_ZONES_RESPONSE = {
    "count": 1,
    "zone": [
        {
            "allocationstate": "Enabled",
            "dhcpprovider": "VirtualRouter",
            "dns1": "8.8.8.8",
            "dns2": "8.8.4.4",
            "guestcidraddress": "10.10.0.0/16",
            "id": "49acf813-a8dd-4da0-aa53-1d826d6003e7",
            "internaldns1": "192.168.56.1",
            "localstorageenabled": True,
            "name": "DevCloud-01",
            "networktype": "Advanced",
            "securitygroupsenabled": False,
            "tags": [],
            "zonetoken": "df20d65a-c6c8-3880-9064-4f77de2291ef"
        }
    ]
}

# Baseline module arguments shared by all test cases below.
base_module_args = {
    "api_key": "api_key",
    "api_secret": "very_secret_content",
    "api_url": "http://localhost:8888/api/client",
    "kvm_networklabel": "cloudbr0",
    "physical_network": "eth1",
    "poll_async": True,
    "state": "present",
    "traffic_type": "Guest",
    "zone": "DevCloud-01"
}
class TestAnsibleCloudstackTraffiType(TestCase):
    """Unit tests for the cs_traffic_type module.

    NOTE(review): class name typo ("TraffiType") kept as-is.
    """

    def test_module_is_created_sensibly(self):
        # setup_module_object() must build an AnsibleModule from the set args.
        set_module_args(base_module_args)
        module = setup_module_object()
        assert module.params['traffic_type'] == 'Guest'

    def test_update_called_when_traffic_type_exists(self):
        # When the traffic type already exists, present_traffic_type()
        # must take the update path.
        set_module_args(base_module_args)
        module = setup_module_object()
        actt = AnsibleCloudStackTrafficType(module)
        actt.get_traffic_type = MagicMock(return_value=EXISTING_TRAFFIC_TYPES_RESPONSE['traffictype'][0])
        actt.update_traffic_type = MagicMock()
        actt.present_traffic_type()
        self.assertTrue(actt.update_traffic_type.called)

    def test_update_not_called_when_traffic_type_doesnt_exist(self):
        # When the traffic type is missing, it must be added, not updated.
        set_module_args(base_module_args)
        module = setup_module_object()
        actt = AnsibleCloudStackTrafficType(module)
        actt.get_traffic_type = MagicMock(return_value=None)
        actt.update_traffic_type = MagicMock()
        actt.add_traffic_type = MagicMock()
        actt.present_traffic_type()
        self.assertFalse(actt.update_traffic_type.called)
        self.assertTrue(actt.add_traffic_type.called)

    def test_traffic_type_returned_if_exists(self):
        # present_traffic_type() must return the existing matching record.
        set_module_args(base_module_args)
        module = setup_module_object()
        actt = AnsibleCloudStackTrafficType(module)
        actt.get_physical_network = MagicMock(return_value=VALID_LIST_NETWORKS_RESPONSE['physicalnetwork'][0])
        actt.get_traffic_types = MagicMock(return_value=EXISTING_TRAFFIC_TYPES_RESPONSE)
        tt = actt.present_traffic_type()
        self.assertTrue(tt.get('kvmnetworklabel') == base_module_args['kvm_networklabel'])
        self.assertTrue(tt.get('traffictype') == base_module_args['traffic_type'])
# Allow running this test module directly (outside of pytest).
if __name__ == '__main__':
    unittest.main()

View file

@ -0,0 +1,22 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import unittest
from ansible_collections.community.general.plugins.modules.cloud.docker.docker_container import TaskParameters
class TestTaskParameters(unittest.TestCase):
    """Unit tests for TaskParameters."""

    def test_parse_exposed_ports_tcp_udp(self):
        """
        Ensure _parse_exposed_ports does not cancel ports with the same
        number but different protocol.
        """
        # Bypass __init__ so no docker client / AnsibleModule is required.
        params = TaskParameters.__new__(TaskParameters)
        params.exposed_ports = None

        parsed = params._parse_exposed_ports([80, '443', '443/udp'])

        # 443/tcp and 443/udp must both survive parsing.
        for expected_port in ((80, 'tcp'), (443, 'tcp'), (443, 'udp')):
            self.assertIn(expected_port, parsed)

View file

@ -0,0 +1,31 @@
"""Unit tests for docker_network."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
from ansible_collections.community.general.plugins.modules.cloud.docker.docker_network import validate_cidr
@pytest.mark.parametrize("cidr,expected", [
    ('192.168.0.1/16', 'ipv4'),
    ('192.168.0.1/24', 'ipv4'),
    ('192.168.0.1/32', 'ipv4'),
    ('fdd1:ac8c:0557:7ce2::/64', 'ipv6'),
    ('fdd1:ac8c:0557:7ce2::/128', 'ipv6'),
])
def test_validate_cidr_positives(cidr, expected):
    """validate_cidr() classifies well-formed CIDRs by address family."""
    result = validate_cidr(cidr)
    assert result == expected
@pytest.mark.parametrize("cidr", [
    '192.168.0.1',
    '192.168.0.1/34',
    '192.168.0.1/asd',
    'fdd1:ac8c:0557:7ce2::',
])
def test_validate_cidr_negatives(cidr):
    """validate_cidr() raises ValueError for malformed CIDRs."""
    with pytest.raises(ValueError) as e:
        validate_cidr(cidr)

    expected_message = '"{0}" is not a valid CIDR'.format(cidr)
    assert expected_message == str(e.value)

View file

@ -0,0 +1,510 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
class APIErrorMock(Exception):
    """Minimal stand-in for docker.errors.APIError used by these tests."""

    def __init__(self, message, response=None, explanation=None):
        # Mirror the attributes the module under test reads from APIError.
        self.message, self.response, self.explanation = message, response, explanation
@pytest.fixture(autouse=True)
def docker_module_mock(mocker):
    """Stub the docker SDK modules in sys.modules so the module under test
    imports cleanly without the real Docker SDK installed."""
    docker_module_mock = mocker.MagicMock()
    docker_utils_module_mock = mocker.MagicMock()
    docker_errors_module_mock = mocker.MagicMock()
    # APIError must be a real exception class so `except APIError` works.
    docker_errors_module_mock.APIError = APIErrorMock
    mock_modules = {
        'docker': docker_module_mock,
        'docker.utils': docker_utils_module_mock,
        'docker.errors': docker_errors_module_mock,
    }
    return mocker.patch.dict('sys.modules', **mock_modules)
@pytest.fixture(autouse=True)
def docker_swarm_service():
    """Import the module under test lazily, after the docker SDK modules
    have been replaced by mocks (see docker_module_mock)."""
    from ansible_collections.community.general.plugins.modules.cloud.docker import docker_swarm_service

    return docker_swarm_service
def test_retry_on_out_of_sequence_error(mocker, docker_swarm_service):
    """'update out of sequence' API errors are retried before giving up."""
    out_of_sequence_error = APIErrorMock(
        message='',
        response=None,
        explanation='rpc error: code = Unknown desc = update out of sequence',
    )
    run_mock = mocker.MagicMock(side_effect=out_of_sequence_error)

    manager = docker_swarm_service.DockerServiceManager(client=None)
    manager.run = run_mock

    with pytest.raises(APIErrorMock):
        manager.run_safe()

    # Three attempts in total: the original call plus two retries.
    assert run_mock.call_count == 3
def test_no_retry_on_general_api_error(mocker, docker_swarm_service):
    """Generic API errors must not be retried by run_safe()."""
    generic_error = APIErrorMock(message='', response=None, explanation='some error')
    run_mock = mocker.MagicMock(side_effect=generic_error)

    manager = docker_swarm_service.DockerServiceManager(client=None)
    manager.run = run_mock

    with pytest.raises(APIErrorMock):
        manager.run_safe()

    assert run_mock.call_count == 1
def test_get_docker_environment(mocker, docker_swarm_service):
    """get_docker_environment() merges env vars read from files with env
    vars given explicitly (as a dict, a comma-separated string, or a list),
    with the explicit values taking precedence (TEST3 below)."""
    env_file_result = {'TEST1': 'A', 'TEST2': 'B', 'TEST3': 'C'}
    env_dict = {'TEST3': 'CC', 'TEST4': 'D'}
    env_string = "TEST3=CC,TEST4=D"
    env_list = ['TEST3=CC', 'TEST4=D']
    # TEST3 from the env file is overridden by the explicitly given value.
    expected_result = sorted(['TEST1=A', 'TEST2=B', 'TEST3=CC', 'TEST4=D'])
    mocker.patch.object(
        docker_swarm_service, 'parse_env_file', return_value=env_file_result
    )
    mocker.patch.object(
        docker_swarm_service,
        'format_environment',
        side_effect=lambda d: ['{0}={1}'.format(key, value) for key, value in d.items()],
    )
    # Test with env dict and file
    result = docker_swarm_service.get_docker_environment(
        env_dict, env_files=['dummypath']
    )
    assert result == expected_result
    # Test with env list and file
    result = docker_swarm_service.get_docker_environment(
        env_list,
        env_files=['dummypath']
    )
    assert result == expected_result
    # Test with env string and file
    result = docker_swarm_service.get_docker_environment(
        env_string, env_files=['dummypath']
    )
    assert result == expected_result
    # Test with empty env
    result = docker_swarm_service.get_docker_environment(
        [], env_files=None
    )
    assert result == []
    # Test with empty env_files
    result = docker_swarm_service.get_docker_environment(
        None, env_files=[]
    )
    assert result == []
def test_get_nanoseconds_from_raw_option(docker_swarm_service):
    """Raw durations may be None, a duration string, or already an integer."""
    convert = docker_swarm_service.get_nanoseconds_from_raw_option

    assert convert('test', None) is None
    # '1m30s535ms' is 90.535 seconds expressed in nanoseconds.
    assert convert('test', '1m30s535ms') == 90535000000
    # Integers are assumed to already be nanoseconds and pass through.
    assert convert('test', 10000000000) == 10000000000
    with pytest.raises(ValueError):
        convert('test', [])
def test_has_dict_changed(docker_swarm_service):
    """Table-driven checks of has_dict_changed(new, old)."""
    has_changed = docker_swarm_service.has_dict_changed

    # (new, old) pairs that must NOT be reported as changed. A None new
    # value means "not managed", and keys missing from new are ignored.
    unchanged_pairs = [
        ({"a": 1}, {"a": 1}),
        ({"a": 1}, {"a": 1, "b": 2}),
        (None, {"a": 2, "b": 2}),
        ({}, {}),
        (None, None),
        ({}, None),
        (None, {}),
    ]

    # (new, old) pairs that MUST be reported as changed.
    changed_pairs = [
        ({"a": 1}, {"a": 2, "b": 2}),
        ({"a": 1, "b": 1}, {"a": 1}),
        ({}, {"a": 2, "b": 2}),
        ({"a": 1}, {}),
        ({"a": 1}, None),
    ]

    for new_value, old_value in unchanged_pairs:
        assert not has_changed(new_value, old_value)
    for new_value, old_value in changed_pairs:
        assert has_changed(new_value, old_value)
def test_has_list_changed(docker_swarm_service):
    """Exercises has_list_changed() over scalar lists, strings, and lists
    of dictionaries, with and without sort_lists/sort_key."""

    # List comparisons without dictionaries
    # I could improve the indenting, but pycodestyle wants this instead
    assert not docker_swarm_service.has_list_changed(None, None)
    assert not docker_swarm_service.has_list_changed(None, [])
    assert not docker_swarm_service.has_list_changed(None, [1, 2])

    assert not docker_swarm_service.has_list_changed([], None)
    assert not docker_swarm_service.has_list_changed([], [])
    assert docker_swarm_service.has_list_changed([], [1, 2])

    assert docker_swarm_service.has_list_changed([1, 2], None)
    assert docker_swarm_service.has_list_changed([1, 2], [])
    assert docker_swarm_service.has_list_changed([1, 2, 3], [1, 2])
    assert docker_swarm_service.has_list_changed([1, 2], [1, 2, 3])

    # Check list sorting: by default order is ignored; with
    # sort_lists=False order is significant.
    assert not docker_swarm_service.has_list_changed([1, 2], [2, 1])
    assert docker_swarm_service.has_list_changed(
        [1, 2],
        [2, 1],
        sort_lists=False
    )

    # Check type matching
    assert docker_swarm_service.has_list_changed([None, 1], [2, 1])
    assert docker_swarm_service.has_list_changed([2, 1], [None, 1])
    assert docker_swarm_service.has_list_changed(
        "command --with args",
        ['command', '--with', 'args']
    )
    assert docker_swarm_service.has_list_changed(
        ['sleep', '3400'],
        [u'sleep', u'3600'],
        sort_lists=False
    )

    # List comparisons with dictionaries
    assert not docker_swarm_service.has_list_changed(
        [{'a': 1}],
        [{'a': 1}],
        sort_key='a'
    )

    assert not docker_swarm_service.has_list_changed(
        [{'a': 1}, {'a': 2}],
        [{'a': 1}, {'a': 2}],
        sort_key='a'
    )

    # Comparing dict lists without a sort_key must raise.
    with pytest.raises(Exception):
        docker_swarm_service.has_list_changed(
            [{'a': 1}, {'a': 2}],
            [{'a': 1}, {'a': 2}]
        )

    # List sort checking with sort key
    assert not docker_swarm_service.has_list_changed(
        [{'a': 1}, {'a': 2}],
        [{'a': 2}, {'a': 1}],
        sort_key='a'
    )
    assert docker_swarm_service.has_list_changed(
        [{'a': 1}, {'a': 2}],
        [{'a': 2}, {'a': 1}],
        sort_lists=False
    )

    assert docker_swarm_service.has_list_changed(
        [{'a': 1}, {'a': 2}, {'a': 3}],
        [{'a': 2}, {'a': 1}],
        sort_key='a'
    )
    assert docker_swarm_service.has_list_changed(
        [{'a': 1}, {'a': 2}],
        [{'a': 1}, {'a': 2}, {'a': 3}],
        sort_lists=False
    )

    # Additional dictionary elements: keys missing from the new dicts are
    # ignored, extra keys in the new dicts count as a change.
    assert not docker_swarm_service.has_list_changed(
        [
            {"src": 1, "dst": 2},
            {"src": 1, "dst": 2, "protocol": "udp"},
        ],
        [
            {"src": 1, "dst": 2, "protocol": "tcp"},
            {"src": 1, "dst": 2, "protocol": "udp"},
        ],
        sort_key='dst'
    )
    assert not docker_swarm_service.has_list_changed(
        [
            {"src": 1, "dst": 2, "protocol": "udp"},
            {"src": 1, "dst": 3, "protocol": "tcp"},
        ],
        [
            {"src": 1, "dst": 2, "protocol": "udp"},
            {"src": 1, "dst": 3, "protocol": "tcp"},
        ],
        sort_key='dst'
    )
    assert docker_swarm_service.has_list_changed(
        [
            {"src": 1, "dst": 2, "protocol": "udp"},
            {"src": 1, "dst": 2},
            {"src": 3, "dst": 4},
        ],
        [
            {"src": 1, "dst": 3, "protocol": "udp"},
            {"src": 1, "dst": 2, "protocol": "tcp"},
            {"src": 3, "dst": 4, "protocol": "tcp"},
        ],
        sort_key='dst'
    )
    assert docker_swarm_service.has_list_changed(
        [
            {"src": 1, "dst": 3, "protocol": "tcp"},
            {"src": 1, "dst": 2, "protocol": "udp"},
        ],
        [
            {"src": 1, "dst": 2, "protocol": "tcp"},
            {"src": 1, "dst": 2, "protocol": "udp"},
        ],
        sort_key='dst'
    )
    assert docker_swarm_service.has_list_changed(
        [
            {"src": 1, "dst": 2, "protocol": "udp"},
            {"src": 1, "dst": 2, "protocol": "tcp", "extra": {"test": "foo"}},
        ],
        [
            {"src": 1, "dst": 2, "protocol": "udp"},
            {"src": 1, "dst": 2, "protocol": "tcp"},
        ],
        sort_key='dst'
    )

    # An explicitly empty 'aliases' equals no 'aliases' key at all.
    assert not docker_swarm_service.has_list_changed(
        [{'id': '123', 'aliases': []}],
        [{'id': '123'}],
        sort_key='id'
    )
def test_have_networks_changed(docker_swarm_service):
    """Exercises have_networks_changed() comparing network lists by 'id',
    including alias ordering and per-network 'options' handling."""
    assert not docker_swarm_service.have_networks_changed(
        None,
        None
    )

    assert not docker_swarm_service.have_networks_changed(
        [],
        None
    )

    assert not docker_swarm_service.have_networks_changed(
        [{'id': 1}],
        [{'id': 1}]
    )

    assert docker_swarm_service.have_networks_changed(
        [{'id': 1}],
        [{'id': 1}, {'id': 2}]
    )

    assert not docker_swarm_service.have_networks_changed(
        [{'id': 1}, {'id': 2}],
        [{'id': 1}, {'id': 2}]
    )

    # Network order is not significant.
    assert not docker_swarm_service.have_networks_changed(
        [{'id': 1}, {'id': 2}],
        [{'id': 2}, {'id': 1}]
    )

    # An explicitly empty alias list equals no aliases at all.
    assert not docker_swarm_service.have_networks_changed(
        [
            {'id': 1},
            {'id': 2, 'aliases': []}
        ],
        [
            {'id': 1},
            {'id': 2}
        ]
    )

    assert docker_swarm_service.have_networks_changed(
        [
            {'id': 1},
            {'id': 2, 'aliases': ['alias1']}
        ],
        [
            {'id': 1},
            {'id': 2}
        ]
    )

    assert docker_swarm_service.have_networks_changed(
        [
            {'id': 1},
            {'id': 2, 'aliases': ['alias1', 'alias2']}
        ],
        [
            {'id': 1},
            {'id': 2, 'aliases': ['alias1']}
        ]
    )

    assert not docker_swarm_service.have_networks_changed(
        [
            {'id': 1},
            {'id': 2, 'aliases': ['alias1', 'alias2']}
        ],
        [
            {'id': 1},
            {'id': 2, 'aliases': ['alias1', 'alias2']}
        ]
    )

    # Alias order is not significant either.
    assert not docker_swarm_service.have_networks_changed(
        [
            {'id': 1},
            {'id': 2, 'aliases': ['alias1', 'alias2']}
        ],
        [
            {'id': 1},
            {'id': 2, 'aliases': ['alias2', 'alias1']}
        ]
    )

    # Empty 'options' equals no options at all.
    assert not docker_swarm_service.have_networks_changed(
        [
            {'id': 1, 'options': {}},
            {'id': 2, 'aliases': ['alias1', 'alias2']}],
        [
            {'id': 1},
            {'id': 2, 'aliases': ['alias2', 'alias1']}
        ]
    )

    assert not docker_swarm_service.have_networks_changed(
        [
            {'id': 1, 'options': {'option1': 'value1'}},
            {'id': 2, 'aliases': ['alias1', 'alias2']}],
        [
            {'id': 1, 'options': {'option1': 'value1'}},
            {'id': 2, 'aliases': ['alias2', 'alias1']}
        ]
    )

    # A differing option value is a change.
    assert docker_swarm_service.have_networks_changed(
        [
            {'id': 1, 'options': {'option1': 'value1'}},
            {'id': 2, 'aliases': ['alias1', 'alias2']}],
        [
            {'id': 1, 'options': {'option1': 'value2'}},
            {'id': 2, 'aliases': ['alias2', 'alias1']}
        ]
    )
def test_get_docker_networks(docker_swarm_service):
    """Exercises get_docker_networks(): name->id resolution, propagation of
    'aliases'/'options', and validation of malformed inputs."""
    network_names = [
        'network_1',
        'network_2',
        'network_3',
        'network_4',
    ]
    # Networks may be given as bare names or as dicts with extra settings.
    networks = [
        network_names[0],
        {'name': network_names[1]},
        {'name': network_names[2], 'aliases': ['networkalias1']},
        {'name': network_names[3], 'aliases': ['networkalias2'], 'options': {'foo': 'bar'}},
    ]
    # Lookup table used to translate network names into IDs.
    network_ids = {
        network_names[0]: '1',
        network_names[1]: '2',
        network_names[2]: '3',
        network_names[3]: '4',
    }
    parsed_networks = docker_swarm_service.get_docker_networks(
        networks,
        network_ids
    )
    assert len(parsed_networks) == 4
    for i, network in enumerate(parsed_networks):
        # Each name must have been replaced by the corresponding id.
        assert 'name' not in network
        assert 'id' in network
        expected_name = network_names[i]
        assert network['id'] == network_ids[expected_name]
        if i == 2:
            assert network['aliases'] == ['networkalias1']
        if i == 3:
            assert network['aliases'] == ['networkalias2']
        if i == 3:
            assert 'foo' in network['options']
    # Test missing name
    with pytest.raises(TypeError):
        docker_swarm_service.get_docker_networks([{'invalid': 'err'}], {'err': 1})
    # test for invalid aliases type
    with pytest.raises(TypeError):
        docker_swarm_service.get_docker_networks(
            [{'name': 'test', 'aliases': 1}],
            {'test': 1}
        )
    # Test invalid aliases elements
    with pytest.raises(TypeError):
        docker_swarm_service.get_docker_networks(
            [{'name': 'test', 'aliases': [1]}],
            {'test': 1}
        )
    # Test for invalid options type
    with pytest.raises(TypeError):
        docker_swarm_service.get_docker_networks(
            [{'name': 'test', 'options': 1}],
            {'test': 1}
        )
    # Test for invalid networks type
    with pytest.raises(TypeError):
        docker_swarm_service.get_docker_networks(
            1,
            {'test': 1}
        )
    # Test for non existing networks
    with pytest.raises(ValueError):
        docker_swarm_service.get_docker_networks(
            [{'name': 'idontexist'}],
            {'test': 1}
        )
    # Test empty values
    assert docker_swarm_service.get_docker_networks([], {}) == []
    assert docker_swarm_service.get_docker_networks(None, {}) is None
    # Test invalid options
    with pytest.raises(TypeError):
        docker_swarm_service.get_docker_networks(
            [{'name': 'test', 'nonexisting_option': 'foo'}],
            {'test': '1'}
        )

View file

@ -0,0 +1,36 @@
# Copyright (c) 2018 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import pytest
from ansible_collections.community.general.plugins.modules.cloud.docker import docker_volume
from ansible_collections.community.general.plugins.module_utils.docker import common
# Every test in this module relies on the patch_ansible_module fixture to
# inject module arguments.
pytestmark = pytest.mark.usefixtures('patch_ansible_module')

# Minimal module arguments for creating a volume.
TESTCASE_DOCKER_VOLUME = [
    {
        'name': 'daemon_config',
        'state': 'present'
    }
]
@pytest.mark.parametrize('patch_ansible_module', TESTCASE_DOCKER_VOLUME, indirect=['patch_ansible_module'])
def test_create_volume_on_invalid_docker_version(mocker, capfd):
    """The module must fail cleanly when the installed Docker SDK is too old."""
    # Pretend the Docker SDK is importable but at an unsupported version.
    mocker.patch.object(common, 'HAS_DOCKER_PY', True)
    mocker.patch.object(common, 'docker_version', '1.8.0')

    # fail_json() exits via SystemExit; the JSON result goes to stdout.
    with pytest.raises(SystemExit):
        docker_volume.main()

    out, dummy = capfd.readouterr()
    results = json.loads(out)
    assert results['failed']
    assert 'Error: Docker SDK for Python version is 1.8.0 ' in results['msg']
    assert 'Minimum version required is 1.10.0.' in results['msg']

View file

@ -0,0 +1,61 @@
import unittest
from ansible_collections.community.general.plugins.modules.cloud.google.gce_tag import _get_changed_items, _intersect_items, _union_items
class TestGCETag(unittest.TestCase):
    """Unit tests for gce_tag module."""

    def test_union_items(self):
        """
        Combine items in both lists
        removing duplicates.
        """
        listA = [1, 2, 3, 4, 5, 8, 9]
        listB = [1, 2, 3, 4, 5, 6, 7]
        want = [1, 2, 3, 4, 5, 6, 7, 8, 9]
        got = _union_items(listA, listB)
        self.assertEqual(want, got)

    def test_intersect_items(self):
        """
        Items present in both lists.
        """
        listA = [1, 2, 3, 4, 5, 8, 9]
        listB = [1, 2, 3, 4, 5, 6, 7]
        want = [1, 2, 3, 4, 5]
        got = _intersect_items(listA, listB)
        self.assertEqual(want, got)

        # tags removed
        new_tags = ['one', 'two']
        existing_tags = ['two']
        want = ['two']  # only remove the tag that was present
        got = _intersect_items(existing_tags, new_tags)
        self.assertEqual(want, got)

    def test_get_changed_items(self):
        """
        All the items from left list that don't match
        any item from the right list.
        """
        listA = [1, 2, 3, 4, 5, 8, 9]
        listB = [1, 2, 3, 4, 5, 6, 7]
        want = [8, 9]
        got = _get_changed_items(listA, listB)
        self.assertEqual(want, got)

        # simulate new tags added
        tags_to_add = ['one', 'two']
        existing_tags = ['two']
        want = ['one']
        got = _get_changed_items(tags_to_add, existing_tags)
        self.assertEqual(want, got)

        # simulate removing tags
        # specifying one tag on right that doesn't exist
        tags_to_remove = ['one', 'two']
        existing_tags = ['two', 'three']
        want = ['three']
        got = _get_changed_items(existing_tags, tags_to_remove)
        self.assertEqual(want, got)

View file

@ -0,0 +1,30 @@
import unittest
from ansible_collections.community.general.plugins.modules.cloud.google.gcp_forwarding_rule import _build_global_forwarding_rule_dict
class TestGCPFowardingRule(unittest.TestCase):
    """Unit tests for gcp_fowarding_rule module.

    NOTE(review): "Fowarding" typo kept; renaming would change the test id.
    """

    # Module params used as input for the dict builder under test.
    params_dict = {
        'forwarding_rule_name': 'foo_fowarding_rule_name',
        'address': 'foo_external_address',
        'target': 'foo_targetproxy',
        'region': 'global',
        'port_range': 80,
        'protocol': 'TCP',
        'state': 'present',
    }

    def test__build_global_forwarding_rule_dict(self):
        # Bare address/target names must be expanded into full GCP URLs
        # scoped to the given project.
        expected = {
            'name': 'foo_fowarding_rule_name',
            'IPAddress': 'https://www.googleapis.com/compute/v1/projects/my-project/global/addresses/foo_external_address',
            'target': 'https://www.googleapis.com/compute/v1/projects/my-project/global/targetHttpProxies/foo_targetproxy',
            'region': 'global',
            'portRange': 80,
            'IPProtocol': 'TCP',
        }
        actual = _build_global_forwarding_rule_dict(
            self.params_dict, 'my-project')
        self.assertEqual(expected, actual)

View file

@ -0,0 +1,164 @@
import unittest
from ansible_collections.community.general.plugins.modules.cloud.google.gcp_url_map import _build_path_matchers, _build_url_map_dict
class TestGCPUrlMap(unittest.TestCase):
    """Unit tests for gcp_url_map module."""

    # Module params used as input for the url-map dict builder under test.
    params_dict = {
        'url_map_name': 'foo_url_map_name',
        'description': 'foo_url_map description',
        'host_rules': [
            {
                'description': 'host rules description',
                'hosts': [
                    'www.example.com',
                    'www2.example.com'
                ],
                'path_matcher': 'host_rules_path_matcher'
            }
        ],
        'path_matchers': [
            {
                'name': 'path_matcher_one',
                'description': 'path matcher one',
                'defaultService': 'bes-pathmatcher-one-default',
                'pathRules': [
                    {
                        'service': 'my-one-bes',
                        'paths': [
                            '/',
                            '/aboutus'
                        ]
                    }
                ]
            },
            {
                'name': 'path_matcher_two',
                'description': 'path matcher two',
                'defaultService': 'bes-pathmatcher-two-default',
                'pathRules': [
                    {
                        'service': 'my-two-bes',
                        'paths': [
                            '/webapp',
                            '/graphs'
                        ]
                    }
                ]
            }
        ]
    }

    def test__build_path_matchers(self):
        # Bare backend-service names must be expanded into full GCP URLs
        # scoped to the given project.
        input_list = [
            {
                'defaultService': 'bes-pathmatcher-one-default',
                'description': 'path matcher one',
                'name': 'path_matcher_one',
                'pathRules': [
                    {
                        'paths': [
                            '/',
                            '/aboutus'
                        ],
                        'service': 'my-one-bes'
                    }
                ]
            },
            {
                'defaultService': 'bes-pathmatcher-two-default',
                'description': 'path matcher two',
                'name': 'path_matcher_two',
                'pathRules': [
                    {
                        'paths': [
                            '/webapp',
                            '/graphs'
                        ],
                        'service': 'my-two-bes'
                    }
                ]
            }
        ]
        expected = [
            {
                'defaultService': 'https://www.googleapis.com/compute/v1/projects/my-project/global/backendServices/bes-pathmatcher-one-default',
                'description': 'path matcher one',
                'name': 'path_matcher_one',
                'pathRules': [
                    {
                        'paths': [
                            '/',
                            '/aboutus'
                        ],
                        'service': 'https://www.googleapis.com/compute/v1/projects/my-project/global/backendServices/my-one-bes'
                    }
                ]
            },
            {
                'defaultService': 'https://www.googleapis.com/compute/v1/projects/my-project/global/backendServices/bes-pathmatcher-two-default',
                'description': 'path matcher two',
                'name': 'path_matcher_two',
                'pathRules': [
                    {
                        'paths': [
                            '/webapp',
                            '/graphs'
                        ],
                        'service': 'https://www.googleapis.com/compute/v1/projects/my-project/global/backendServices/my-two-bes'
                    }
                ]
            }
        ]
        actual = _build_path_matchers(input_list, 'my-project')
        self.assertEqual(expected, actual)

    def test__build_url_map_dict(self):
        # snake_case module params must be mapped to the camelCase keys the
        # GCP API expects (host_rules -> hostRules, etc.).
        expected = {
            'description': 'foo_url_map description',
            'hostRules': [
                {
                    'description': 'host rules description',
                    'hosts': [
                        'www.example.com',
                        'www2.example.com'
                    ],
                    'pathMatcher': 'host_rules_path_matcher'
                }
            ],
            'name': 'foo_url_map_name',
            'pathMatchers': [
                {
                    'defaultService': 'https://www.googleapis.com/compute/v1/projects/my-project/global/backendServices/bes-pathmatcher-one-default',
                    'description': 'path matcher one',
                    'name': 'path_matcher_one',
                    'pathRules': [
                        {
                            'paths': [
                                '/',
                                '/aboutus'
                            ],
                            'service': 'https://www.googleapis.com/compute/v1/projects/my-project/global/backendServices/my-one-bes'
                        }
                    ]
                },
                {
                    'defaultService': 'https://www.googleapis.com/compute/v1/projects/my-project/global/backendServices/bes-pathmatcher-two-default',
                    'description': 'path matcher two',
                    'name': 'path_matcher_two',
                    'pathRules': [
                        {
                            'paths': [
                                '/webapp',
                                '/graphs'
                            ],
                            'service': 'https://www.googleapis.com/compute/v1/projects/my-project/global/backendServices/my-two-bes'
                        }
                    ]
                }
            ]
        }
        actual = _build_url_map_dict(self.params_dict, 'my-project')
        self.assertEqual(expected, actual)

View file

@ -0,0 +1,73 @@
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
from ansible_collections.community.general.tests.unit.compat.mock import MagicMock
from ansible_collections.community.kubernetes.plugins.module_utils.common import K8sAnsibleMixin
from ansible_collections.community.kubernetes.plugins.module_utils.raw import KubernetesRawModule
from ansible_collections.community.general.plugins.module_utils.kubevirt import KubeVirtRawModule
import openshift.dynamic
RESOURCE_DEFAULT_ARGS = {'api_version': 'v1alpha3', 'group': 'kubevirt.io',
'prefix': 'apis', 'namespaced': True}
class AnsibleExitJson(Exception):
    """Raised in place of module.exit_json and caught by the test case.

    Every keyword argument is exposed both as an attribute and through
    dict-style subscript access.
    """

    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    def __getitem__(self, attr):
        return getattr(self, attr)
class AnsibleFailJson(Exception):
    """Raised in place of module.fail_json and caught by the test case.

    Every keyword argument is exposed both as an attribute and through
    dict-style subscript access.
    """

    def __init__(self, **kwargs):
        for key, value in kwargs.items():
            setattr(self, key, value)

    def __getitem__(self, attr):
        return getattr(self, attr)
def exit_json(*args, **kwargs):
    """Stand-in for AnsibleModule.exit_json that raises AnsibleExitJson.

    Marks the call as successful and defaults 'changed' to False when the
    caller did not supply it, mirroring AnsibleModule behaviour.
    """
    kwargs['success'] = True
    kwargs.setdefault('changed', False)
    raise AnsibleExitJson(**kwargs)
def fail_json(*args, **kwargs):
    """Stand-in for AnsibleModule.fail_json that raises AnsibleFailJson.

    Marks the call as failed so the test case can assert on the outcome.
    """
    kwargs['success'] = False
    raise AnsibleFailJson(**kwargs)
@pytest.fixture()
def base_fixture(monkeypatch):
    """Common mock scaffolding shared by the kubevirt module tests."""
    monkeypatch.setattr(KubernetesRawModule, "exit_json", exit_json)
    monkeypatch.setattr(KubernetesRawModule, "fail_json", fail_json)

    # Create mock methods in Resource directly, otherwise dyn client
    # tries binding those to corresponding methods in DynamicClient
    # (with partial()), which is more problematic to intercept
    for method_name in ('get', 'create', 'delete', 'patch', 'search', 'watch'):
        setattr(openshift.dynamic.Resource, method_name, MagicMock())

    # Globally mock some methods, since all tests will use this
    KubernetesRawModule.patch_resource = MagicMock()
    KubernetesRawModule.patch_resource.return_value = ({}, None)
    K8sAnsibleMixin.get_api_client = MagicMock()
    K8sAnsibleMixin.get_api_client.return_value = None
    K8sAnsibleMixin.find_resource = MagicMock()
    KubeVirtRawModule.find_supported_resource = MagicMock()

View file

@ -0,0 +1,75 @@
import pytest
openshiftdynamic = pytest.importorskip("openshift.dynamic")
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args
from .kubevirt_fixtures import base_fixture, RESOURCE_DEFAULT_ARGS, AnsibleExitJson
from ansible_collections.community.kubernetes.plugins.module_utils.raw import KubernetesRawModule
from ansible_collections.community.general.plugins.modules.cloud.kubevirt import kubevirt_rs as mymodule
KIND = 'VirtualMachineInstanceReplicaSet'
@pytest.mark.usefixtures("base_fixture")
@pytest.mark.parametrize("_replicas, _changed", ((1, True),
                                                 (3, True),
                                                 (2, False),
                                                 (5, True),))
def test_scale_rs_nowait(_replicas, _changed):
    """Scaling a replica set without waiting reports changed=True exactly
    when the requested replica count differs from the mocked current
    count of 2."""
    _name = 'test-rs'
    # Desired state:
    args = dict(name=_name, namespace='vms', replicas=_replicas, wait=False)
    set_module_args(args)

    # Mock pre-change state (cluster currently has 2 replicas):
    resource_args = dict(kind=KIND, **RESOURCE_DEFAULT_ARGS)
    mymodule.KubeVirtVMIRS.find_supported_resource.return_value = openshiftdynamic.Resource(**resource_args)
    res_inst = openshiftdynamic.ResourceInstance('', dict(kind=KIND, metadata={'name': _name}, spec={'replicas': 2}))
    openshiftdynamic.Resource.get.return_value = res_inst
    openshiftdynamic.Resource.search.return_value = [res_inst]

    # Final state, after patching the object
    KubernetesRawModule.patch_resource.return_value = dict(kind=KIND, metadata={'name': _name},
                                                           spec={'replicas': _replicas}), None

    # Run code (exit_json is patched to raise AnsibleExitJson):
    with pytest.raises(AnsibleExitJson) as result:
        mymodule.KubeVirtVMIRS().execute_module()

    # Verify result:
    assert result.value['changed'] == _changed
@pytest.mark.usefixtures("base_fixture")
@pytest.mark.parametrize("_replicas, _success", ((1, False),
                                                 (2, False),
                                                 (5, True),))
def test_scale_rs_wait(_replicas, _success):
    """Scaling with wait=True succeeds only when the watched object ends
    up with the desired number of ready replicas (5)."""
    _name = 'test-rs'
    # Desired state:
    args = dict(name=_name, namespace='vms', replicas=5, wait=True)
    set_module_args(args)

    # Mock pre-change state (cluster currently has 2 replicas):
    resource_args = dict(kind=KIND, **RESOURCE_DEFAULT_ARGS)
    mymodule.KubeVirtVMIRS.find_supported_resource.return_value = openshiftdynamic.Resource(**resource_args)
    res_inst = openshiftdynamic.ResourceInstance('', dict(kind=KIND, metadata={'name': _name}, spec={'replicas': 2}))
    openshiftdynamic.Resource.get.return_value = res_inst
    openshiftdynamic.Resource.search.return_value = [res_inst]

    # ~Final state, after patching the object (`replicas` match desired state)
    KubernetesRawModule.patch_resource.return_value = dict(kind=KIND, name=_name, metadata={'name': _name},
                                                           spec={'replicas': 5}), None

    # Final final state, as returned by resource.watch(); readyReplicas is
    # parametrized so the wait either succeeds or times out.
    final_obj = dict(metadata=dict(name=_name), status=dict(readyReplicas=_replicas), **resource_args)
    event = openshiftdynamic.ResourceInstance(None, final_obj)
    openshiftdynamic.Resource.watch.return_value = [dict(object=event)]

    # Run code (broad Exception: either AnsibleExitJson or AnsibleFailJson
    # may be raised depending on whether the wait succeeded):
    with pytest.raises(Exception) as result:
        mymodule.KubeVirtVMIRS().execute_module()

    # Verify result:
    assert result.value['success'] == _success

View file

@ -0,0 +1,110 @@
import pytest
openshiftdynamic = pytest.importorskip("openshift.dynamic")
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args
from .kubevirt_fixtures import base_fixture, RESOURCE_DEFAULT_ARGS, AnsibleExitJson
from ansible_collections.community.general.plugins.module_utils.kubevirt import KubeVirtRawModule
from ansible_collections.community.general.plugins.modules.cloud.kubevirt import kubevirt_vm as mymodule
KIND = 'VirtulMachine'
@pytest.mark.usefixtures("base_fixture")
def test_create_vm_with_multus_nowait():
    """Creating a VM with a multus secondary network (no wait) reports
    changed=True and method 'create'."""
    # Desired state:
    args = dict(
        state='present', name='testvm',
        namespace='vms',
        interfaces=[
            {'bridge': {}, 'name': 'default', 'network': {'pod': {}}},
            {'bridge': {}, 'name': 'mynet', 'network': {'multus': {'networkName': 'mynet'}}},
        ],
        wait=False,
    )
    set_module_args(args)

    # State as "returned" by the "k8s cluster":
    resource_args = dict(kind=KIND, **RESOURCE_DEFAULT_ARGS)
    KubeVirtRawModule.find_supported_resource.return_value = openshiftdynamic.Resource(**resource_args)
    openshiftdynamic.Resource.get.return_value = None  # Object doesn't exist in the cluster

    # Run code (exit_json is patched to raise AnsibleExitJson):
    with pytest.raises(AnsibleExitJson) as result:
        mymodule.KubeVirtVM().execute_module()

    # Verify result:
    assert result.value['changed']
    assert result.value['method'] == 'create'
@pytest.mark.usefixtures("base_fixture")
@pytest.mark.parametrize("_wait", (False, True))
def test_vm_is_absent(_wait):
    """Requesting state=absent for a VM that does not exist returns an
    empty 'kubevirt_vm' fact, regardless of the wait flag."""
    # Desired state:
    args = dict(
        state='absent', name='testvmi',
        namespace='vms',
        wait=_wait,
    )
    set_module_args(args)

    # State as "returned" by the "k8s cluster":
    resource_args = dict(kind=KIND, **RESOURCE_DEFAULT_ARGS)
    KubeVirtRawModule.find_supported_resource.return_value = openshiftdynamic.Resource(**resource_args)
    openshiftdynamic.Resource.get.return_value = None  # Object doesn't exist in the cluster

    # Run code (exit_json is patched to raise AnsibleExitJson):
    with pytest.raises(AnsibleExitJson) as result:
        mymodule.KubeVirtVM().execute_module()

    # Verify result:
    assert not result.value['kubevirt_vm']
    assert result.value['method'] == 'delete'
    # Note: nothing actually gets deleted, as we mock that there's no object in the cluster present,
    # so if the method changes to something other than 'delete' at some point, that's fine
@pytest.mark.usefixtures("base_fixture")
def test_vmpreset_create():
    """Creating a VM instance preset reports changed=True and method
    'create'."""
    # NOTE(review): 'VirtulMachineInstancePreset' looks like a typo for
    # 'VirtualMachineInstancePreset'; harmless here since all cluster
    # interactions are mocked, but worth confirming upstream.
    KIND = 'VirtulMachineInstancePreset'
    # Desired state:
    args = dict(state='present', name='testvmipreset', namespace='vms', memory='1024Mi', wait=False)
    set_module_args(args)

    # State as "returned" by the "k8s cluster":
    resource_args = dict(kind=KIND, **RESOURCE_DEFAULT_ARGS)
    KubeVirtRawModule.find_supported_resource.return_value = openshiftdynamic.Resource(**resource_args)
    openshiftdynamic.Resource.get.return_value = None  # Object doesn't exist in the cluster

    # Run code (exit_json is patched to raise AnsibleExitJson):
    with pytest.raises(AnsibleExitJson) as result:
        mymodule.KubeVirtVM().execute_module()

    # Verify result:
    assert result.value['changed']
    assert result.value['method'] == 'create'
@pytest.mark.usefixtures("base_fixture")
def test_vmpreset_is_absent():
    """Requesting state=absent for a preset that does not exist returns an
    empty 'kubevirt_vm' fact."""
    # NOTE(review): 'VirtulMachineInstancePreset' looks like a typo for
    # 'VirtualMachineInstancePreset'; harmless here since all cluster
    # interactions are mocked, but worth confirming upstream.
    KIND = 'VirtulMachineInstancePreset'
    # Desired state:
    args = dict(state='absent', name='testvmipreset', namespace='vms')
    set_module_args(args)

    # State as "returned" by the "k8s cluster":
    resource_args = dict(kind=KIND, **RESOURCE_DEFAULT_ARGS)
    KubeVirtRawModule.find_supported_resource.return_value = openshiftdynamic.Resource(**resource_args)
    openshiftdynamic.Resource.get.return_value = None  # Object doesn't exist in the cluster

    # Run code (exit_json is patched to raise AnsibleExitJson):
    with pytest.raises(AnsibleExitJson) as result:
        mymodule.KubeVirtVM().execute_module()

    # Verify result:
    assert not result.value['kubevirt_vm']
    assert result.value['method'] == 'delete'
    # Note: nothing actually gets deleted, as we mock that there's no object in the cluster present,
    # so if the method changes to something other than 'delete' at some point, that's fine

View file

@ -0,0 +1,80 @@
import pytest
@pytest.fixture
def api_key(monkeypatch):
    """Provide a dummy LINODE_API_KEY in the environment."""
    monkeypatch.setenv('LINODE_API_KEY', 'foobar')
@pytest.fixture
def auth(monkeypatch):
    """Stub the linode API echo test so authentication always 'succeeds'."""
    def patched_test_echo(dummy):
        return []
    monkeypatch.setattr('linode.api.Api.test_echo', patched_test_echo)
@pytest.fixture
def access_token(monkeypatch):
    """Provide a dummy LINODE_ACCESS_TOKEN in the environment."""
    monkeypatch.setenv('LINODE_ACCESS_TOKEN', 'barfoo')
@pytest.fixture
def no_access_token_in_env(monkeypatch):
    """Ensure LINODE_ACCESS_TOKEN is absent from the environment.

    Uses monkeypatch.delenv(raising=False) instead of the manual
    try/except KeyError dance around a bare delenv call.
    """
    monkeypatch.delenv('LINODE_ACCESS_TOKEN', raising=False)
@pytest.fixture
def default_args():
    """Minimal valid module arguments shared by the linode tests."""
    return {'state': 'present', 'label': 'foo'}
@pytest.fixture
def mock_linode():
    """Return a minimal fake linode_api4 Instance.

    Only the pieces the module under test touches are provided: a no-op
    delete() and a _raw_json property with representative (entirely fake)
    instance data.
    """
    class Linode():
        def delete(self, *args, **kwargs):
            pass

        @property
        def _raw_json(self):
            # Fake API payload; values (including the IP) are made up.
            return {
                "alerts": {
                    "cpu": 90,
                    "io": 10000,
                    "network_in": 10,
                    "network_out": 10,
                    "transfer_quota": 80
                },
                "backups": {
                    "enabled": False,
                    "schedule": {
                        "day": None,
                        "window": None,
                    }
                },
                "created": "2018-09-26T08:12:33",
                "group": "Foobar Group",
                "hypervisor": "kvm",
                "id": 10480444,
                "image": "linode/centos7",
                "ipv4": [
                    "130.132.285.233"
                ],
                "ipv6": "2a82:7e00::h03c:46ff:fe04:5cd2/64",
                "label": "lin-foo",
                "region": "eu-west",
                "specs": {
                    "disk": 25600,
                    "memory": 1024,
                    "transfer": 1000,
                    "vcpus": 1
                },
                "status": "running",
                "tags": [],
                "type": "g6-nanode-1",
                "updated": "2018-09-26T10:10:14",
                "watchdog_enabled": True
            }
    return Linode()

View file

@ -0,0 +1,15 @@
from __future__ import (absolute_import, division, print_function)
import pytest
from ansible_collections.community.general.plugins.modules.cloud.linode import linode
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args
if not linode.HAS_LINODE:
pytestmark = pytest.mark.skip('test_linode.py requires the `linode-python` module')
def test_name_is_a_required_parameter(api_key, auth):
    """Calling the module without 'name' must abort via SystemExit."""
    with pytest.raises(SystemExit):
        set_module_args({})
        linode.main()

View file

@ -0,0 +1,323 @@
from __future__ import (absolute_import, division, print_function)
import json
import os
import sys
import pytest
linode_apiv4 = pytest.importorskip('linode_api4')
mandatory_py_version = pytest.mark.skipif(
sys.version_info < (2, 7),
reason='The linode_api4 dependency requires python2.7 or higher'
)
from linode_api4.errors import ApiError as LinodeApiError
from linode_api4 import LinodeClient
from ansible_collections.community.general.plugins.modules.cloud.linode import linode_v4
from ansible_collections.community.general.plugins.module_utils.linode import get_user_agent
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args
from ansible_collections.community.general.tests.unit.compat import mock
def test_mandatory_state_is_validated(capfd):
    """Missing 'state' must fail with a message naming the parameter."""
    with pytest.raises(SystemExit):
        set_module_args({'label': 'foo'})
        linode_v4.initialise_module()

    out, err = capfd.readouterr()
    results = json.loads(out)

    assert all(txt in results['msg'] for txt in ('state', 'required'))
    assert results['failed'] is True
def test_mandatory_label_is_validated(capfd):
    """Missing 'label' must fail with a message naming the parameter."""
    with pytest.raises(SystemExit):
        set_module_args({'state': 'present'})
        linode_v4.initialise_module()

    out, err = capfd.readouterr()
    results = json.loads(out)

    assert all(txt in results['msg'] for txt in ('label', 'required'))
    assert results['failed'] is True
def test_mandatory_access_token_is_validated(default_args,
                                             no_access_token_in_env,
                                             capfd):
    """With no token in env or params, initialisation must fail and the
    message must name 'access_token'."""
    with pytest.raises(SystemExit):
        set_module_args(default_args)
        linode_v4.initialise_module()

    out, err = capfd.readouterr()
    results = json.loads(out)

    assert results['failed'] is True
    assert all(txt in results['msg'] for txt in (
        'missing',
        'required',
        'access_token',
    ))
def test_mandatory_access_token_passed_in_env(default_args,
                                              access_token):
    """The token is picked up from LINODE_ACCESS_TOKEN when not given as
    a module parameter."""
    set_module_args(default_args)

    try:
        module = linode_v4.initialise_module()
    except SystemExit:
        pytest.fail("'access_token' is passed in environment")

    now_set_token = module.params['access_token']
    assert now_set_token == os.environ['LINODE_ACCESS_TOKEN']
def test_mandatory_access_token_passed_in_as_parameter(default_args,
                                                       no_access_token_in_env):
    """An explicit 'access_token' module parameter satisfies validation
    even when the environment variable is absent."""
    default_args.update({'access_token': 'foo'})
    set_module_args(default_args)

    try:
        module = linode_v4.initialise_module()
    except SystemExit:
        pytest.fail("'access_token' is passed in as parameter")

    assert module.params['access_token'] == 'foo'
def test_instance_by_label_cannot_authenticate(capfd, access_token,
                                               default_args):
    """An API error while querying instances must fail the module with a
    helpful message."""
    set_module_args(default_args)
    module = linode_v4.initialise_module()
    client = LinodeClient(module.params['access_token'])

    target = 'linode_api4.linode_client.LinodeGroup.instances'
    with mock.patch(target, side_effect=LinodeApiError('foo')):
        with pytest.raises(SystemExit):
            linode_v4.maybe_instance_from_label(module, client)

    out, err = capfd.readouterr()
    results = json.loads(out)

    assert results['failed'] is True
    assert 'Unable to query the Linode API' in results['msg']
def test_no_instances_found_with_label_gives_none(default_args,
                                                  access_token):
    """maybe_instance_from_label returns None when no instance matches."""
    set_module_args(default_args)
    module = linode_v4.initialise_module()
    client = LinodeClient(module.params['access_token'])

    target = 'linode_api4.linode_client.LinodeGroup.instances'
    with mock.patch(target, return_value=[]):
        result = linode_v4.maybe_instance_from_label(module, client)

    assert result is None
def test_optional_region_is_validated(default_args, capfd, access_token):
    """'region' is required together with 'type' and 'image'; omitting it
    must fail with a message naming it."""
    default_args.update({'type': 'foo', 'image': 'bar'})
    with pytest.raises(SystemExit):
        set_module_args(default_args)
        linode_v4.initialise_module()

    out, err = capfd.readouterr()
    results = json.loads(out)

    assert results['failed'] is True
    assert all(txt in results['msg'] for txt in (
        'required',
        'together',
        'region'
    ))
def test_optional_type_is_validated(default_args, capfd, access_token):
    """'type' is required together with 'region' and 'image'; omitting it
    must fail with a message naming it."""
    default_args.update({'region': 'foo', 'image': 'bar'})
    with pytest.raises(SystemExit):
        set_module_args(default_args)
        linode_v4.initialise_module()

    out, err = capfd.readouterr()
    results = json.loads(out)

    assert results['failed'] is True
    assert all(txt in results['msg'] for txt in (
        'required',
        'together',
        'type'
    ))
def test_optional_image_is_validated(default_args, capfd, access_token):
    """'image' is required together with 'type' and 'region'; omitting it
    must fail with a message naming it."""
    default_args.update({'type': 'foo', 'region': 'bar'})
    with pytest.raises(SystemExit):
        set_module_args(default_args)
        linode_v4.initialise_module()

    out, err = capfd.readouterr()
    results = json.loads(out)

    assert results['failed'] is True
    assert all(txt in results['msg'] for txt in (
        'required',
        'together',
        'image'
    ))
def test_instance_already_created(default_args,
                                  mock_linode,
                                  capfd,
                                  access_token):
    """If an instance with the label already exists, the module exits
    unchanged and never leaks a root password."""
    default_args.update({
        'type': 'foo',
        'region': 'bar',
        'image': 'baz'
    })
    set_module_args(default_args)

    target = 'linode_api4.linode_client.LinodeGroup.instances'
    with mock.patch(target, return_value=[mock_linode]):
        with pytest.raises(SystemExit) as sys_exit_exc:
            linode_v4.main()

    assert sys_exit_exc.value.code == 0

    out, err = capfd.readouterr()
    results = json.loads(out)

    assert results['changed'] is False
    assert 'root_password' not in results['instance']
    assert (
        results['instance']['label'] ==
        mock_linode._raw_json['label']
    )
def test_instance_to_be_created_without_root_pass(default_args,
                                                  mock_linode,
                                                  capfd,
                                                  access_token):
    """Creating an instance without supplying root_pass must return the
    generated password in the result."""
    default_args.update({
        'type': 'foo',
        'region': 'bar',
        'image': 'baz'
    })
    set_module_args(default_args)

    target = 'linode_api4.linode_client.LinodeGroup.instances'
    with mock.patch(target, return_value=[]):
        with pytest.raises(SystemExit) as sys_exit_exc:
            # instance_create returns (instance, generated_password) when
            # no root password was supplied by the user.
            target = 'linode_api4.linode_client.LinodeGroup.instance_create'
            with mock.patch(target, return_value=(mock_linode, 'passw0rd')):
                linode_v4.main()

    assert sys_exit_exc.value.code == 0

    out, err = capfd.readouterr()
    results = json.loads(out)

    assert results['changed'] is True
    assert (
        results['instance']['label'] ==
        mock_linode._raw_json['label']
    )
    assert results['instance']['root_pass'] == 'passw0rd'
def test_instance_to_be_created_with_root_pass(default_args,
                                               mock_linode,
                                               capfd,
                                               access_token):
    """Creating an instance with a user-supplied root_pass must NOT echo
    the password back in the result."""
    default_args.update({
        'type': 'foo',
        'region': 'bar',
        'image': 'baz',
        'root_pass': 'passw0rd',
    })
    set_module_args(default_args)

    target = 'linode_api4.linode_client.LinodeGroup.instances'
    with mock.patch(target, return_value=[]):
        with pytest.raises(SystemExit) as sys_exit_exc:
            # instance_create returns only the instance when the user
            # supplied the root password themselves.
            target = 'linode_api4.linode_client.LinodeGroup.instance_create'
            with mock.patch(target, return_value=mock_linode):
                linode_v4.main()

    assert sys_exit_exc.value.code == 0

    out, err = capfd.readouterr()
    results = json.loads(out)

    assert results['changed'] is True
    assert (
        results['instance']['label'] ==
        mock_linode._raw_json['label']
    )
    assert 'root_pass' not in results['instance']
def test_instance_to_be_deleted(default_args,
                                mock_linode,
                                capfd,
                                access_token):
    """state=absent for an existing instance must delete it and report
    changed=True with the instance data."""
    default_args.update({'state': 'absent'})
    set_module_args(default_args)

    target = 'linode_api4.linode_client.LinodeGroup.instances'
    with mock.patch(target, return_value=[mock_linode]):
        with pytest.raises(SystemExit) as sys_exit_exc:
            linode_v4.main()

    assert sys_exit_exc.value.code == 0

    out, err = capfd.readouterr()
    results = json.loads(out)

    assert results['changed'] is True
    assert (
        results['instance']['label'] ==
        mock_linode._raw_json['label']
    )
def test_instance_already_deleted_no_change(default_args,
                                            mock_linode,
                                            capfd,
                                            access_token):
    """state=absent for a non-existent instance is a no-op: changed=False
    and an empty instance dict."""
    default_args.update({'state': 'absent'})
    set_module_args(default_args)

    target = 'linode_api4.linode_client.LinodeGroup.instances'
    with mock.patch(target, return_value=[]):
        with pytest.raises(SystemExit) as sys_exit_exc:
            linode_v4.main()

    assert sys_exit_exc.value.code == 0

    out, err = capfd.readouterr()
    results = json.loads(out)

    assert results['changed'] is False
    assert results['instance'] == {}
def test_user_agent_created_properly():
    """get_user_agent embeds the running Ansible version (or 'unknown'
    when the release module is unavailable)."""
    try:
        from ansible.module_utils.ansible_release import (
            __version__ as ansible_version
        )
    except ImportError:
        ansible_version = 'unknown'

    expected_user_agent = 'Ansible-linode_v4_module/%s' % ansible_version
    assert expected_user_agent == get_user_agent('linode_v4_module')

View file

@ -0,0 +1,20 @@
# Copyright: (c) 2019, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
import json
import pytest
from ansible_collections.community.general.plugins.modules.cloud.misc import terraform
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args
def test_terraform_without_argument(capfd):
    """Running the module with no arguments must fail and name the missing
    mandatory 'project_path' parameter.

    Fixes: the original bound the raises context to an unused name and
    parsed the module's JSON output twice; parse once and reuse it.
    """
    set_module_args({})
    with pytest.raises(SystemExit):
        terraform.main()

    out, err = capfd.readouterr()
    assert not err
    results = json.loads(out)
    assert results['failed']
    assert 'project_path' in results['msg']

View file

@ -0,0 +1,30 @@
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class AnsibleModuleException(Exception):
    """Base for exceptions raised in place of AnsibleModule exits.

    Positional arguments are stored via Exception.__init__ (so .args and
    str() behave normally); keyword arguments are kept in .kwargs for
    inspection by the test case.
    """

    def __init__(self, *args, **kwargs):
        # Let Exception set self.args instead of assigning it directly.
        super(AnsibleModuleException, self).__init__(*args)
        self.kwargs = kwargs
class ExitJsonException(AnsibleModuleException):
    """Raised by FakeAnsibleModule.exit_json() in place of exiting."""
    pass
class FailJsonException(AnsibleModuleException):
    """Raised by FakeAnsibleModule.fail_json() in place of exiting."""
    pass
class FakeAnsibleModule:
    """Minimal stand-in for AnsibleModule used by the XenServer tests.

    exit_json()/fail_json() raise exceptions instead of terminating the
    process, so test cases can assert on the module outcome.
    """

    def __init__(self, params=None, check_mode=False):
        # Module parameters dict (or None) and check-mode flag, mirroring
        # the attributes the code under test reads from AnsibleModule.
        self.params = params
        self.check_mode = check_mode

    def exit_json(self, *args, **kwargs):
        raise ExitJsonException(*args, **kwargs)

    def fail_json(self, *args, **kwargs):
        raise FailJsonException(*args, **kwargs)

View file

@ -0,0 +1,66 @@
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
FAKE_API_VERSION = "1.1"
class Failure(Exception):
    """Fake XenAPI.Failure exception carrying the error details list."""

    def __init__(self, details):
        # Mirrors XenAPI.Failure: the raw error detail list is exposed
        # on the .details attribute.
        self.details = details

    def __str__(self):
        return "{0}".format(self.details)
class Session(object):
    """Fake XenAPI session that records logins/logouts without any I/O.

    Mirrors the constructor signature and the public surface of
    XenAPI.Session; actual API calls are expected to be patched in by
    the tests (mocker.patch()).
    """

    def __init__(self, uri, transport=None, encoding=None, verbose=0,
                 allow_none=1, ignore_ssl=False):
        # uri/encoding/verbose/allow_none/ignore_ssl are accepted for
        # signature compatibility but ignored by this fake.
        self.transport = transport
        self._session = None
        self.last_login_method = None
        self.last_login_params = None
        self.API_version = FAKE_API_VERSION

    def _get_api_version(self):
        # Always report the fake API version.
        return FAKE_API_VERSION

    def _login(self, method, params):
        # Record the login and hand out a fake opaque session reference.
        self._session = "OpaqueRef:fake-xenapi-session-ref"
        self.last_login_method = method
        self.last_login_params = params
        self.API_version = self._get_api_version()

    def _logout(self):
        # Reset all session state to the pre-login defaults.
        self._session = None
        self.last_login_method = None
        self.last_login_params = None
        self.API_version = FAKE_API_VERSION

    def xenapi_request(self, methodname, params):
        if methodname.startswith('login'):
            self._login(methodname, params)
            return None
        elif methodname == 'logout' or methodname == 'session.logout':
            self._logout()
            return None
        else:
            # Should be patched with mocker.patch().
            return None

    def __getattr__(self, name):
        if name == 'handle':
            return self._session
        elif name == 'xenapi':
            # Should be patched with mocker.patch().
            return None
        elif name.startswith('login') or name.startswith('slave_local'):
            return lambda *params: self._login(name, params)
        elif name == 'logout':
            return self._logout
        # NOTE(review): any other attribute name falls through and
        # implicitly returns None instead of raising AttributeError, so
        # hasattr() is always True on this fake — presumably deliberate
        # for mock convenience, but worth confirming.
def xapi_local():
    """Return a fake local session, mirroring XenAPI.xapi_local()."""
    return Session("http://_var_lib_xcp_xapi/")

View file

@ -0,0 +1,11 @@
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, Bojan Vitnik <bvitnik@mainstream.rs>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
def fake_xenapi_ref(xenapi_class):
    """Build a fake XenAPI OpaqueRef string for the given class name."""
    return "OpaqueRef:fake-xenapi-{0}-ref".format(xenapi_class)

Some files were not shown because too many files have changed in this diff Show more