[wip] Remove network content (#84)

* rebase

* remove broken symlinks

* more deletes

* restore cs_* integration tests

* More deletes - from Felix

* cs_common

* Remove some more ignores
John R Barker 2020-04-01 20:34:05 +01:00 committed by GitHub
commit c313c825f4
2215 changed files with 0 additions and 333978 deletions


@@ -1,328 +0,0 @@
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Dag Wieers (@dagwieers) <dag@wieers.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.cisco.aci.plugins.module_utils.aci import ACIModule
from ansible.module_utils.six import PY2
from ansible.module_utils._text import to_native
import pytest
class AltModule():
params = dict(
hostname='dummy',
port=123,
protocol='https',
state='present',
)
class AltACIModule(ACIModule):
def __init__(self):
self.result = dict(changed=False)
self.module = AltModule
self.params = self.module.params
aci = AltACIModule()
try:
from lxml import etree
if sys.version_info >= (2, 7):
from xmljson import cobra
except ImportError:
pytestmark = pytest.mark.skip("ACI Ansible modules require the lxml and xmljson Python libraries")
class AciRest(unittest.TestCase):
def test_invalid_aci_login(self):
self.maxDiff = None
error = dict(
code='401',
text='Username or password is incorrect - FAILED local authentication',
)
imdata = [{
'error': {
'attributes': {
'code': '401',
'text': 'Username or password is incorrect - FAILED local authentication',
},
},
}]
totalCount = 1
json_response = '{"totalCount":"1","imdata":[{"error":{"attributes":{"code":"401","text":"Username or password is incorrect - FAILED local authentication"}}}]}' # NOQA
json_result = dict()
aci.response_json(json_response)
self.assertEqual(aci.error, error)
self.assertEqual(aci.imdata, imdata)
self.assertEqual(aci.totalCount, totalCount)
# Python 2.7+ is needed for xmljson
if sys.version_info < (2, 7):
return
xml_response = '''<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1">
<error code="401" text="Username or password is incorrect - FAILED local authentication"/>
</imdata>
'''
xml_result = dict()
aci.response_xml(xml_response)
self.assertEqual(aci.error, error)
self.assertEqual(aci.imdata, imdata)
self.assertEqual(aci.totalCount, totalCount)
def test_valid_aci_login(self):
self.maxDiff = None
imdata = [{
'aaaLogin': {
'attributes': {
'token': 'ZldYAsoO9d0FfAQM8xaEVWvQPSOYwpnqzhwpIC1r4MaToknJjlIuAt9+TvXqrZ8lWYIGPj6VnZkWiS8nJfaiaX/AyrdD35jsSxiP3zydh+849xym7ALCw/fFNsc7b5ik1HaMuSUtdrN8fmCEUy7Pq/QNpGEqkE8m7HaxAuHpmvXgtdW1bA+KKJu2zY1c/tem', # NOQA
'siteFingerprint': 'NdxD72K/uXaUK0wn',
'refreshTimeoutSeconds': '600',
'maximumLifetimeSeconds': '86400',
'guiIdleTimeoutSeconds': '1200',
'restTimeoutSeconds': '90',
'creationTime': '1500134817',
'firstLoginTime': '1500134817',
'userName': 'admin',
'remoteUser': 'false',
'unixUserId': '15374',
'sessionId': 'o7hObsqNTfCmDGcZI5c4ng==',
'lastName': '',
'firstName': '',
'version': '2.0(2f)',
'buildTime': 'Sat Aug 20 23:07:07 PDT 2016',
'node': 'topology/pod-1/node-1',
},
'children': [{
'aaaUserDomain': {
'attributes': {
'name': 'all',
'rolesR': 'admin',
'rolesW': 'admin',
},
'children': [{
'aaaReadRoles': {
'attributes': {},
},
}, {
'aaaWriteRoles': {
'attributes': {},
'children': [{
'role': {
'attributes': {
'name': 'admin',
},
},
}],
},
}],
},
}, {
'DnDomainMapEntry': {
'attributes': {
'dn': 'uni/tn-common',
'readPrivileges': 'admin',
'writePrivileges': 'admin',
},
},
}, {
'DnDomainMapEntry': {
'attributes': {
'dn': 'uni/tn-infra',
'readPrivileges': 'admin',
'writePrivileges': 'admin',
},
},
}, {
'DnDomainMapEntry': {
'attributes': {
'dn': 'uni/tn-mgmt',
'readPrivileges': 'admin',
'writePrivileges': 'admin',
},
},
}],
},
}]
totalCount = 1
json_response = '{"totalCount":"1","imdata":[{"aaaLogin":{"attributes":{"token":"ZldYAsoO9d0FfAQM8xaEVWvQPSOYwpnqzhwpIC1r4MaToknJjlIuAt9+TvXqrZ8lWYIGPj6VnZkWiS8nJfaiaX/AyrdD35jsSxiP3zydh+849xym7ALCw/fFNsc7b5ik1HaMuSUtdrN8fmCEUy7Pq/QNpGEqkE8m7HaxAuHpmvXgtdW1bA+KKJu2zY1c/tem","siteFingerprint":"NdxD72K/uXaUK0wn","refreshTimeoutSeconds":"600","maximumLifetimeSeconds":"86400","guiIdleTimeoutSeconds":"1200","restTimeoutSeconds":"90","creationTime":"1500134817","firstLoginTime":"1500134817","userName":"admin","remoteUser":"false","unixUserId":"15374","sessionId":"o7hObsqNTfCmDGcZI5c4ng==","lastName":"","firstName":"","version":"2.0(2f)","buildTime":"Sat Aug 20 23:07:07 PDT 2016","node":"topology/pod-1/node-1"},"children":[{"aaaUserDomain":{"attributes":{"name":"all","rolesR":"admin","rolesW":"admin"},"children":[{"aaaReadRoles":{"attributes":{}}},{"aaaWriteRoles":{"attributes":{},"children":[{"role":{"attributes":{"name":"admin"}}}]}}]}},{"DnDomainMapEntry":{"attributes":{"dn":"uni/tn-common","readPrivileges":"admin","writePrivileges":"admin"}}},{"DnDomainMapEntry":{"attributes":{"dn":"uni/tn-infra","readPrivileges":"admin","writePrivileges":"admin"}}},{"DnDomainMapEntry":{"attributes":{"dn":"uni/tn-mgmt","readPrivileges":"admin","writePrivileges":"admin"}}}]}}]}' # NOQA
json_result = dict()
aci.response_json(json_response)
self.assertEqual(aci.imdata, imdata)
self.assertEqual(aci.totalCount, totalCount)
# Python 2.7+ is needed for xmljson
if sys.version_info < (2, 7):
return
xml_response = '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1">\n<aaaLogin token="ZldYAsoO9d0FfAQM8xaEVWvQPSOYwpnqzhwpIC1r4MaToknJjlIuAt9+TvXqrZ8lWYIGPj6VnZkWiS8nJfaiaX/AyrdD35jsSxiP3zydh+849xym7ALCw/fFNsc7b5ik1HaMuSUtdrN8fmCEUy7Pq/QNpGEqkE8m7HaxAuHpmvXgtdW1bA+KKJu2zY1c/tem" siteFingerprint="NdxD72K/uXaUK0wn" refreshTimeoutSeconds="600" maximumLifetimeSeconds="86400" guiIdleTimeoutSeconds="1200" restTimeoutSeconds="90" creationTime="1500134817" firstLoginTime="1500134817" userName="admin" remoteUser="false" unixUserId="15374" sessionId="o7hObsqNTfCmDGcZI5c4ng==" lastName="" firstName="" version="2.0(2f)" buildTime="Sat Aug 20 23:07:07 PDT 2016" node="topology/pod-1/node-1">\n<aaaUserDomain name="all" rolesR="admin" rolesW="admin">\n<aaaReadRoles/>\n<aaaWriteRoles>\n<role name="admin"/>\n</aaaWriteRoles>\n</aaaUserDomain>\n<DnDomainMapEntry dn="uni/tn-common" readPrivileges="admin" writePrivileges="admin"/>\n<DnDomainMapEntry dn="uni/tn-infra" readPrivileges="admin" writePrivileges="admin"/>\n<DnDomainMapEntry dn="uni/tn-mgmt" readPrivileges="admin" writePrivileges="admin"/>\n</aaaLogin></imdata>\n''' # NOQA
xml_result = dict()
aci.response_xml(xml_response)
self.assertEqual(aci.imdata, imdata)
self.assertEqual(aci.totalCount, totalCount)
def test_invalid_input(self):
self.maxDiff = None
error = dict(
code='401',
text='Username or password is incorrect - FAILED local authentication',
)
imdata = [{
'error': {
'attributes': {
'code': '401',
'text': 'Username or password is incorrect - FAILED local authentication',
},
},
}]
totalCount = 1
json_response = '{"totalCount":"1","imdata":[{"error":{"attributes":{"code":"401","text":"Username or password is incorrect - FAILED local authentication"}}}]}' # NOQA
json_result = dict()
aci.response_json(json_response)
self.assertEqual(aci.error, error)
self.assertEqual(aci.imdata, imdata)
self.assertEqual(aci.totalCount, totalCount)
# Python 2.7+ is needed for xmljson
if sys.version_info < (2, 7):
return
xml_response = '''<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1">
<error code="401" text="Username or password is incorrect - FAILED local authentication"/>
</imdata>
'''
xml_result = dict()
aci.response_xml(xml_response)
self.assertEqual(aci.error, error)
self.assertEqual(aci.imdata, imdata)
self.assertEqual(aci.totalCount, totalCount)
def test_empty_response(self):
        self.maxDiff = None
if PY2:
error_text = "Unable to parse output as JSON, see 'raw' output. No JSON object could be decoded"
else:
error_text = "Unable to parse output as JSON, see 'raw' output. Expecting value: line 1 column 1 (char 0)"
error = dict(
code=-1,
text=error_text,
)
raw = ''
json_response = ''
json_result = dict()
aci.response_json(json_response)
self.assertEqual(aci.error, error)
self.assertEqual(aci.result['raw'], raw)
# Python 2.7+ is needed for xmljson
if sys.version_info < (2, 7):
return
elif etree.LXML_VERSION < (3, 3, 0, 0):
            error_text = "Unable to parse output as XML, see 'raw' output. None"
elif etree.LXML_VERSION < (4, 0, 0, 0):
error_text = to_native(u"Unable to parse output as XML, see 'raw' output. None (line 0)", errors='surrogate_or_strict')
elif PY2:
error_text = "Unable to parse output as XML, see 'raw' output. Document is empty, line 1, column 1 (line 1)"
else:
error_text = None
xml_response = ''
aci.response_xml(xml_response)
if error_text is None:
# errors vary on Python 3.8+ for unknown reasons
# accept any of the following error messages
errors = (
"Unable to parse output as XML, see 'raw' output. None (line 0)",
"Unable to parse output as XML, see 'raw' output. Document is empty, line 1, column 1 (<string>, line 1)",
)
for error in errors:
if error in aci.error['text']:
error_text = error
break
error = dict(
code=-1,
text=error_text,
)
raw = ''
self.assertEqual(aci.error, error)
self.assertEqual(aci.result['raw'], raw)
def test_invalid_response(self):
self.maxDiff = None
if sys.version_info < (2, 7):
error_text = "Unable to parse output as JSON, see 'raw' output. Expecting object: line 1 column 8 (char 8)"
elif PY2:
error_text = "Unable to parse output as JSON, see 'raw' output. No JSON object could be decoded"
else:
error_text = "Unable to parse output as JSON, see 'raw' output. Expecting value: line 1 column 9 (char 8)"
error = dict(
code=-1,
text=error_text,
)
raw = '{ "aaa":'
json_response = '{ "aaa":'
json_result = dict()
aci.response_json(json_response)
self.assertEqual(aci.error, error)
self.assertEqual(aci.result['raw'], raw)
# Python 2.7+ is needed for xmljson
if sys.version_info < (2, 7):
return
elif etree.LXML_VERSION < (3, 3, 0, 0):
error_text = "Unable to parse output as XML, see 'raw' output. Couldn't find end of Start Tag aaa line 1, line 1, column 5" # NOQA
elif PY2:
error_text = "Unable to parse output as XML, see 'raw' output. Couldn't find end of Start Tag aaa line 1, line 1, column 6 (line 1)" # NOQA
else:
error_text = "Unable to parse output as XML, see 'raw' output. Couldn't find end of Start Tag aaa line 1, line 1, column 6 (<string>, line 1)" # NOQA
error = dict(
code=-1,
text=error_text,
)
raw = '<aaa '
xml_response = '<aaa '
xml_result = dict()
aci.response_xml(xml_response)
self.assertEqual(aci.error, error)
self.assertEqual(aci.result['raw'], raw)
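
For orientation, a minimal stdlib-only sketch of the payload shape these tests exercise; response_json in the collection populates aci.error, aci.imdata and aci.totalCount from the same structure (the helper below is illustrative, not the ACIModule implementation):

import json

def parse_aci_error(body):
    # Illustrative only: extract the fields the tests above assert on.
    data = json.loads(body)
    imdata = data.get('imdata', [])
    total_count = int(data.get('totalCount', 0))
    error = None
    if imdata and 'error' in imdata[0]:
        attributes = imdata[0]['error']['attributes']
        error = dict(code=attributes['code'], text=attributes['text'])
    return error, imdata, total_count

sample = '{"totalCount":"1","imdata":[{"error":{"attributes":{"code":"401","text":"FAILED local authentication"}}}]}'
error, imdata, total_count = parse_aci_error(sample)
assert error['code'] == '401' and total_count == 1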


@@ -1,699 +0,0 @@
'''
Created on Aug 16, 2016
@author: grastogi
'''
import unittest
from ansible_collections.community.general.plugins.module_utils.network.avi.ansible_utils import \
cleanup_absent_fields, avi_obj_cmp
class TestAviApiUtils(unittest.TestCase):
def test_avi_obj_cmp(self):
obj = {'name': 'testpool'}
existing_obj = {
'lb_algorithm': 'LB_ALGORITHM_LEAST_CONNECTIONS',
'use_service_port': False,
'server_auto_scale': False,
'host_check_enabled': False,
'enabled': True,
'capacity_estimation': False,
'fewest_tasks_feedback_delay': 10,
'_last_modified': '1471377748747040',
'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-afe8bf2c-9821-4272-9bc6-67634c84bec9',
'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-0e8ce760-fed2-4650-9397-5b3e4966376e',
'inline_health_monitor': True,
'default_server_port': 80,
'request_queue_depth': 128,
'graceful_disable_timeout': 1,
'server_count': 0,
'sni_enabled': True,
'request_queue_enabled': False,
'name': 'testpool',
'max_concurrent_connections_per_server': 0,
'url': 'https://192.0.2.42/api/pool/pool-20084ee1-872e-4103-98e1-899103e2242a',
'tenant_ref': 'https://192.0.2.42/api/tenant/admin',
'uuid': 'pool-20084ee1-872e-4103-98e1-899103e2242a',
'connection_ramp_duration': 10}
diff = avi_obj_cmp(obj, existing_obj)
assert diff
def test_avi_obj_cmp_w_refs(self):
obj = {'name': 'testpool',
'health_monitor_refs': ['/api/healthmonitor?name=System-HTTP'],
'enabled': True}
existing_obj = {
'lb_algorithm': 'LB_ALGORITHM_LEAST_CONNECTIONS',
'use_service_port': False,
'server_auto_scale': False,
'host_check_enabled': False,
'enabled': True,
'capacity_estimation': False,
'fewest_tasks_feedback_delay': 10,
'_last_modified': '1471377748747040',
'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-afe8bf2c-9821-4272-9bc6-67634c84bec9',
'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-0e8ce760-fed2-4650-9397-5b3e4966376e',
'inline_health_monitor': True,
'default_server_port': 80,
'request_queue_depth': 128,
'graceful_disable_timeout': 1,
'server_count': 0,
'sni_enabled': True,
'request_queue_enabled': False,
'name': 'testpool',
'max_concurrent_connections_per_server': 0,
'url': 'https://192.0.2.42/api/pool/pool-20084ee1-872e-4103-98e1-899103e2242a',
'tenant_ref': 'https://192.0.2.42/api/tenant/admin',
'uuid': 'pool-20084ee1-872e-4103-98e1-899103e2242a',
'connection_ramp_duration': 10,
'health_monitor_refs': [
"https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9#System-HTTP"],
}
diff = avi_obj_cmp(obj, existing_obj)
assert diff
obj = {'name': 'testpool',
'health_monitor_refs': ['/api/healthmonitor?name=System-HTTP'],
'server_count': 1}
diff = avi_obj_cmp(obj, existing_obj)
assert not diff
obj = {'name': 'testpool',
'health_monitor_refs': ['api/healthmonitor?name=System-HTTP'],
'server_count': 0}
diff = avi_obj_cmp(obj, existing_obj)
assert not diff
obj = {'name': 'testpool',
'health_monitor_refs': ['healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9'],
'server_count': 0}
diff = avi_obj_cmp(obj, existing_obj)
assert diff
obj = {'name': 'testpool#asdfasf',
'health_monitor_refs': ['api/healthmonitor?name=System-HTTP'],
'server_count': 0}
diff = avi_obj_cmp(obj, existing_obj)
assert not diff
obj = {'name': 'testpool',
'health_monitor_refs': ['/api/healthmonitor?name=System-HTTP#'],
'server_count': 0}
diff = avi_obj_cmp(obj, existing_obj)
assert not diff
def test_avi_obj_cmp_empty_list(self):
obj = {'name': 'testpool',
'health_monitor_refs': [],
'enabled': True}
existing_obj = {
'lb_algorithm': 'LB_ALGORITHM_LEAST_CONNECTIONS',
'use_service_port': False,
'server_auto_scale': False,
'host_check_enabled': False,
'enabled': True,
'capacity_estimation': False,
'fewest_tasks_feedback_delay': 10,
'_last_modified': '1471377748747040',
'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-afe8bf2c-9821-4272-9bc6-67634c84bec9',
'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-0e8ce760-fed2-4650-9397-5b3e4966376e',
'inline_health_monitor': True,
'default_server_port': 80,
'request_queue_depth': 128,
'graceful_disable_timeout': 1,
'server_count': 0,
'sni_enabled': True,
'request_queue_enabled': False,
'name': 'testpool',
'max_concurrent_connections_per_server': 0,
'url': 'https://192.0.2.42/api/pool/pool-20084ee1-872e-4103-98e1-899103e2242a',
'tenant_ref': 'https://192.0.2.42/api/tenant/admin',
'uuid': 'pool-20084ee1-872e-4103-98e1-899103e2242a',
'connection_ramp_duration': 10
}
diff = avi_obj_cmp(obj, existing_obj)
assert diff
def test_avi_obj_cmp_w_refs_n_name(self):
existing_obj = {
'use_service_port': False,
'server_auto_scale': False,
'host_check_enabled': False,
'enabled': True,
'capacity_estimation': False,
'fewest_tasks_feedback_delay': 10,
'_last_modified': '1471377748747040',
'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-afe8bf2c-9821-4272-9bc6-67634c84bec9',
'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-0e8ce760-fed2-4650-9397-5b3e4966376e',
'inline_health_monitor': True,
'default_server_port': 80,
'request_queue_depth': 128,
'graceful_disable_timeout': 1,
'server_count': 0,
'sni_enabled': True,
'request_queue_enabled': False,
'name': 'testpool',
'max_concurrent_connections_per_server': 0,
'url': 'https://192.0.2.42/api/pool/pool-20084ee1-872e-4103-98e1-899103e2242a',
'tenant_ref': 'https://192.0.2.42/api/tenant/admin',
'uuid': 'pool-20084ee1-872e-4103-98e1-899103e2242a',
'connection_ramp_duration': 10,
'health_monitor_refs': [
"https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9#System-HTTP",
"https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc8",
],
}
obj = {'name': 'testpool',
'health_monitor_refs': ['https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9',
"https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc8"],
'server_count': 0}
diff = avi_obj_cmp(obj, existing_obj)
assert diff
obj = {'name': 'testpool',
'health_monitor_refs': [
'https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9#System-HTTP',
"https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc8"],
'server_count': 0}
diff = avi_obj_cmp(obj, existing_obj)
assert diff
obj = {'name': 'testpool',
'health_monitor_refs': [
'https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9#System-HTTP',
"https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc8#System-HTTP2"],
'server_count': 0,
'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-afe8bf2c-9821-4272-9bc6-67634c84bec9#Default-Cloud',
}
diff = avi_obj_cmp(obj, existing_obj)
assert diff
def test_avi_list_update(self):
existing_obj = {
'services': [
{
"enable_ssl": False,
"port_range_end": 80,
"port": 80
},
{
"enable_ssl": False,
"port_range_end": 443,
"port": 443
}
],
"name": "vs-health-test",
"url": "https://192.0.2.42/api/virtualservice/virtualservice-526c55c2-df89-40b9-9de6-e45a472290aa",
}
obj = {
'services': [
{
"enable_ssl": False,
"port_range_end": 80,
"port": 80
}
]
}
diff = avi_obj_cmp(obj, existing_obj)
assert not diff
obj = {
'services': [
{
"enable_ssl": False,
"port_range_end": 80,
"port": 80
},
{
"enable_ssl": False,
"port_range_end": 443,
"port": 80
}
],
"name": "vs-health-test",
"url": "https://192.0.2.42/api/virtualservice/virtualservice-526c55c2-df89-40b9-9de6-e45a472290aa",
}
diff = avi_obj_cmp(obj, existing_obj)
assert not diff
    def test_cleanup_absent(self):
obj = {'x': 10,
'y': {'state': 'absent'},
'z': {'a': {'state': 'absent'}},
'l': [{'y1': {'state': 'absent'}}],
'z1': {'a': {'state': 'absent'}, 'b': {}, 'c': 42},
'empty': []}
obj = cleanup_absent_fields(obj)
assert 'y' not in obj
assert 'z' not in obj
assert 'l' not in obj
assert 'z1' in obj
assert 'b' not in obj['z1']
assert 'a' not in obj['z1']
assert 'empty' not in obj
def test_complex_obj(self):
obj = {
'lb_algorithm': 'LB_ALGORITHM_ROUND_ROBIN',
'use_service_port': False, 'server_auto_scale': False,
'host_check_enabled': False,
'tenant_ref': 'https://192.0.2.42/api/tenant/admin#admin',
'capacity_estimation': False,
'servers': [{
'hostname': 'grastogi-server6', 'ratio': 1,
'ip': {'type': 'V4', 'addr': '198.51.100.62'},
'discovered_networks': [{
'subnet': [{
'ip_addr': {
'type': 'V4',
'addr': '198.51.100.0'
},
'mask': 24
}],
'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10#PG-964'
}],
'enabled': True, 'nw_ref': 'https://192.0.2.42/api/vimgrnwruntime/dvportgroup-53975-10.10.2.10#PG-964',
'verify_network': False,
'static': False,
'resolve_server_by_dns': False,
'external_uuid': 'vm-4230615e-bc0b-3d33-3929-1c7328575993',
'vm_ref': 'https://192.0.2.42/api/vimgrvmruntime/vm-4230615e-bc0b-3d33-3929-1c7328575993#grastogi-server6'
}, {
'hostname': 'grastogi-server6',
'ratio': 1,
'ip': {
'type': 'V4',
'addr': '198.51.100.61'
},
'discovered_networks': [{
'subnet': [{
'ip_addr': {
'type': 'V4',
'addr': '198.51.100.0'
},
'mask': 24
}],
'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10#PG-964'
}],
'enabled': True,
'nw_ref': 'https://192.0.2.42/api/vimgrnwruntime/dvportgroup-53975-10.10.2.10#PG-964',
'verify_network': False,
'static': False,
'resolve_server_by_dns': False,
'external_uuid': 'vm-4230615e-bc0b-3d33-3929-1c7328575993',
'vm_ref': 'https://192.0.2.42/api/vimgrvmruntime/vm-4230615e-bc0b-3d33-3929-1c7328575993#grastogi-server6'
}, {
'hostname': 'grastogi-server6',
'ratio': 1,
'ip': {
'type': 'V4',
'addr': '198.51.100.65'
},
'discovered_networks': [{
'subnet': [{
'ip_addr': {
'type': 'V4',
'addr': '198.51.100.0'
}, 'mask': 24
}],
'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10#PG-964'
}],
'enabled': True,
'verify_network': False,
'static': False,
'resolve_server_by_dns': False
}],
'fewest_tasks_feedback_delay': 10,
'_last_modified': '1473292763246107',
'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-e0696a58-8b72-4026-923c-9a87c38a2489#Default-Cloud',
'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-33dfbcd7-867c-4e3e-acf7-96bf679d5a0d#global',
'inline_health_monitor': True,
'default_server_port': 8000,
'request_queue_depth': 128,
'graceful_disable_timeout': 1,
'sni_enabled': True,
'server_count': 3,
'uuid': 'pool-09201181-747e-41ea-872d-e9a7df71b726',
'request_queue_enabled': False,
'name': 'p1',
'max_concurrent_connections_per_server': 0,
'url': 'https://192.0.2.42/api/pool/pool-09201181-747e-41ea-872d-e9a7df71b726#p1',
'enabled': True,
'connection_ramp_duration': 10}
existing_obj = {
'lb_algorithm': 'LB_ALGORITHM_ROUND_ROBIN',
'use_service_port': False,
'server_auto_scale': False,
'host_check_enabled': False,
'tenant_ref': 'https://192.0.2.42/api/tenant/admin',
'capacity_estimation': False,
'servers': [{
'hostname': 'grastogi-server6', 'ratio': 1,
'ip': {
'type': 'V4',
'addr': '198.51.100.62'
},
'discovered_networks': [{
'subnet': [{
'mask': 24,
'ip_addr': {
'type': 'V4',
'addr': '198.51.100.0'
}
}],
'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10'
}],
'enabled': True,
'nw_ref': 'https://192.0.2.42/api/vimgrnwruntime/dvportgroup-53975-10.10.2.10',
'verify_network': False,
'static': False,
'resolve_server_by_dns': False,
'external_uuid': 'vm-4230615e-bc0b-3d33-3929-1c7328575993',
'vm_ref': 'https://192.0.2.42/api/vimgrvmruntime/vm-4230615e-bc0b-3d33-3929-1c7328575993'
}, {
'hostname': 'grastogi-server6',
'ratio': 1,
'ip': {
'type': 'V4',
'addr': '198.51.100.61'
},
'discovered_networks': [{
'subnet': [{
'mask': 24,
'ip_addr': {
'type': 'V4',
'addr': '198.51.100.0'
}
}],
'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10'
}],
'enabled': True,
'nw_ref': 'https://192.0.2.42/api/vimgrnwruntime/dvportgroup-53975-10.10.2.10',
'verify_network': False,
'static': False,
'resolve_server_by_dns': False,
'external_uuid': 'vm-4230615e-bc0b-3d33-3929-1c7328575993',
'vm_ref': 'https://192.0.2.42/api/vimgrvmruntime/vm-4230615e-bc0b-3d33-3929-1c7328575993'
}, {
'hostname': 'grastogi-server6',
'ratio': 1,
'ip': {
'type': 'V4',
'addr': '198.51.100.65'
},
'discovered_networks': [{
'subnet': [{
'mask': 24,
'ip_addr': {
'type': 'V4',
'addr': '198.51.100.0'
}
}],
'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10'
}],
'enabled': True,
'nw_ref': 'https://192.0.2.42/api/vimgrnwruntime/dvportgroup-53975-10.10.2.10',
'verify_network': False,
'static': False,
'resolve_server_by_dns': False,
'external_uuid': 'vm-4230615e-bc0b-3d33-3929-1c7328575993',
'vm_ref': 'https://192.0.2.42/api/vimgrvmruntime/vm-4230615e-bc0b-3d33-3929-1c7328575993'
}],
'fewest_tasks_feedback_delay': 10,
'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-e0696a58-8b72-4026-923c-9a87c38a2489',
'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-33dfbcd7-867c-4e3e-acf7-96bf679d5a0d',
'inline_health_monitor': True,
'default_server_port': 8000,
'request_queue_depth': 128,
'graceful_disable_timeout': 1,
'sni_enabled': True,
'server_count': 3,
'uuid': 'pool-09201181-747e-41ea-872d-e9a7df71b726',
'request_queue_enabled': False,
'name': 'p1',
'max_concurrent_connections_per_server': 0,
'url': 'https://192.0.2.42/api/pool/pool-09201181-747e-41ea-872d-e9a7df71b726',
'enabled': True,
'connection_ramp_duration': 10
}
diff = avi_obj_cmp(obj, existing_obj)
assert diff
def testAWSVs(self):
existing_obj = {
'network_profile_ref': 'https://12.97.16.202/api/networkprofile/networkprofile-9a0a9896-6876-44c8-a3ee-512a968905f2#System-TCP-Proxy',
'port_uuid': 'eni-4144e73c',
'weight': 1,
'availability_zone': 'us-west-2a',
'enabled': True,
'flow_dist': 'LOAD_AWARE',
'subnet_uuid': 'subnet-91f0b6f4',
'delay_fairness': False,
'avi_allocated_vip': True,
'vrf_context_ref': 'https://12.97.16.202/api/vrfcontext/vrfcontext-722b280d-b555-4d82-9b35-af9442c0cb86#global',
'subnet': {
'ip_addr': {
'type': 'V4',
'addr': '198.51.100.0'
},
'mask': 24
},
'cloud_type': 'CLOUD_AWS', 'uuid': 'virtualservice-a5f49b99-22c8-42e6-aa65-3ca5f1e36b9e',
'network_ref': 'https://12.97.16.202/api/network/subnet-91f0b6f4',
'cloud_ref': 'https://12.97.16.202/api/cloud/cloud-49829414-c704-43ca-9dff-05b9e8474dcb#AWS Cloud',
'avi_allocated_fip': False,
'se_group_ref': 'https://12.97.16.202/api/serviceenginegroup/serviceenginegroup-3bef6320-5a2d-4801-85c4-ef4f9841f235#Default-Group',
'scaleout_ecmp': False,
'max_cps_per_client': 0,
'type': 'VS_TYPE_NORMAL',
'analytics_profile_ref': 'https://12.97.16.202/api/analyticsprofile/analyticsprofile-70f8b06f-7b6a-4500-b829-c869bbca2009#System-Analytics-Profile',
'use_bridge_ip_as_vip': False,
'application_profile_ref': 'https://12.97.16.202/api/applicationprofile/applicationprofile-103cbc31-cac5-46ab-8e66-bbbb2c8f551f#System-HTTP',
'auto_allocate_floating_ip': False,
'services': [{
'enable_ssl': False,
'port_range_end': 80,
'port': 80
}],
'active_standby_se_tag': 'ACTIVE_STANDBY_SE_1',
'ip_address': {
'type': 'V4',
'addr': '198.51.100.33'
},
'ign_pool_net_reach': False,
'east_west_placement': False,
'limit_doser': False,
'name': 'wwwawssit.ebiz.verizon.com',
'url': 'https://12.97.16.202/api/virtualservice/virtualservice-a5f49b99-22c8-42e6-aa65-3ca5f1e36b9e#wwwawssit.ebiz.verizon.com',
'ssl_sess_cache_avg_size': 1024,
'enable_autogw': True,
'auto_allocate_ip': True,
'tenant_ref': 'https://12.97.16.202/api/tenant/tenant-f52f7a3e-6876-4bb9-b8f7-3cab636dadf2#Sales',
'remove_listening_port_on_vs_down': False
}
obj = {'auto_allocate_ip': True, 'subnet_uuid': 'subnet-91f0b6f4', 'cloud_ref': '/api/cloud?name=AWS Cloud', 'services': [{'port': 80}],
'name': 'wwwawssit.ebiz.verizon.com'}
diff = avi_obj_cmp(obj, existing_obj)
assert diff
def testhttppolicy(self):
existing_obj = {
"http_request_policy": {
"rules": [{
"enable": True,
"index": 0,
"match": {
"path": {
"match_case": "INSENSITIVE",
"match_criteria": "CONTAINS",
"match_str": ["xvz", "rst"]
}
},
"name": "blah",
"switching_action": {
"action": "HTTP_SWITCHING_SELECT_POOL",
"pool_ref": "https://12.97.16.202/api/pool/pool-d7f6f5e7-bd26-49ad-aeed-965719eb140b#abc",
"status_code": "HTTP_LOCAL_RESPONSE_STATUS_CODE_200"
}
}]
},
"is_internal_policy": False,
"name": "blah",
"tenant_ref": "https://12.97.16.202/api/tenant/tenant-f52f7a3e-6876-4bb9-b8f7-3cab636dadf2#Sales",
"url": "https://12.97.16.202/api/httppolicyset/httppolicyset-ffd8354b-671b-48d5-92cc-69a9057aad0c#blah",
"uuid": "httppolicyset-ffd8354b-671b-48d5-92cc-69a9057aad0c"
}
obj = {
"http_request_policy": {
"rules": [{
"enable": True,
"index": "0",
"match": {
"path": {
"match_case": "INSENSITIVE",
"match_criteria": "CONTAINS",
"match_str": ["xvz", "rst"]
}
},
"name": "blah",
"switching_action": {
"action": "HTTP_SWITCHING_SELECT_POOL",
"pool_ref": "/api/pool?name=abc",
"status_code": "HTTP_LOCAL_RESPONSE_STATUS_CODE_200"
}
}]
},
"is_internal_policy": False,
"tenant": "Sales"
}
diff = avi_obj_cmp(obj, existing_obj)
assert diff
def testCleanupFields(self):
obj = {'name': 'testpool',
'scalar_field': {'state': 'absent'},
'list_fields': [{'x': '1'}, {'y': {'state': 'absent'}}]}
cleanup_absent_fields(obj)
assert 'scalar_field' not in obj
for elem in obj['list_fields']:
assert 'y' not in elem
def testGSLB(self):
obj = {
'domain_names': ['cloud5.avi.com', 'cloud6.avi.com'],
'health_monitor_scope': 'GSLB_SERVICE_HEALTH_MONITOR_ALL_MEMBERS',
'groups': [{
'priority': 20,
'members': [{
'ip': {
'type': 'V4',
'addr': '198.51.100.1'
},
'enabled': True, 'ratio': 1
}, {
'ip': {
'type': 'V4',
'addr': '198.51.100.10'
},
'enabled': True,
'ratio': 1
}],
'algorithm': 'GSLB_ALGORITHM_CONSISTENT_HASH',
'name': 'sc'
}, {
'priority': 14,
'members': [{
'ip': {
'type': 'V4',
'addr': '198.51.100.2'
},
'enabled': True,
'ratio': 1
}],
'algorithm': 'GSLB_ALGORITHM_ROUND_ROBIN',
'name': 'cn'
}, {
'priority': 15,
'members': [{
'ip': {
'type': 'V4',
'addr': '198.51.100.3'
},
'enabled': True, 'ratio': 1
}],
'algorithm': 'GSLB_ALGORITHM_ROUND_ROBIN',
'name': 'in'
}],
'name': 'gs-3',
'num_dns_ip': 2
}
existing_obj = {
u'controller_health_status_enabled': True,
u'uuid': u'gslbservice-ab9b36bd-3e95-4c2e-80f8-92905c2eccb2',
u'wildcard_match': False,
u'url': u'https://192.0.2.42/api/gslbservice/gslbservice-ab9b36bd-3e95-4c2e-80f8-92905c2eccb2#gs-3',
u'tenant_ref': u'https://192.0.2.42/api/tenant/admin#admin',
u'enabled': True,
u'domain_names': [u'cloud5.avi.com', u'cloud6.avi.com'],
u'use_edns_client_subnet': True,
u'groups': [{
u'priority': 20,
u'members': [{
u'ip': {
u'type': u'V4',
u'addr': u'198.51.100.1'
},
u'ratio': 1,
u'enabled': True
}, {
u'ip': {
u'type': u'V4',
u'addr': u'198.51.100.10'
},
u'ratio': 1,
u'enabled': True
}],
u'name': u'sc',
u'algorithm': u'GSLB_ALGORITHM_CONSISTENT_HASH'
}, {
u'priority': 14,
u'members': [{
u'ip': {
u'type': u'V4',
u'addr': u'198.51.100.2'
},
u'ratio': 1,
u'enabled': True
}],
u'name': u'cn',
u'algorithm': u'GSLB_ALGORITHM_ROUND_ROBIN'
}, {
u'priority': 15,
u'members': [{
u'ip': {
u'type': u'V4',
u'addr': u'198.51.100.3'
},
u'ratio': 1,
u'enabled': True
}],
u'name': u'in',
u'algorithm': u'GSLB_ALGORITHM_ROUND_ROBIN'
}],
u'num_dns_ip': 2,
u'health_monitor_scope': u'GSLB_SERVICE_HEALTH_MONITOR_ALL_MEMBERS',
u'name': u'gs-3'
}
diff = avi_obj_cmp(obj, existing_obj)
assert diff
def testNoneParams(self):
objwnone = {
'name': 'testpool',
'scalar_field': None,
'list_fields': {
'y': None,
'z': 'zz'
}
}
obj = {
'name': 'testpool',
'list_fields': {
'z': 'zz'
}
}
result = avi_obj_cmp(objwnone, obj)
assert result
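
A minimal sketch of the contract the cleanup_absent_fields tests above assert: values of {'state': 'absent'} are dropped recursively, and containers left empty afterwards are dropped as well. This is an illustrative re-implementation, not the collection's module_utils code:

def cleanup_absent_sketch(obj):
    # Drop {'state': 'absent'} markers recursively and prune containers
    # that end up empty, mirroring the assertions in the tests above.
    if isinstance(obj, dict):
        cleaned = {}
        for key, value in obj.items():
            value = cleanup_absent_sketch(value)
            if value == {'state': 'absent'} or value in ({}, []):
                continue
            cleaned[key] = value
        return cleaned
    if isinstance(obj, list):
        items = [cleanup_absent_sketch(item) for item in obj]
        return [item for item in items if item not in ({}, [], {'state': 'absent'})]
    return obj

assert cleanup_absent_sketch({'x': 10, 'y': {'state': 'absent'}, 'empty': []}) == {'x': 10}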


@@ -1,46 +0,0 @@
# -*- coding: utf-8 -*-
#
# (c) 2017 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.parsing import Conditional
test_results = ['result_1', 'result_2', 'result_3']
c1 = Conditional('result[1] == result_2')
c2 = Conditional('result[2] not == result_2')
c3 = Conditional('result[0] neq not result_1')
class TestNotKeyword(unittest.TestCase):
def test_negate_instance_variable_assignment(self):
assert c1.negate is False and c2.negate is True
def test_key_value_instance_variable_assignment(self):
c1_assignments = c1.key == 'result[1]' and c1.value == 'result_2'
c2_assignments = c2.key == 'result[2]' and c2.value == 'result_2'
assert c1_assignments and c2_assignments
def test_conditionals_w_not_keyword(self):
assert c1(test_results) and c2(test_results) and c3(test_results)
if __name__ == '__main__':
unittest.main()
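
In plain Python, the three conditionals above reduce to the checks below (a sketch of what the expressions evaluate to against test_results, per the assertions in TestNotKeyword; not the Conditional implementation itself):

results = ['result_1', 'result_2', 'result_3']
assert results[1] == 'result_2'            # c1: plain equality
assert not (results[2] == 'result_2')      # c2: 'not ==' negates the equality check
assert not (results[0] != 'result_1')      # c3: 'neq' negated by 'not'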


@@ -1,213 +0,0 @@
# -*- coding: utf-8 -*-
#
# (c) 2017 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import to_list, sort_list
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import dict_diff, dict_merge
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import conditional, Template
from ansible.module_utils.common.network import (
to_masklen, to_netmask, to_subnet, to_ipv6_network, to_ipv6_subnet, is_masklen, is_netmask
)
def test_to_list():
for scalar in ('string', 1, True, False, None):
assert isinstance(to_list(scalar), list)
for container in ([1, 2, 3], {'one': 1}):
assert isinstance(to_list(container), list)
test_list = [1, 2, 3]
assert id(test_list) != id(to_list(test_list))
def test_sort():
data = [3, 1, 2]
assert [1, 2, 3] == sort_list(data)
string_data = '123'
assert string_data == sort_list(string_data)
def test_dict_diff():
base = dict(obj2=dict(), b1=True, b2=False, b3=False,
one=1, two=2, three=3, obj1=dict(key1=1, key2=2),
l1=[1, 3], l2=[1, 2, 3], l4=[4],
nested=dict(n1=dict(n2=2)))
other = dict(b1=True, b2=False, b3=True, b4=True,
one=1, three=4, four=4, obj1=dict(key1=2),
l1=[2, 1], l2=[3, 2, 1], l3=[1],
nested=dict(n1=dict(n2=2, n3=3)))
result = dict_diff(base, other)
# string assertions
assert 'one' not in result
assert 'two' not in result
assert result['three'] == 4
assert result['four'] == 4
# dict assertions
assert 'obj1' in result
assert 'key1' in result['obj1']
assert 'key2' not in result['obj1']
# list assertions
assert result['l1'] == [2, 1]
assert 'l2' not in result
assert result['l3'] == [1]
assert 'l4' not in result
# nested assertions
assert 'obj1' in result
assert result['obj1']['key1'] == 2
assert 'key2' not in result['obj1']
# bool assertions
assert 'b1' not in result
assert 'b2' not in result
assert result['b3']
assert result['b4']
def test_dict_merge():
base = dict(obj2=dict(), b1=True, b2=False, b3=False,
one=1, two=2, three=3, obj1=dict(key1=1, key2=2),
l1=[1, 3], l2=[1, 2, 3], l4=[4],
nested=dict(n1=dict(n2=2)))
other = dict(b1=True, b2=False, b3=True, b4=True,
one=1, three=4, four=4, obj1=dict(key1=2),
l1=[2, 1], l2=[3, 2, 1], l3=[1],
nested=dict(n1=dict(n2=2, n3=3)))
result = dict_merge(base, other)
# string assertions
assert 'one' in result
assert 'two' in result
assert result['three'] == 4
assert result['four'] == 4
# dict assertions
assert 'obj1' in result
assert 'key1' in result['obj1']
assert 'key2' in result['obj1']
# list assertions
assert result['l1'] == [1, 2, 3]
assert 'l2' in result
assert result['l3'] == [1]
assert 'l4' in result
# nested assertions
assert 'obj1' in result
assert result['obj1']['key1'] == 2
assert 'key2' in result['obj1']
# bool assertions
assert 'b1' in result
assert 'b2' in result
assert result['b3']
assert result['b4']
def test_conditional():
assert conditional(10, 10)
assert conditional('10', '10')
assert conditional('foo', 'foo')
assert conditional(True, True)
assert conditional(False, False)
assert conditional(None, None)
assert conditional("ge(1)", 1)
assert conditional("gt(1)", 2)
assert conditional("le(2)", 2)
assert conditional("lt(3)", 2)
assert conditional("eq(1)", 1)
assert conditional("neq(0)", 1)
assert conditional("min(1)", 1)
assert conditional("max(1)", 1)
assert conditional("exactly(1)", 1)
def test_template():
tmpl = Template()
assert 'foo' == tmpl('{{ test }}', {'test': 'foo'})
def test_to_masklen():
assert 24 == to_masklen('255.255.255.0')
def test_to_masklen_invalid():
with pytest.raises(ValueError):
to_masklen('255')
def test_to_netmask():
assert '255.0.0.0' == to_netmask(8)
assert '255.0.0.0' == to_netmask('8')
def test_to_netmask_invalid():
with pytest.raises(ValueError):
to_netmask(128)
def test_to_subnet():
result = to_subnet('192.168.1.1', 24)
assert '192.168.1.0/24' == result
result = to_subnet('192.168.1.1', 24, dotted_notation=True)
assert '192.168.1.0 255.255.255.0' == result
def test_to_subnet_invalid():
with pytest.raises(ValueError):
to_subnet('foo', 'bar')
def test_is_masklen():
assert is_masklen(32)
assert not is_masklen(33)
assert not is_masklen('foo')
def test_is_netmask():
assert is_netmask('255.255.255.255')
assert not is_netmask(24)
assert not is_netmask('foo')
def test_to_ipv6_network():
assert '2001:db8::' == to_ipv6_network('2001:db8::')
assert '2001:0db8:85a3::' == to_ipv6_network('2001:0db8:85a3:0000:0000:8a2e:0370:7334')
assert '2001:0db8:85a3::' == to_ipv6_network('2001:0db8:85a3:0:0:8a2e:0370:7334')
def test_to_ipv6_subnet():
assert '2001:db8::' == to_ipv6_subnet('2001:db8::')
assert '2001:0db8:85a3:4242::' == to_ipv6_subnet('2001:0db8:85a3:4242:0000:8a2e:0370:7334')
assert '2001:0db8:85a3:4242::' == to_ipv6_subnet('2001:0db8:85a3:4242:0:8a2e:0370:7334')
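
The address conversions exercised above can be cross-checked with the standard library's ipaddress module; a short illustrative sketch confirming the same expected values:

import ipaddress

# Counterpart of to_subnet('192.168.1.1', 24): the host bits are masked off.
assert str(ipaddress.ip_network(u'192.168.1.1/24', strict=False)) == '192.168.1.0/24'

# Counterparts of to_netmask(8) and to_masklen('255.255.255.0').
assert str(ipaddress.ip_network(u'0.0.0.0/8').netmask) == '255.0.0.0'
assert ipaddress.ip_network(u'0.0.0.0/255.255.255.0').prefixlen == 24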


@@ -1,446 +0,0 @@
# Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from ansible_collections.community.general.plugins.module_utils.network.ftd.common import equal_objects, delete_ref_duplicates, construct_ansible_facts
# simple objects
def test_equal_objects_return_false_with_different_length():
assert not equal_objects(
{'foo': 1},
{'foo': 1, 'bar': 2}
)
def test_equal_objects_return_false_with_different_fields():
assert not equal_objects(
{'foo': 1},
{'bar': 1}
)
def test_equal_objects_return_false_with_different_value_types():
assert not equal_objects(
{'foo': 1},
{'foo': '1'}
)
def test_equal_objects_return_false_with_different_values():
assert not equal_objects(
{'foo': 1},
{'foo': 2}
)
def test_equal_objects_return_false_with_different_nested_values():
assert not equal_objects(
{'foo': {'bar': 1}},
{'foo': {'bar': 2}}
)
def test_equal_objects_return_false_with_different_list_length():
assert not equal_objects(
{'foo': []},
{'foo': ['bar']}
)
def test_equal_objects_return_true_with_equal_objects():
assert equal_objects(
{'foo': 1, 'bar': 2},
{'bar': 2, 'foo': 1}
)
def test_equal_objects_return_true_with_equal_str_like_values():
assert equal_objects(
{'foo': b'bar'},
{'foo': u'bar'}
)
def test_equal_objects_return_true_with_equal_nested_dicts():
assert equal_objects(
{'foo': {'bar': 1, 'buz': 2}},
{'foo': {'buz': 2, 'bar': 1}}
)
def test_equal_objects_return_true_with_equal_lists():
assert equal_objects(
{'foo': ['bar']},
{'foo': ['bar']}
)
def test_equal_objects_return_true_with_ignored_fields():
assert equal_objects(
{'foo': 1, 'version': '123', 'id': '123123'},
{'foo': 1}
)
# objects with object references
def test_equal_objects_return_false_with_different_ref_ids():
assert not equal_objects(
{'foo': {'id': '1', 'type': 'network', 'ignored_field': 'foo'}},
{'foo': {'id': '2', 'type': 'network', 'ignored_field': 'bar'}}
)
def test_equal_objects_return_false_with_different_ref_types():
assert not equal_objects(
{'foo': {'id': '1', 'type': 'network', 'ignored_field': 'foo'}},
{'foo': {'id': '1', 'type': 'accessRule', 'ignored_field': 'bar'}}
)
def test_equal_objects_return_true_with_same_object_refs():
assert equal_objects(
{'foo': {'id': '1', 'type': 'network', 'ignored_field': 'foo'}},
{'foo': {'id': '1', 'type': 'network', 'ignored_field': 'bar'}}
)
# objects with array of object references
def test_equal_objects_return_false_with_different_array_length():
assert not equal_objects(
{'foo': [
{'id': '1', 'type': 'network', 'ignored_field': 'foo'}
]},
{'foo': []}
)
def test_equal_objects_return_false_with_different_array_order():
assert not equal_objects(
{'foo': [
{'id': '1', 'type': 'network', 'ignored_field': 'foo'},
{'id': '2', 'type': 'network', 'ignored_field': 'bar'}
]},
{'foo': [
{'id': '2', 'type': 'network', 'ignored_field': 'foo'},
{'id': '1', 'type': 'network', 'ignored_field': 'bar'}
]}
)
def test_equal_objects_return_true_with_equal_ref_arrays():
assert equal_objects(
{'foo': [
{'id': '1', 'type': 'network', 'ignored_field': 'foo'}
]},
{'foo': [
{'id': '1', 'type': 'network', 'ignored_field': 'bar'}
]}
)
# objects with nested structures and object references
def test_equal_objects_return_true_with_equal_nested_object_references():
assert equal_objects(
{
'name': 'foo',
'config': {
'version': '1',
'port': {
'name': 'oldPortName',
'type': 'port',
'id': '123'
}
}
},
{
'name': 'foo',
'config': {
'version': '1',
'port': {
'name': 'newPortName',
'type': 'port',
'id': '123'
}
}
}
)
def test_equal_objects_return_false_with_different_nested_object_references():
assert not equal_objects(
{
'name': 'foo',
'config': {
'version': '1',
'port': {
'name': 'oldPortName',
'type': 'port',
'id': '123'
}
}
},
{
'name': 'foo',
'config': {
'version': '1',
'port': {
'name': 'oldPortName',
'type': 'port',
'id': '234'
}
}
}
)
def test_equal_objects_return_true_with_equal_nested_list_of_object_references():
assert equal_objects(
{
'name': 'foo',
'config': {
'version': '1',
'ports': [{
'name': 'oldPortName',
'type': 'port',
'id': '123'
}, {
'name': 'oldPortName2',
'type': 'port',
'id': '234'
}]
}
},
{
'name': 'foo',
'config': {
'version': '1',
'ports': [{
'name': 'newPortName',
'type': 'port',
'id': '123'
}, {
'name': 'newPortName2',
'type': 'port',
'id': '234',
'extraField': 'foo'
}]
}
}
)
def test_equal_objects_return_true_with_reference_list_containing_duplicates():
assert equal_objects(
{
'name': 'foo',
'config': {
'version': '1',
'ports': [{
'name': 'oldPortName',
'type': 'port',
'id': '123'
}, {
'name': 'oldPortName',
'type': 'port',
'id': '123'
}, {
'name': 'oldPortName2',
'type': 'port',
'id': '234'
}]
}
},
{
'name': 'foo',
'config': {
'version': '1',
'ports': [{
'name': 'newPortName',
'type': 'port',
'id': '123'
}, {
'name': 'newPortName2',
'type': 'port',
'id': '234',
'extraField': 'foo'
}]
}
}
)
def test_delete_ref_duplicates_with_none():
assert delete_ref_duplicates(None) is None
def test_delete_ref_duplicates_with_empty_dict():
assert {} == delete_ref_duplicates({})
def test_delete_ref_duplicates_with_simple_object():
data = {
'id': '123',
'name': 'foo',
'type': 'bar',
'values': ['a', 'b']
}
assert data == delete_ref_duplicates(data)
def test_delete_ref_duplicates_with_object_containing_refs():
data = {
'id': '123',
'name': 'foo',
'type': 'bar',
'refs': [
{'id': '123', 'type': 'baz'},
{'id': '234', 'type': 'baz'},
{'id': '234', 'type': 'foo'}
]
}
assert data == delete_ref_duplicates(data)
def test_delete_ref_duplicates_with_object_containing_duplicate_refs():
data = {
'id': '123',
'name': 'foo',
'type': 'bar',
'refs': [
{'id': '123', 'type': 'baz'},
{'id': '123', 'type': 'baz'},
{'id': '234', 'type': 'baz'},
{'id': '234', 'type': 'baz'},
{'id': '234', 'type': 'foo'}
]
}
assert {
'id': '123',
'name': 'foo',
'type': 'bar',
'refs': [
{'id': '123', 'type': 'baz'},
{'id': '234', 'type': 'baz'},
{'id': '234', 'type': 'foo'}
]
} == delete_ref_duplicates(data)
def test_delete_ref_duplicates_with_object_containing_duplicate_refs_in_nested_object():
data = {
'id': '123',
'name': 'foo',
'type': 'bar',
'children': {
'refs': [
{'id': '123', 'type': 'baz'},
{'id': '123', 'type': 'baz'},
{'id': '234', 'type': 'baz'},
{'id': '234', 'type': 'baz'},
{'id': '234', 'type': 'foo'}
]
}
}
assert {
'id': '123',
'name': 'foo',
'type': 'bar',
'children': {
'refs': [
{'id': '123', 'type': 'baz'},
{'id': '234', 'type': 'baz'},
{'id': '234', 'type': 'foo'}
]
}
} == delete_ref_duplicates(data)
def test_construct_ansible_facts_should_make_default_fact_with_name_and_type():
response = {
'id': '123',
'name': 'foo',
'type': 'bar'
}
assert {'bar_foo': response} == construct_ansible_facts(response, {})
def test_construct_ansible_facts_should_not_make_default_fact_with_no_name():
response = {
'id': '123',
'name': 'foo'
}
assert {} == construct_ansible_facts(response, {})
def test_construct_ansible_facts_should_not_make_default_fact_with_no_type():
response = {
'id': '123',
'type': 'bar'
}
assert {} == construct_ansible_facts(response, {})
def test_construct_ansible_facts_should_use_register_as_when_given():
response = {
'id': '123',
'name': 'foo',
'type': 'bar'
}
params = {'register_as': 'fact_name'}
assert {'fact_name': response} == construct_ansible_facts(response, params)
def test_construct_ansible_facts_should_extract_items():
response = {'items': [
{
'id': '123',
'name': 'foo',
'type': 'bar'
}, {
'id': '123',
'name': 'foo',
'type': 'bar'
}
]}
params = {'register_as': 'fact_name'}
assert {'fact_name': response['items']} == construct_ansible_facts(response, params)
def test_construct_ansible_facts_should_ignore_items_with_no_register_as():
response = {'items': [
{
'id': '123',
'name': 'foo',
'type': 'bar'
}, {
'id': '123',
'name': 'foo',
'type': 'bar'
}
]}
assert {} == construct_ansible_facts(response, {})
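
A minimal sketch of the de-duplication contract delete_ref_duplicates is tested against above: entries of a reference list count as duplicates when both id and type match, and the first occurrence wins. Illustrative only, not the module_utils implementation:

def dedupe_refs_sketch(refs):
    # Keep the first occurrence of each (id, type) pair, preserving order.
    seen = set()
    unique = []
    for ref in refs:
        key = (ref.get('id'), ref.get('type'))
        if key not in seen:
            seen.add(key)
            unique.append(ref)
    return unique

refs = [{'id': '123', 'type': 'baz'}, {'id': '123', 'type': 'baz'}, {'id': '234', 'type': 'foo'}]
assert dedupe_refs_sketch(refs) == [{'id': '123', 'type': 'baz'}, {'id': '234', 'type': 'foo'}]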


@@ -1,588 +0,0 @@
# Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import json
import unittest
import pytest
from ansible_collections.community.general.tests.unit.compat import mock
from ansible_collections.community.general.tests.unit.compat.mock import call, patch
from ansible_collections.community.general.plugins.module_utils.network.ftd.common import HTTPMethod, FtdUnexpectedResponse
from ansible_collections.community.general.plugins.module_utils.network.ftd.configuration import iterate_over_pageable_resource, BaseConfigurationResource, \
OperationChecker, OperationNamePrefix, ParamName, QueryParams
from ansible_collections.community.general.plugins.module_utils.network.ftd.fdm_swagger_client import ValidationError, OperationField
class TestBaseConfigurationResource(object):
@pytest.fixture
def connection_mock(self, mocker):
connection_class_mock = mocker.patch('ansible_collections.community.general.plugins.modules.network.ftd.ftd_configuration.Connection')
connection_instance = connection_class_mock.return_value
connection_instance.validate_data.return_value = True, None
connection_instance.validate_query_params.return_value = True, None
connection_instance.validate_path_params.return_value = True, None
return connection_instance
@patch.object(BaseConfigurationResource, '_fetch_system_info')
@patch.object(BaseConfigurationResource, '_send_request')
def test_get_objects_by_filter_with_multiple_filters(self, send_request_mock, fetch_system_info_mock,
connection_mock):
objects = [
{'name': 'obj1', 'type': 1, 'foo': {'bar': 'buzz'}},
{'name': 'obj2', 'type': 1, 'foo': {'bar': 'buz'}},
{'name': 'obj3', 'type': 2, 'foo': {'bar': 'buzz'}}
]
fetch_system_info_mock.return_value = {
'databaseInfo': {
'buildVersion': '6.3.0'
}
}
connection_mock.get_operation_spec.return_value = {
'method': HTTPMethod.GET,
'url': '/object/'
}
resource = BaseConfigurationResource(connection_mock, False)
send_request_mock.side_effect = [{'items': objects}, {'items': []}]
        # resource.get_objects_by_filter returns a generator, so it must be evaluated
        # to compare the generated list with the expected one.
assert objects == list(resource.get_objects_by_filter('test', {}))
send_request_mock.assert_has_calls(
[
mock.call('/object/', 'get', {}, {}, {'limit': 10, 'offset': 0})
]
)
send_request_mock.reset_mock()
send_request_mock.side_effect = [{'items': objects}, {'items': []}]
        # resource.get_objects_by_filter returns a generator, so it must be evaluated
        # to compare the generated list with the expected one.
assert [objects[0]] == list(resource.get_objects_by_filter('test', {ParamName.FILTERS: {'name': 'obj1'}}))
send_request_mock.assert_has_calls(
[
mock.call('/object/', 'get', {}, {}, {QueryParams.FILTER: 'name:obj1', 'limit': 10, 'offset': 0})
]
)
send_request_mock.reset_mock()
send_request_mock.side_effect = [{'items': objects}, {'items': []}]
        # resource.get_objects_by_filter returns a generator, so it must be evaluated
        # to compare the generated list with the expected one.
assert [objects[1]] == list(resource.get_objects_by_filter(
'test',
{ParamName.FILTERS: {'name': 'obj2', 'type': 1, 'foo': {'bar': 'buz'}}}))
send_request_mock.assert_has_calls(
[
mock.call('/object/', 'get', {}, {}, {QueryParams.FILTER: 'name:obj2', 'limit': 10, 'offset': 0})
]
)
@patch.object(BaseConfigurationResource, '_fetch_system_info')
@patch.object(BaseConfigurationResource, '_send_request')
def test_get_objects_by_filter_with_multiple_responses(self, send_request_mock, fetch_system_info_mock,
connection_mock):
send_request_mock.side_effect = [
{'items': [
{'name': 'obj1', 'type': 'foo'},
{'name': 'obj2', 'type': 'bar'}
]},
{'items': [
{'name': 'obj3', 'type': 'foo'}
]},
{'items': []}
]
fetch_system_info_mock.return_value = {
'databaseInfo': {
'buildVersion': '6.3.0'
}
}
connection_mock.get_operation_spec.return_value = {
'method': HTTPMethod.GET,
'url': '/object/'
}
resource = BaseConfigurationResource(connection_mock, False)
assert [{'name': 'obj1', 'type': 'foo'}] == list(resource.get_objects_by_filter(
'test',
{ParamName.FILTERS: {'type': 'foo'}}))
send_request_mock.assert_has_calls(
[
mock.call('/object/', 'get', {}, {}, {'limit': 10, 'offset': 0})
]
)
send_request_mock.reset_mock()
send_request_mock.side_effect = [
{'items': [
{'name': 'obj1', 'type': 'foo'},
{'name': 'obj2', 'type': 'bar'}
]},
{'items': [
{'name': 'obj3', 'type': 'foo'}
]},
{'items': []}
]
resp = list(resource.get_objects_by_filter(
'test',
{
ParamName.FILTERS: {'type': 'foo'},
ParamName.QUERY_PARAMS: {'limit': 2}
}))
assert [{'name': 'obj1', 'type': 'foo'}, {'name': 'obj3', 'type': 'foo'}] == resp
send_request_mock.assert_has_calls(
[
mock.call('/object/', 'get', {}, {}, {'limit': 2, 'offset': 0}),
mock.call('/object/', 'get', {}, {}, {'limit': 2, 'offset': 2})
]
)
def test_module_should_fail_if_validation_error_in_data(self, connection_mock):
connection_mock.get_operation_spec.return_value = {'method': HTTPMethod.POST, 'url': '/test'}
report = {
'required': ['objects[0].type'],
'invalid_type': [
{
'path': 'objects[3].id',
'expected_type': 'string',
'actually_value': 1
}
]
}
connection_mock.validate_data.return_value = (False, json.dumps(report, sort_keys=True, indent=4))
with pytest.raises(ValidationError) as e_info:
resource = BaseConfigurationResource(connection_mock, False)
resource.crud_operation('addTest', {'data': {}})
result = e_info.value.args[0]
key = 'Invalid data provided'
assert result[key]
result[key] = json.loads(result[key])
assert result == {key: {
'invalid_type': [{'actually_value': 1, 'expected_type': 'string', 'path': 'objects[3].id'}],
'required': ['objects[0].type']
}}
def test_module_should_fail_if_validation_error_in_query_params(self, connection_mock):
connection_mock.get_operation_spec.return_value = {'method': HTTPMethod.GET, 'url': '/test',
'returnMultipleItems': False}
report = {
'required': ['objects[0].type'],
'invalid_type': [
{
'path': 'objects[3].id',
'expected_type': 'string',
'actually_value': 1
}
]
}
connection_mock.validate_query_params.return_value = (False, json.dumps(report, sort_keys=True, indent=4))
with pytest.raises(ValidationError) as e_info:
resource = BaseConfigurationResource(connection_mock, False)
resource.crud_operation('getTestList', {'data': {}})
result = e_info.value.args[0]
key = 'Invalid query_params provided'
assert result[key]
result[key] = json.loads(result[key])
assert result == {key: {
'invalid_type': [{'actually_value': 1, 'expected_type': 'string', 'path': 'objects[3].id'}],
'required': ['objects[0].type']}}
def test_module_should_fail_if_validation_error_in_path_params(self, connection_mock):
connection_mock.get_operation_spec.return_value = {'method': HTTPMethod.GET, 'url': '/test',
'returnMultipleItems': False}
report = {
'path_params': {
'required': ['objects[0].type'],
'invalid_type': [
{
'path': 'objects[3].id',
'expected_type': 'string',
'actually_value': 1
}
]
}
}
connection_mock.validate_path_params.return_value = (False, json.dumps(report, sort_keys=True, indent=4))
with pytest.raises(ValidationError) as e_info:
resource = BaseConfigurationResource(connection_mock, False)
resource.crud_operation('putTest', {'data': {}})
result = e_info.value.args[0]
key = 'Invalid path_params provided'
assert result[key]
result[key] = json.loads(result[key])
assert result == {key: {
'path_params': {
'invalid_type': [{'actually_value': 1, 'expected_type': 'string', 'path': 'objects[3].id'}],
'required': ['objects[0].type']}}}
def test_module_should_fail_if_validation_error_in_all_params(self, connection_mock):
connection_mock.get_operation_spec.return_value = {'method': HTTPMethod.POST, 'url': '/test'}
report = {
'data': {
'required': ['objects[0].type'],
'invalid_type': [
{
'path': 'objects[3].id',
'expected_type': 'string',
'actually_value': 1
}
]
},
'path_params': {
'required': ['some_param'],
'invalid_type': [
{
'path': 'name',
'expected_type': 'string',
'actually_value': True
}
]
},
'query_params': {
'required': ['other_param'],
'invalid_type': [
{
'path': 'f_integer',
'expected_type': 'integer',
'actually_value': "test"
}
]
}
}
connection_mock.validate_data.return_value = (False, json.dumps(report['data'], sort_keys=True, indent=4))
connection_mock.validate_query_params.return_value = (False,
json.dumps(report['query_params'], sort_keys=True,
indent=4))
connection_mock.validate_path_params.return_value = (False,
json.dumps(report['path_params'], sort_keys=True,
indent=4))
with pytest.raises(ValidationError) as e_info:
resource = BaseConfigurationResource(connection_mock, False)
resource.crud_operation('putTest', {'data': {}})
result = e_info.value.args[0]
key_data = 'Invalid data provided'
assert result[key_data]
result[key_data] = json.loads(result[key_data])
key_path_params = 'Invalid path_params provided'
assert result[key_path_params]
result[key_path_params] = json.loads(result[key_path_params])
key_query_params = 'Invalid query_params provided'
assert result[key_query_params]
result[key_query_params] = json.loads(result[key_query_params])
assert result == {
key_data: {'invalid_type': [{'actually_value': 1, 'expected_type': 'string', 'path': 'objects[3].id'}],
'required': ['objects[0].type']},
key_path_params: {'invalid_type': [{'actually_value': True, 'expected_type': 'string', 'path': 'name'}],
'required': ['some_param']},
key_query_params: {
'invalid_type': [{'actually_value': 'test', 'expected_type': 'integer', 'path': 'f_integer'}],
'required': ['other_param']}}
@pytest.mark.parametrize("test_api_version, expected_result",
[
("6.2.3", "name:object_name"),
("6.3.0", "name:object_name"),
("6.4.0", "fts~object_name")
]
)
def test_stringify_name_filter(self, test_api_version, expected_result, connection_mock):
filters = {"name": "object_name"}
with patch.object(BaseConfigurationResource, '_fetch_system_info') as fetch_system_info_mock:
fetch_system_info_mock.return_value = {
'databaseInfo': {
'buildVersion': test_api_version
}
}
resource = BaseConfigurationResource(connection_mock, False)
assert resource._stringify_name_filter(filters) == expected_result, "Unexpected result for version %s" % (
test_api_version)
class TestIterateOverPageableResource(object):
def test_iterate_over_pageable_resource_with_no_items(self):
resource_func = mock.Mock(return_value={'items': []})
items = iterate_over_pageable_resource(resource_func, {'query_params': {}})
assert [] == list(items)
def test_iterate_over_pageable_resource_with_one_page(self):
resource_func = mock.Mock(side_effect=[
{'items': ['foo', 'bar']},
{'items': []},
])
items = iterate_over_pageable_resource(resource_func, {'query_params': {}})
assert ['foo', 'bar'] == list(items)
resource_func.assert_has_calls([
call(params={'query_params': {'offset': 0, 'limit': 10}})
])
def test_iterate_over_pageable_resource_with_multiple_pages(self):
objects = [
{'items': ['foo']},
{'items': ['bar']},
{'items': ['buzz']},
{'items': []},
]
resource_func = mock.Mock(side_effect=objects)
items = iterate_over_pageable_resource(resource_func, {'query_params': {}})
assert ['foo'] == list(items)
resource_func.reset_mock()
resource_func = mock.Mock(side_effect=objects)
items = iterate_over_pageable_resource(resource_func, {'query_params': {'limit': 1}})
assert ['foo', 'bar', 'buzz'] == list(items)
def test_iterate_over_pageable_resource_should_preserve_query_params(self):
resource_func = mock.Mock(return_value={'items': []})
items = iterate_over_pageable_resource(resource_func, {'query_params': {'filter': 'name:123'}})
assert [] == list(items)
resource_func.assert_called_once_with(params={'query_params': {'filter': 'name:123', 'offset': 0, 'limit': 10}})
def test_iterate_over_pageable_resource_should_preserve_limit(self):
resource_func = mock.Mock(side_effect=[
{'items': ['foo']},
{'items': []},
])
items = iterate_over_pageable_resource(resource_func, {'query_params': {'limit': 1}})
assert ['foo'] == list(items)
resource_func.assert_has_calls([
call(params={'query_params': {'offset': 0, 'limit': 1}})
])
def test_iterate_over_pageable_resource_should_preserve_offset(self):
resource_func = mock.Mock(side_effect=[
{'items': ['foo']},
{'items': []},
])
items = iterate_over_pageable_resource(resource_func, {'query_params': {'offset': 3}})
assert ['foo'] == list(items)
resource_func.assert_has_calls([
call(params={'query_params': {'offset': 3, 'limit': 10}}),
])
def test_iterate_over_pageable_resource_should_pass_with_string_offset_and_limit(self):
resource_func = mock.Mock(side_effect=[
{'items': ['foo']},
{'items': []},
])
items = iterate_over_pageable_resource(resource_func, {'query_params': {'offset': '1', 'limit': '1'}})
assert ['foo'] == list(items)
resource_func.assert_has_calls([
call(params={'query_params': {'offset': '1', 'limit': '1'}}),
call(params={'query_params': {'offset': 2, 'limit': '1'}})
])
def test_iterate_over_pageable_resource_raises_exception_when_server_returned_more_items_than_requested(self):
resource_func = mock.Mock(side_effect=[
{'items': ['foo', 'redundant_bar']},
{'items': []},
])
with pytest.raises(FtdUnexpectedResponse):
list(iterate_over_pageable_resource(resource_func, {'query_params': {'offset': '1', 'limit': '1'}}))
resource_func.assert_has_calls([
call(params={'query_params': {'offset': '1', 'limit': '1'}})
])
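# A minimal pagination sketch (an assumption, not the module's real
# iterate_over_pageable_resource): default offset/limit to 0/10, keep the
# caller's other query params, advance the offset page by page, stop once a
# page comes back shorter than the limit, and treat an over-long page as an
# error (the real helper raises FtdUnexpectedResponse for that case, as the
# last test above shows; a plain ValueError is used here for simplicity).
def iterate_pages_sketch(resource_func, params, default_limit=10):
    query = dict(params.get('query_params', {}))
    query.setdefault('offset', 0)
    query.setdefault('limit', default_limit)
    while True:
        page = resource_func(params={'query_params': dict(query)})['items']
        if len(page) > int(query['limit']):
            raise ValueError('Server returned more items than requested')
        for item in page:
            yield item
        if len(page) < int(query['limit']):
            return
        query['offset'] = int(query['offset']) + int(query['limit'])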
class TestOperationCheckerClass(unittest.TestCase):
def setUp(self):
self._checker = OperationChecker
def test_is_add_operation_positive(self):
operation_name = OperationNamePrefix.ADD + "Object"
operation_spec = {OperationField.METHOD: HTTPMethod.POST}
assert self._checker.is_add_operation(operation_name, operation_spec)
def test_is_add_operation_wrong_method_in_spec(self):
operation_name = OperationNamePrefix.ADD + "Object"
operation_spec = {OperationField.METHOD: HTTPMethod.GET}
assert not self._checker.is_add_operation(operation_name, operation_spec)
def test_is_add_operation_negative_wrong_operation_name(self):
operation_name = OperationNamePrefix.GET + "Object"
operation_spec = {OperationField.METHOD: HTTPMethod.POST}
assert not self._checker.is_add_operation(operation_name, operation_spec)
def test_is_edit_operation_positive(self):
operation_name = OperationNamePrefix.EDIT + "Object"
operation_spec = {OperationField.METHOD: HTTPMethod.PUT}
assert self._checker.is_edit_operation(operation_name, operation_spec)
def test_is_edit_operation_wrong_method_in_spec(self):
operation_name = OperationNamePrefix.EDIT + "Object"
operation_spec = {OperationField.METHOD: HTTPMethod.GET}
assert not self._checker.is_edit_operation(operation_name, operation_spec)
def test_is_edit_operation_negative_wrong_operation_name(self):
operation_name = OperationNamePrefix.GET + "Object"
operation_spec = {OperationField.METHOD: HTTPMethod.PUT}
assert not self._checker.is_edit_operation(operation_name, operation_spec)
def test_is_delete_operation_positive(self):
operation_name = OperationNamePrefix.DELETE + "Object"
operation_spec = {OperationField.METHOD: HTTPMethod.DELETE}
self.assertTrue(
self._checker.is_delete_operation(operation_name, operation_spec)
)
def test_is_delete_operation_wrong_method_in_spec(self):
operation_name = OperationNamePrefix.DELETE + "Object"
operation_spec = {OperationField.METHOD: HTTPMethod.GET}
assert not self._checker.is_delete_operation(operation_name, operation_spec)
def test_is_delete_operation_negative_wrong_operation_name(self):
operation_name = OperationNamePrefix.GET + "Object"
operation_spec = {OperationField.METHOD: HTTPMethod.DELETE}
assert not self._checker.is_delete_operation(operation_name, operation_spec)
def test_is_get_list_operation_positive(self):
operation_name = OperationNamePrefix.GET + "Object"
operation_spec = {
OperationField.METHOD: HTTPMethod.GET,
OperationField.RETURN_MULTIPLE_ITEMS: True
}
assert self._checker.is_get_list_operation(operation_name, operation_spec)
def test_is_get_list_operation_wrong_method_in_spec(self):
operation_name = OperationNamePrefix.GET + "Object"
operation_spec = {
OperationField.METHOD: HTTPMethod.POST,
OperationField.RETURN_MULTIPLE_ITEMS: True
}
assert not self._checker.is_get_list_operation(operation_name, operation_spec)
def test_is_get_list_operation_does_not_return_list(self):
operation_name = OperationNamePrefix.GET + "Object"
operation_spec = {
OperationField.METHOD: HTTPMethod.GET,
OperationField.RETURN_MULTIPLE_ITEMS: False
}
assert not self._checker.is_get_list_operation(operation_name, operation_spec)
def test_is_get_operation_positive(self):
operation_name = OperationNamePrefix.GET + "Object"
operation_spec = {
OperationField.METHOD: HTTPMethod.GET,
OperationField.RETURN_MULTIPLE_ITEMS: False
}
self.assertTrue(
self._checker.is_get_operation(operation_name, operation_spec)
)
def test_is_get_operation_wrong_method_in_spec(self):
operation_name = OperationNamePrefix.ADD + "Object"
operation_spec = {
OperationField.METHOD: HTTPMethod.POST,
OperationField.RETURN_MULTIPLE_ITEMS: False
}
assert not self._checker.is_get_operation(operation_name, operation_spec)
def test_is_get_operation_negative_when_returns_multiple(self):
operation_name = OperationNamePrefix.GET + "Object"
operation_spec = {
OperationField.METHOD: HTTPMethod.GET,
OperationField.RETURN_MULTIPLE_ITEMS: True
}
assert not self._checker.is_get_operation(operation_name, operation_spec)
def test_is_upsert_operation_positive(self):
operation_name = OperationNamePrefix.UPSERT + "Object"
assert self._checker.is_upsert_operation(operation_name)
def test_is_upsert_operation_with_wrong_operation_name(self):
for op_type in [OperationNamePrefix.ADD, OperationNamePrefix.GET, OperationNamePrefix.EDIT,
OperationNamePrefix.DELETE]:
operation_name = op_type + "Object"
assert not self._checker.is_upsert_operation(operation_name)
def test_is_find_by_filter_operation(self):
operation_name = OperationNamePrefix.GET + "Object"
operation_spec = {
OperationField.METHOD: HTTPMethod.GET,
OperationField.RETURN_MULTIPLE_ITEMS: True
}
params = {ParamName.FILTERS: 1}
self.assertTrue(
self._checker.is_find_by_filter_operation(
operation_name, params, operation_spec
)
)
def test_is_find_by_filter_operation_negative_when_filters_empty(self):
operation_name = OperationNamePrefix.GET + "Object"
operation_spec = {
OperationField.METHOD: HTTPMethod.GET,
OperationField.RETURN_MULTIPLE_ITEMS: True
}
params = {ParamName.FILTERS: None}
assert not self._checker.is_find_by_filter_operation(
operation_name, params, operation_spec
)
params = {}
assert not self._checker.is_find_by_filter_operation(
operation_name, params, operation_spec
)
def test_is_upsert_operation_supported_operation(self):
get_list_op_spec = {OperationField.METHOD: HTTPMethod.GET, OperationField.RETURN_MULTIPLE_ITEMS: True}
add_op_spec = {OperationField.METHOD: HTTPMethod.POST}
edit_op_spec = {OperationField.METHOD: HTTPMethod.PUT}
assert self._checker.is_upsert_operation_supported({'getList': get_list_op_spec, 'edit': edit_op_spec})
assert self._checker.is_upsert_operation_supported(
{'add': add_op_spec, 'getList': get_list_op_spec, 'edit': edit_op_spec})
assert not self._checker.is_upsert_operation_supported({'getList': get_list_op_spec})
assert not self._checker.is_upsert_operation_supported({'edit': edit_op_spec})
assert not self._checker.is_upsert_operation_supported({'getList': get_list_op_spec, 'add': add_op_spec})
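# A possible minimal implementation consistent with the assertions above (an
# assumption, not the collection's actual OperationChecker code): upsert is
# supported only when the model exposes both a "get list" operation and an
# edit operation; an add operation on its own is not sufficient.
def is_upsert_supported_sketch(operations):
    has_get_list = any(
        spec.get(OperationField.METHOD) == HTTPMethod.GET and
        spec.get(OperationField.RETURN_MULTIPLE_ITEMS)
        for spec in operations.values())
    has_edit = any(
        spec.get(OperationField.METHOD) == HTTPMethod.PUT
        for spec in operations.values())
    return has_get_list and has_edit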

File diff suppressed because one or more lines are too long

View file

@@ -1,145 +0,0 @@
# Copyright (c) 2019 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import pytest
pytest.importorskip("kick")
from ansible_collections.community.general.plugins.module_utils.network.ftd.device import FtdPlatformFactory, FtdModel, FtdAsa5500xPlatform, \
Ftd2100Platform, AbstractFtdPlatform
from ansible_collections.community.general.tests.unit.plugins.modules.network.ftd.test_ftd_install import DEFAULT_MODULE_PARAMS
class TestFtdModel(object):
def test_has_value_should_return_true_for_existing_models(self):
assert FtdModel.FTD_2120 in FtdModel.supported_models()
assert FtdModel.FTD_ASA5516_X in FtdModel.supported_models()
def test_has_value_should_return_false_for_non_existing_models(self):
assert 'nonExistingModel' not in FtdModel.supported_models()
assert None not in FtdModel.supported_models()
class TestFtdPlatformFactory(object):
@pytest.fixture(autouse=True)
def mock_devices(self, mocker):
mocker.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.device.Kp')
mocker.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.device.Ftd5500x')
def test_factory_should_return_corresponding_platform(self):
ftd_platform = FtdPlatformFactory.create(FtdModel.FTD_ASA5508_X, dict(DEFAULT_MODULE_PARAMS))
assert type(ftd_platform) is FtdAsa5500xPlatform
ftd_platform = FtdPlatformFactory.create(FtdModel.FTD_2130, dict(DEFAULT_MODULE_PARAMS))
assert type(ftd_platform) is Ftd2100Platform
def test_factory_should_raise_error_with_not_supported_model(self):
with pytest.raises(ValueError) as ex:
FtdPlatformFactory.create('nonExistingModel', dict(DEFAULT_MODULE_PARAMS))
assert "FTD model 'nonExistingModel' is not supported by this module." == ex.value.args[0]
class TestAbstractFtdPlatform(object):
def test_install_ftd_image_raise_error_on_abstract_class(self):
with pytest.raises(NotImplementedError):
AbstractFtdPlatform().install_ftd_image(dict(DEFAULT_MODULE_PARAMS))
def test_supports_ftd_model_should_return_true_for_supported_models(self):
assert Ftd2100Platform.supports_ftd_model(FtdModel.FTD_2120)
assert FtdAsa5500xPlatform.supports_ftd_model(FtdModel.FTD_ASA5516_X)
def test_supports_ftd_model_should_return_false_for_non_supported_models(self):
assert not AbstractFtdPlatform.supports_ftd_model(FtdModel.FTD_2120)
assert not Ftd2100Platform.supports_ftd_model(FtdModel.FTD_ASA5508_X)
assert not FtdAsa5500xPlatform.supports_ftd_model(FtdModel.FTD_2120)
def test_parse_rommon_file_location(self):
server, path = AbstractFtdPlatform.parse_rommon_file_location('tftp://1.2.3.4/boot/rommon-boot.foo')
assert '1.2.3.4' == server
assert '/boot/rommon-boot.foo' == path
def test_parse_rommon_file_location_should_fail_for_non_tftp_protocol(self):
with pytest.raises(ValueError) as ex:
AbstractFtdPlatform.parse_rommon_file_location('http://1.2.3.4/boot/rommon-boot.foo')
assert 'The ROMMON image must be downloaded from TFTP server' in str(ex.value)
class TestFtd2100Platform(object):
@pytest.fixture
def kp_mock(self, mocker):
return mocker.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.device.Kp')
@pytest.fixture
def module_params(self):
return dict(DEFAULT_MODULE_PARAMS)
def test_install_ftd_image_should_call_kp_module(self, kp_mock, module_params):
ftd = FtdPlatformFactory.create(FtdModel.FTD_2110, module_params)
ftd.install_ftd_image(module_params)
assert kp_mock.called
assert kp_mock.return_value.ssh_console.called
ftd_line = kp_mock.return_value.ssh_console.return_value
assert ftd_line.baseline_fp2k_ftd.called
assert ftd_line.disconnect.called
def test_install_ftd_image_should_call_disconnect_when_install_fails(self, kp_mock, module_params):
ftd_line = kp_mock.return_value.ssh_console.return_value
ftd_line.baseline_fp2k_ftd.side_effect = Exception('Something went wrong')
ftd = FtdPlatformFactory.create(FtdModel.FTD_2120, module_params)
with pytest.raises(Exception):
ftd.install_ftd_image(module_params)
assert ftd_line.baseline_fp2k_ftd.called
assert ftd_line.disconnect.called
class TestFtdAsa5500xPlatform(object):
@pytest.fixture
def asa5500x_mock(self, mocker):
return mocker.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.device.Ftd5500x')
@pytest.fixture
def module_params(self):
return dict(DEFAULT_MODULE_PARAMS)
def test_install_ftd_image_should_call_kp_module(self, asa5500x_mock, module_params):
ftd = FtdPlatformFactory.create(FtdModel.FTD_ASA5508_X, module_params)
ftd.install_ftd_image(module_params)
assert asa5500x_mock.called
assert asa5500x_mock.return_value.ssh_console.called
ftd_line = asa5500x_mock.return_value.ssh_console.return_value
assert ftd_line.rommon_to_new_image.called
assert ftd_line.disconnect.called
def test_install_ftd_image_should_call_disconnect_when_install_fails(self, asa5500x_mock, module_params):
ftd_line = asa5500x_mock.return_value.ssh_console.return_value
ftd_line.rommon_to_new_image.side_effect = Exception('Something went wrong')
ftd = FtdPlatformFactory.create(FtdModel.FTD_ASA5516_X, module_params)
with pytest.raises(Exception):
ftd.install_ftd_image(module_params)
assert ftd_line.rommon_to_new_image.called
assert ftd_line.disconnect.called
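# Hedged usage sketch based only on the calls exercised in these tests (all
# names come from the imports above): pick the platform implementation for a
# given FTD model and run the image installation with the module parameters.
def install_image_sketch(model, module_params):
    platform = FtdPlatformFactory.create(model, dict(module_params))
    return platform.install_ftd_image(module_params)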

View file

@@ -1,379 +0,0 @@
# Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import copy
import os
import unittest
from ansible_collections.community.general.plugins.module_utils.network.ftd.common import HTTPMethod
from ansible_collections.community.general.plugins.module_utils.network.ftd.fdm_swagger_client import FdmSwaggerParser
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
TEST_DATA_FOLDER = os.path.join(DIR_PATH, 'test_data')
base = {
'basePath': "/api/fdm/v2",
'definitions': {"NetworkObject": {"type": "object",
"properties": {"version": {"type": "string"}, "name": {"type": "string"},
"description": {"type": "string"},
"subType": {"type": "object",
"$ref": "#/definitions/NetworkObjectType"},
"value": {"type": "string"},
"isSystemDefined": {"type": "boolean"},
"dnsResolution": {"type": "object",
"$ref": "#/definitions/FQDNDNSResolution"},
"id": {"type": "string"},
"type": {"type": "string", "default": "networkobject"}},
"required": ["subType", "type", "value", "name"]},
"NetworkObjectWrapper": {
"allOf": [{"$ref": "#/definitions/NetworkObject"}, {"$ref": "#/definitions/LinksWrapper"}]}
},
'paths': {
"/object/networks": {
"get": {"tags": ["NetworkObject"],
"operationId": "getNetworkObjectList",
"responses": {
"200": {
"description": "",
"schema": {"type": "object",
"title": "NetworkObjectList",
"properties": {
"items": {
"type": "array",
"items": {"$ref": "#/definitions/NetworkObjectWrapper"}},
"paging": {
"$ref": "#/definitions/Paging"}},
"required": ["items", "paging"]}}},
"parameters": [
{"name": "offset", "in": "query", "required": False, "type": "integer"},
{"name": "limit", "in": "query", "required": False, "type": "integer"},
{"name": "sort", "in": "query", "required": False, "type": "string"},
{"name": "filter", "in": "query", "required": False, "type": "string"}]},
"post": {"tags": ["NetworkObject"], "operationId": "addNetworkObject",
"responses": {
"200": {"description": "",
"schema": {"type": "object",
"$ref": "#/definitions/NetworkObjectWrapper"}},
"422": {"description": "",
"schema": {"type": "object", "$ref": "#/definitions/ErrorWrapper"}}},
"parameters": [{"in": "body", "name": "body",
"required": True,
"schema": {"$ref": "#/definitions/NetworkObject"}}]}
},
"/object/networks/{objId}": {
"get": {"tags": ["NetworkObject"], "operationId": "getNetworkObject",
"responses": {"200": {"description": "",
"schema": {"type": "object",
"$ref": "#/definitions/NetworkObjectWrapper"}},
"404": {"description": "",
"schema": {"type": "object",
"$ref": "#/definitions/ErrorWrapper"}}},
"parameters": [{"name": "objId", "in": "path", "required": True,
"type": "string"}]},
"put": {"tags": ["NetworkObject"], "operationId": "editNetworkObject",
"responses": {"200": {"description": "",
"schema": {"type": "object",
"$ref": "#/definitions/NetworkObjectWrapper"}},
"422": {"description": "",
"schema": {"type": "object",
"$ref": "#/definitions/ErrorWrapper"}}},
"parameters": [{"name": "objId", "in": "path", "required": True,
"type": "string"},
{"in": "body", "name": "body", "required": True,
"schema": {"$ref": "#/definitions/NetworkObject"}}]},
"delete": {"tags": ["NetworkObject"], "operationId": "deleteNetworkObject",
"responses": {"204": {"description": ""},
"422": {"description": "",
"schema": {"type": "object",
"$ref": "#/definitions/ErrorWrapper"}}},
"parameters": [{"name": "objId", "in": "path", "required": True,
"type": "string"}]}}}
}
def _get_objects(base_object, key_names):
return dict((_key, base_object[_key]) for _key in key_names)
class TestFdmSwaggerParser(unittest.TestCase):
def test_simple_object(self):
self._data = copy.deepcopy(base)
self.fdm_data = FdmSwaggerParser().parse_spec(self._data)
expected_operations = {
'getNetworkObjectList': {
'method': HTTPMethod.GET,
'url': '/api/fdm/v2/object/networks',
'modelName': 'NetworkObject',
'parameters': {
'path': {},
'query': {
'offset': {
'required': False,
'type': 'integer'
},
'limit': {
'required': False,
'type': 'integer'
},
'sort': {
'required': False,
'type': 'string'
},
'filter': {
'required': False,
'type': 'string'
}
}
},
'returnMultipleItems': True,
"tags": ["NetworkObject"]
},
'addNetworkObject': {
'method': HTTPMethod.POST,
'url': '/api/fdm/v2/object/networks',
'modelName': 'NetworkObject',
'parameters': {'path': {},
'query': {}},
'returnMultipleItems': False,
"tags": ["NetworkObject"]
},
'getNetworkObject': {
'method': HTTPMethod.GET,
'url': '/api/fdm/v2/object/networks/{objId}',
'modelName': 'NetworkObject',
'parameters': {
'path': {
'objId': {
'required': True,
'type': "string"
}
},
'query': {}
},
'returnMultipleItems': False,
"tags": ["NetworkObject"]
},
'editNetworkObject': {
'method': HTTPMethod.PUT,
'url': '/api/fdm/v2/object/networks/{objId}',
'modelName': 'NetworkObject',
'parameters': {
'path': {
'objId': {
'required': True,
'type': "string"
}
},
'query': {}
},
'returnMultipleItems': False,
"tags": ["NetworkObject"]
},
'deleteNetworkObject': {
'method': HTTPMethod.DELETE,
'url': '/api/fdm/v2/object/networks/{objId}',
'modelName': 'NetworkObject',
'parameters': {
'path': {
'objId': {
'required': True,
'type': "string"
}
},
'query': {}
},
'returnMultipleItems': False,
"tags": ["NetworkObject"]
}
}
assert sorted(['NetworkObject', 'NetworkObjectWrapper']) == sorted(self.fdm_data['models'].keys())
assert expected_operations == self.fdm_data['operations']
assert {'NetworkObject': expected_operations} == self.fdm_data['model_operations']
def test_simple_object_with_documentation(self):
api_spec = copy.deepcopy(base)
docs = {
'definitions': {
'NetworkObject': {
'description': 'Description for Network Object',
'properties': {'name': 'Description for name field'}
}
},
'paths': {
'/object/networks': {
'get': {
'description': 'Description for getNetworkObjectList operation',
'parameters': [{'name': 'offset', 'description': 'Description for offset field'}]
},
'post': {'description': 'Description for addNetworkObject operation'}
}
}
}
self.fdm_data = FdmSwaggerParser().parse_spec(api_spec, docs)
assert 'Description for Network Object' == self.fdm_data['models']['NetworkObject']['description']
assert '' == self.fdm_data['models']['NetworkObjectWrapper']['description']
network_properties = self.fdm_data['models']['NetworkObject']['properties']
assert '' == network_properties['id']['description']
assert not network_properties['id']['required']
assert 'Description for name field' == network_properties['name']['description']
assert network_properties['name']['required']
ops = self.fdm_data['operations']
assert 'Description for getNetworkObjectList operation' == ops['getNetworkObjectList']['description']
assert 'Description for addNetworkObject operation' == ops['addNetworkObject']['description']
assert '' == ops['deleteNetworkObject']['description']
get_op_params = ops['getNetworkObjectList']['parameters']
assert 'Description for offset field' == get_op_params['query']['offset']['description']
assert '' == get_op_params['query']['limit']['description']
def test_model_operations_should_contain_all_operations(self):
data = {
'basePath': '/v2/',
'definitions': {
'Model1': {"type": "object"},
'Model2': {"type": "object"},
'Model3': {"type": "object"}
},
'paths': {
'path1': {
'get': {
'operationId': 'getSomeModelList',
"responses": {
"200": {"description": "",
"schema": {"type": "object",
"title": "NetworkObjectList",
"properties": {
"items": {
"type": "array",
"items": {
"$ref": "#/definitions/Model1"
}
}
}}
}
}
},
"post": {
"operationId": "addSomeModel",
"parameters": [{"in": "body",
"name": "body",
"schema": {"$ref": "#/definitions/Model2"}
}]}
},
'path2/{id}': {
"get": {"operationId": "getSomeModel",
"responses": {"200": {"description": "",
"schema": {"type": "object",
"$ref": "#/definitions/Model3"}},
}
},
"put": {"operationId": "editSomeModel",
"parameters": [{"in": "body",
"name": "body",
"schema": {"$ref": "#/definitions/Model1"}}
]},
"delete": {
"operationId": "deleteModel3",
}},
'path3': {
"delete": {
"operationId": "deleteNoneModel",
}
}
}
}
expected_operations = {
'getSomeModelList': {
'method': HTTPMethod.GET,
'url': '/v2/path1',
'modelName': 'Model1',
'returnMultipleItems': True,
'tags': []
},
'addSomeModel': {
'method': HTTPMethod.POST,
'url': '/v2/path1',
'modelName': 'Model2',
'parameters': {
'path': {},
'query': {}
},
'returnMultipleItems': False,
'tags': []
},
'getSomeModel': {
'method': HTTPMethod.GET,
'url': '/v2/path2/{id}',
'modelName': 'Model3',
'returnMultipleItems': False,
'tags': []
},
'editSomeModel': {
'method': HTTPMethod.PUT,
'url': '/v2/path2/{id}',
'modelName': 'Model1',
'parameters': {
'path': {},
'query': {}
},
'returnMultipleItems': False,
'tags': []
},
'deleteModel3': {
'method': HTTPMethod.DELETE,
'url': '/v2/path2/{id}',
'modelName': 'Model3',
'returnMultipleItems': False,
'tags': []
},
'deleteNoneModel': {
'method': HTTPMethod.DELETE,
'url': '/v2/path3',
'modelName': None,
'returnMultipleItems': False,
'tags': []
}
}
fdm_data = FdmSwaggerParser().parse_spec(data)
assert sorted(['Model1', 'Model2', 'Model3']) == sorted(fdm_data['models'].keys())
assert expected_operations == fdm_data['operations']
assert {
'Model1': {
'getSomeModelList': expected_operations['getSomeModelList'],
'editSomeModel': expected_operations['editSomeModel'],
},
'Model2': {
'addSomeModel': expected_operations['addSomeModel']
},
'Model3': {
'getSomeModel': expected_operations['getSomeModel'],
'deleteModel3': expected_operations['deleteModel3']
},
None: {
'deleteNoneModel': expected_operations['deleteNoneModel']
}
} == fdm_data['model_operations']

View file

@@ -1,75 +0,0 @@
import json
import os
import unittest
from ansible_collections.community.general.plugins.module_utils.network.ftd.fdm_swagger_client import FdmSwaggerValidator, FdmSwaggerParser
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
TEST_DATA_FOLDER = os.path.join(DIR_PATH, 'test_data')
class TestFdmSwagger(unittest.TestCase):
def setUp(self):
self.init_mock_data()
def init_mock_data(self):
with open(os.path.join(TEST_DATA_FOLDER, 'ngfw_with_ex.json'), 'rb') as f:
self.base_data = json.loads(f.read().decode('utf-8'))
def test_with_all_data(self):
fdm_data = FdmSwaggerParser().parse_spec(self.base_data)
validator = FdmSwaggerValidator(fdm_data)
models = fdm_data['models']
operations = fdm_data['operations']
invalid = set()
for operation in operations:
model_name = operations[operation]['modelName']
method = operations[operation]['method']
if method != 'get' and model_name in models:
if 'example' in models[model_name]:
example = models[model_name]['example']
try:
valid, rez = validator.validate_data(operation, example)
assert valid
except Exception:
invalid.add(model_name)
assert invalid == set(['TCPPortObject',
'UDPPortObject',
'ICMPv4PortObject',
'ICMPv6PortObject',
'StandardAccessList',
'ExtendedAccessList',
'ASPathList',
'RouteMap',
'StandardCommunityList',
'ExpandedCommunityList',
'IPV4PrefixList',
'IPV6PrefixList',
'PolicyList',
'SyslogServer',
'HAConfiguration',
'TestIdentitySource'])
def test_parse_all_data(self):
self.fdm_data = FdmSwaggerParser().parse_spec(self.base_data)
operations = self.fdm_data['operations']
without_model_name = []
expected_operations_counter = 0
for key in self.base_data['paths']:
operation = self.base_data['paths'][key]
for dummy in operation:
expected_operations_counter += 1
for key in operations:
operation = operations[key]
if not operation['modelName']:
without_model_name.append(operation['url'])
if operation['modelName'] == '_File' and 'download' not in operation['url']:
self.fail('File type can be defined for download operation only')
assert sorted(['/api/fdm/v2/operational/deploy/{objId}', '/api/fdm/v2/action/upgrade']) == sorted(
without_model_name)
assert sorted(self.fdm_data['model_operations'][None].keys()) == sorted(['deleteDeployment', 'startUpgrade'])
assert expected_operations_counter == len(operations)
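# Hedged usage sketch, limited to the calls already shown in this test module:
# parse a Swagger spec, build a validator from the parsed data, and validate a
# payload against a named operation; validate_data returns a (valid, report)
# pair, as test_with_all_data relies on above.
def validate_payload_sketch(spec, operation_name, payload):
    fdm_data = FdmSwaggerParser().parse_spec(spec)
    validator = FdmSwaggerValidator(fdm_data)
    valid, report = validator.validate_data(operation_name, payload)
    return valid, report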

View file

@@ -1,886 +0,0 @@
# Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import
import copy
import json
import unittest
import pytest
from ansible_collections.community.general.tests.unit.compat import mock
from ansible_collections.community.general.plugins.module_utils.network.ftd.common import FtdServerError, HTTPMethod, ResponseParams, FtdConfigurationError
from ansible_collections.community.general.plugins.module_utils.network.ftd.configuration import DUPLICATE_NAME_ERROR_MESSAGE, UNPROCESSABLE_ENTITY_STATUS, \
MULTIPLE_DUPLICATES_FOUND_ERROR, BaseConfigurationResource, FtdInvalidOperationNameError, QueryParams, \
ADD_OPERATION_NOT_SUPPORTED_ERROR, ParamName
from ansible_collections.community.general.plugins.module_utils.network.ftd.fdm_swagger_client import ValidationError
ADD_RESPONSE = {'status': 'Object added'}
EDIT_RESPONSE = {'status': 'Object edited'}
DELETE_RESPONSE = {'status': 'Object deleted'}
GET_BY_FILTER_RESPONSE = [{'name': 'foo', 'description': 'bar'}]
ARBITRARY_RESPONSE = {'status': 'Arbitrary request sent'}
class TestUpsertOperationUnitTests(unittest.TestCase):
@mock.patch.object(BaseConfigurationResource, '_fetch_system_info')
def setUp(self, fetch_system_info_mock):
self._conn = mock.MagicMock()
self._resource = BaseConfigurationResource(self._conn)
fetch_system_info_mock.return_value = {
'databaseInfo': {
'buildVersion': '6.3.0'
}
}
def test_get_operation_name(self):
operation_a = mock.MagicMock()
operation_b = mock.MagicMock()
def checker_wrapper(expected_object):
def checker(obj, *args, **kwargs):
return obj == expected_object
return checker
operations = {
operation_a: "spec",
operation_b: "spec"
}
assert operation_a == self._resource._get_operation_name(checker_wrapper(operation_a), operations)
assert operation_b == self._resource._get_operation_name(checker_wrapper(operation_b), operations)
assert self._resource._get_operation_name(checker_wrapper(None), operations) is None
@mock.patch.object(BaseConfigurationResource, "_get_operation_name")
@mock.patch.object(BaseConfigurationResource, "add_object")
def test_add_upserted_object(self, add_object_mock, get_operation_mock):
model_operations = mock.MagicMock()
params = mock.MagicMock()
add_op_name = get_operation_mock.return_value
assert add_object_mock.return_value == self._resource._add_upserted_object(model_operations, params)
get_operation_mock.assert_called_once_with(
self._resource._operation_checker.is_add_operation,
model_operations)
add_object_mock.assert_called_once_with(add_op_name, params)
@mock.patch.object(BaseConfigurationResource, "_get_operation_name")
@mock.patch.object(BaseConfigurationResource, "add_object")
def test_add_upserted_object_with_no_add_operation(self, add_object_mock, get_operation_mock):
model_operations = mock.MagicMock()
get_operation_mock.return_value = None
with pytest.raises(FtdConfigurationError) as exc_info:
self._resource._add_upserted_object(model_operations, mock.MagicMock())
assert ADD_OPERATION_NOT_SUPPORTED_ERROR in str(exc_info.value)
get_operation_mock.assert_called_once_with(self._resource._operation_checker.is_add_operation, model_operations)
add_object_mock.assert_not_called()
@mock.patch.object(BaseConfigurationResource, "_get_operation_name")
@mock.patch.object(BaseConfigurationResource, "edit_object")
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.copy_identity_properties')
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration._set_default')
def test_edit_upserted_object(self, _set_default_mock, copy_properties_mock, edit_object_mock, get_operation_mock):
model_operations = mock.MagicMock()
existing_object = mock.MagicMock()
params = {
'path_params': {},
'data': {}
}
result = self._resource._edit_upserted_object(model_operations, existing_object, params)
assert result == edit_object_mock.return_value
_set_default_mock.assert_has_calls([
mock.call(params, 'path_params', {}),
mock.call(params, 'data', {})
])
get_operation_mock.assert_called_once_with(
self._resource._operation_checker.is_edit_operation,
model_operations
)
copy_properties_mock.assert_called_once_with(
existing_object,
params['data']
)
edit_object_mock.assert_called_once_with(
get_operation_mock.return_value,
params
)
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
@mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
@mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
@mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
@mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
def test_upsert_object_successfully_added(self, edit_mock, add_mock, find_object, get_operation_mock,
is_upsert_supported_mock):
params = mock.MagicMock()
is_upsert_supported_mock.return_value = True
find_object.return_value = None
result = self._resource.upsert_object('upsertFoo', params)
assert result == add_mock.return_value
self._conn.get_model_spec.assert_called_once_with('Foo')
is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
get_operation_mock.assert_called_once_with('Foo')
find_object.assert_called_once_with('Foo', params)
add_mock.assert_called_once_with(get_operation_mock.return_value, params)
edit_mock.assert_not_called()
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.equal_objects')
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
@mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
@mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
@mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
@mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
def test_upsert_object_successfully_edited(self, edit_mock, add_mock, find_object, get_operation_mock,
is_upsert_supported_mock, equal_objects_mock):
params = mock.MagicMock()
existing_obj = mock.MagicMock()
is_upsert_supported_mock.return_value = True
find_object.return_value = existing_obj
equal_objects_mock.return_value = False
result = self._resource.upsert_object('upsertFoo', params)
assert result == edit_mock.return_value
self._conn.get_model_spec.assert_called_once_with('Foo')
get_operation_mock.assert_called_once_with('Foo')
is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
add_mock.assert_not_called()
equal_objects_mock.assert_called_once_with(existing_obj, params[ParamName.DATA])
edit_mock.assert_called_once_with(get_operation_mock.return_value, existing_obj, params)
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.equal_objects')
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
@mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
@mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
@mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
@mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
def test_upsert_object_returned_without_modifications(self, edit_mock, add_mock, find_object, get_operation_mock,
is_upsert_supported_mock, equal_objects_mock):
params = mock.MagicMock()
existing_obj = mock.MagicMock()
is_upsert_supported_mock.return_value = True
find_object.return_value = existing_obj
equal_objects_mock.return_value = True
result = self._resource.upsert_object('upsertFoo', params)
assert result == existing_obj
self._conn.get_model_spec.assert_called_once_with('Foo')
get_operation_mock.assert_called_once_with('Foo')
is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
add_mock.assert_not_called()
equal_objects_mock.assert_called_once_with(existing_obj, params[ParamName.DATA])
edit_mock.assert_not_called()
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
@mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
@mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
@mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
@mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
def test_upsert_object_not_supported(self, edit_mock, add_mock, find_object, get_operation_mock,
is_upsert_supported_mock):
params = mock.MagicMock()
is_upsert_supported_mock.return_value = False
self.assertRaises(
FtdInvalidOperationNameError,
self._resource.upsert_object, 'upsertFoo', params
)
self._conn.get_model_spec.assert_called_once_with('Foo')
get_operation_mock.assert_called_once_with('Foo')
is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
find_object.assert_not_called()
add_mock.assert_not_called()
edit_mock.assert_not_called()
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
@mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
@mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
@mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
@mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
def test_upsert_object_when_model_not_supported(self, edit_mock, add_mock, find_object, get_operation_mock,
is_upsert_supported_mock):
params = mock.MagicMock()
self._conn.get_model_spec.return_value = None
self.assertRaises(
FtdInvalidOperationNameError,
self._resource.upsert_object, 'upsertNonExisting', params
)
self._conn.get_model_spec.assert_called_once_with('NonExisting')
get_operation_mock.assert_not_called()
is_upsert_supported_mock.assert_not_called()
find_object.assert_not_called()
add_mock.assert_not_called()
edit_mock.assert_not_called()
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.equal_objects')
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
@mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
@mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
@mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
@mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
def test_upsert_object_with_fatal_error_during_edit(self, edit_mock, add_mock, find_object, get_operation_mock,
is_upsert_supported_mock, equal_objects_mock):
params = mock.MagicMock()
existing_obj = mock.MagicMock()
is_upsert_supported_mock.return_value = True
find_object.return_value = existing_obj
equal_objects_mock.return_value = False
edit_mock.side_effect = FtdConfigurationError("Some object edit error")
self.assertRaises(
FtdConfigurationError,
self._resource.upsert_object, 'upsertFoo', params
)
is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
self._conn.get_model_spec.assert_called_once_with('Foo')
get_operation_mock.assert_called_once_with('Foo')
find_object.assert_called_once_with('Foo', params)
add_mock.assert_not_called()
edit_mock.assert_called_once_with(get_operation_mock.return_value, existing_obj, params)
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
@mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
@mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
@mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
@mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
def test_upsert_object_with_fatal_error_during_add(self, edit_mock, add_mock, find_object, get_operation_mock,
is_upsert_supported_mock):
params = mock.MagicMock()
is_upsert_supported_mock.return_value = True
find_object.return_value = None
error = FtdConfigurationError("Obj duplication error")
add_mock.side_effect = error
self.assertRaises(
FtdConfigurationError,
self._resource.upsert_object, 'upsertFoo', params
)
is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
self._conn.get_model_spec.assert_called_once_with('Foo')
get_operation_mock.assert_called_once_with('Foo')
find_object.assert_called_once_with('Foo', params)
add_mock.assert_called_once_with(get_operation_mock.return_value, params)
edit_mock.assert_not_called()
# functional tests below
class TestUpsertOperationFunctionalTests(object):
@pytest.fixture(autouse=True)
def connection_mock(self, mocker):
connection_class_mock = mocker.patch('ansible_collections.community.general.plugins.modules.network.ftd.ftd_configuration.Connection')
connection_instance = connection_class_mock.return_value
connection_instance.validate_data.return_value = True, None
connection_instance.validate_query_params.return_value = True, None
connection_instance.validate_path_params.return_value = True, None
return connection_instance
def test_module_should_create_object_when_upsert_operation_and_object_does_not_exist(self, connection_mock):
url = '/test'
operations = {
'getObjectList': {
'method': HTTPMethod.GET,
'url': url,
'modelName': 'Object',
'returnMultipleItems': True},
'addObject': {
'method': HTTPMethod.POST,
'modelName': 'Object',
'url': url},
'editObject': {
'method': HTTPMethod.PUT,
'modelName': 'Object',
'url': '/test/{objId}'},
'otherObjectOperation': {
'method': HTTPMethod.GET,
'modelName': 'Object',
'url': '/test/{objId}',
'returnMultipleItems': False
}
}
def get_operation_spec(name):
return operations[name]
def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
if http_method == HTTPMethod.POST:
assert url_path == url
assert body_params == params['data']
assert query_params == {}
assert path_params == params['path_params']
return {
ResponseParams.SUCCESS: True,
ResponseParams.RESPONSE: ADD_RESPONSE
}
elif http_method == HTTPMethod.GET:
return {
ResponseParams.SUCCESS: True,
ResponseParams.RESPONSE: {'items': []}
}
else:
assert False
connection_mock.get_operation_spec = get_operation_spec
connection_mock.get_operation_specs_by_model_name.return_value = operations
connection_mock.send_request = request_handler
params = {
'operation': 'upsertObject',
'data': {'id': '123', 'name': 'testObject', 'type': 'object'},
'path_params': {'objId': '123'},
'register_as': 'test_var'
}
result = self._resource_execute_operation(params, connection=connection_mock)
assert ADD_RESPONSE == result
def test_module_should_fail_when_no_model(self, connection_mock):
connection_mock.get_model_spec.return_value = None
params = {
'operation': 'upsertObject',
'data': {'id': '123', 'name': 'testObject', 'type': 'object'},
'path_params': {'objId': '123'},
'register_as': 'test_var'
}
with pytest.raises(FtdInvalidOperationNameError) as exc_info:
self._resource_execute_operation(params, connection=connection_mock)
assert 'upsertObject' == exc_info.value.operation_name
def test_module_should_fail_when_no_add_operation_and_no_object(self, connection_mock):
url = '/test'
operations = {
'getObjectList': {
'method': HTTPMethod.GET,
'url': url,
'modelName': 'Object',
'returnMultipleItems': True},
'editObject': {
'method': HTTPMethod.PUT,
'modelName': 'Object',
'url': '/test/{objId}'},
'otherObjectOperation': {
'method': HTTPMethod.GET,
'modelName': 'Object',
'url': '/test/{objId}',
'returnMultipleItems': False
}}
def get_operation_spec(name):
return operations[name]
connection_mock.get_operation_spec = get_operation_spec
connection_mock.get_operation_specs_by_model_name.return_value = operations
connection_mock.send_request.return_value = {
ResponseParams.SUCCESS: True,
ResponseParams.RESPONSE: {'items': []}
}
params = {
'operation': 'upsertObject',
'data': {'id': '123', 'name': 'testObject', 'type': 'object'},
'path_params': {'objId': '123'},
'register_as': 'test_var'
}
with pytest.raises(FtdConfigurationError) as exc_info:
self._resource_execute_operation(params, connection=connection_mock)
assert ADD_OPERATION_NOT_SUPPORTED_ERROR in str(exc_info.value)
# test when the object already exists but with different field values (except id)
def test_module_should_update_object_when_upsert_operation_and_object_exists(self, connection_mock):
url = '/test'
obj_id = '456'
version = 'test_version'
url_with_id_templ = '{0}/{1}'.format(url, '{objId}')
new_value = '0000'
old_value = '1111'
params = {
'operation': 'upsertObject',
'data': {'name': 'testObject', 'value': new_value, 'type': 'object'},
'register_as': 'test_var'
}
def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
if http_method == HTTPMethod.POST:
assert url_path == url
assert body_params == params['data']
assert query_params == {}
assert path_params == {}
return {
ResponseParams.SUCCESS: False,
ResponseParams.RESPONSE: DUPLICATE_NAME_ERROR_MESSAGE,
ResponseParams.STATUS_CODE: UNPROCESSABLE_ENTITY_STATUS
}
elif http_method == HTTPMethod.GET:
is_get_list_req = url_path == url
is_get_req = url_path == url_with_id_templ
assert is_get_req or is_get_list_req
if is_get_list_req:
assert body_params == {}
assert query_params == {QueryParams.FILTER: 'name:testObject', 'limit': 10, 'offset': 0}
assert path_params == {}
elif is_get_req:
assert body_params == {}
assert query_params == {}
assert path_params == {'objId': obj_id}
return {
ResponseParams.SUCCESS: True,
ResponseParams.RESPONSE: {
'items': [
{'name': 'testObject', 'value': old_value, 'type': 'object', 'id': obj_id,
'version': version}
]
}
}
elif http_method == HTTPMethod.PUT:
assert url_path == url_with_id_templ
return {
ResponseParams.SUCCESS: True,
ResponseParams.RESPONSE: body_params
}
else:
assert False
operations = {
'getObjectList': {'method': HTTPMethod.GET, 'url': url, 'modelName': 'Object', 'returnMultipleItems': True},
'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': url},
'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': url_with_id_templ},
'otherObjectOperation': {
'method': HTTPMethod.GET,
'modelName': 'Object',
'url': url_with_id_templ,
'returnMultipleItems': False}
}
def get_operation_spec(name):
return operations[name]
connection_mock.get_operation_spec = get_operation_spec
connection_mock.get_operation_specs_by_model_name.return_value = operations
connection_mock.send_request = request_handler
expected_val = {'name': 'testObject', 'value': new_value, 'type': 'object', 'id': obj_id, 'version': version}
result = self._resource_execute_operation(params, connection=connection_mock)
assert expected_val == result
# test when object exists and all fields have the same value
def test_module_should_not_update_object_when_upsert_operation_and_object_exists_with_the_same_fields(
self, connection_mock):
url = '/test'
url_with_id_templ = '{0}/{1}'.format(url, '{objId}')
params = {
'operation': 'upsertObject',
'data': {'name': 'testObject', 'value': '3333', 'type': 'object'},
'register_as': 'test_var'
}
expected_val = copy.deepcopy(params['data'])
expected_val['version'] = 'test_version'
expected_val['id'] = 'test_id'
def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
if http_method == HTTPMethod.POST:
assert url_path == url
assert body_params == params['data']
assert query_params == {}
assert path_params == {}
return {
ResponseParams.SUCCESS: False,
ResponseParams.RESPONSE: DUPLICATE_NAME_ERROR_MESSAGE,
ResponseParams.STATUS_CODE: UNPROCESSABLE_ENTITY_STATUS
}
elif http_method == HTTPMethod.GET:
assert url_path == url
assert body_params == {}
assert query_params == {QueryParams.FILTER: 'name:testObject', 'limit': 10, 'offset': 0}
assert path_params == {}
return {
ResponseParams.SUCCESS: True,
ResponseParams.RESPONSE: {
'items': [expected_val]
}
}
else:
assert False
operations = {
'getObjectList': {'method': HTTPMethod.GET, 'modelName': 'Object', 'url': url, 'returnMultipleItems': True},
'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': url},
'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': url_with_id_templ},
'otherObjectOperation': {
'method': HTTPMethod.GET,
'modelName': 'Object',
'url': url_with_id_templ,
'returnMultipleItems': False}
}
def get_operation_spec(name):
return operations[name]
connection_mock.get_operation_spec = get_operation_spec
connection_mock.get_operation_specs_by_model_name.return_value = operations
connection_mock.send_request = request_handler
result = self._resource_execute_operation(params, connection=connection_mock)
assert expected_val == result
def test_module_should_fail_when_upsert_operation_is_not_supported(self, connection_mock):
connection_mock.get_operation_specs_by_model_name.return_value = {
'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': '/test'},
'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': '/test/{objId}'},
'otherObjectOperation': {
'method': HTTPMethod.GET,
'modelName': 'Object',
'url': '/test/{objId}',
'returnMultipleItems': False}
}
operation_name = 'upsertObject'
params = {
'operation': operation_name,
'data': {'id': '123', 'name': 'testObject', 'type': 'object'},
'path_params': {'objId': '123'},
'register_as': 'test_var'
}
result = self._resource_execute_operation_with_expected_failure(
expected_exception_class=FtdInvalidOperationNameError,
params=params, connection=connection_mock)
connection_mock.send_request.assert_not_called()
assert operation_name == result.operation_name
# test when the create operation raises FtdConfigurationError and no existing object with id and version can be found
def test_module_should_fail_when_upsert_operation_and_failed_create_without_id_and_version(self, connection_mock):
url = '/test'
url_with_id_templ = '{0}/{1}'.format(url, '{objId}')
params = {
'operation': 'upsertObject',
'data': {'name': 'testObject', 'value': '3333', 'type': 'object'},
'register_as': 'test_var'
}
def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
if http_method == HTTPMethod.POST:
assert url_path == url
assert body_params == params['data']
assert query_params == {}
assert path_params == {}
return {
ResponseParams.SUCCESS: False,
ResponseParams.RESPONSE: DUPLICATE_NAME_ERROR_MESSAGE,
ResponseParams.STATUS_CODE: UNPROCESSABLE_ENTITY_STATUS
}
elif http_method == HTTPMethod.GET:
assert url_path == url
assert body_params == {}
assert query_params == {QueryParams.FILTER: 'name:testObject', 'limit': 10, 'offset': 0}
assert path_params == {}
return {
ResponseParams.SUCCESS: True,
ResponseParams.RESPONSE: {
'items': []
}
}
else:
assert False
operations = {
'getObjectList': {'method': HTTPMethod.GET, 'modelName': 'Object', 'url': url, 'returnMultipleItems': True},
'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': url},
'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': url_with_id_templ},
'otherObjectOperation': {
'method': HTTPMethod.GET,
'modelName': 'Object',
'url': url_with_id_templ,
'returnMultipleItems': False}
}
def get_operation_spec(name):
return operations[name]
connection_mock.get_operation_spec = get_operation_spec
connection_mock.get_operation_specs_by_model_name.return_value = operations
connection_mock.send_request = request_handler
result = self._resource_execute_operation_with_expected_failure(
expected_exception_class=FtdServerError,
params=params, connection=connection_mock)
assert result.code == 422
assert result.response == 'Validation failed due to a duplicate name'
def test_module_should_fail_when_upsert_operation_and_failed_update_operation(self, connection_mock):
url = '/test'
obj_id = '456'
version = 'test_version'
url_with_id_templ = '{0}/{1}'.format(url, '{objId}')
error_code = 404
new_value = '0000'
old_value = '1111'
params = {
'operation': 'upsertObject',
'data': {'name': 'testObject', 'value': new_value, 'type': 'object'},
'register_as': 'test_var'
}
error_msg = 'test error'
def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
if http_method == HTTPMethod.POST:
assert url_path == url
assert body_params == params['data']
assert query_params == {}
assert path_params == {}
return {
ResponseParams.SUCCESS: False,
ResponseParams.RESPONSE: DUPLICATE_NAME_ERROR_MESSAGE,
ResponseParams.STATUS_CODE: UNPROCESSABLE_ENTITY_STATUS
}
elif http_method == HTTPMethod.GET:
is_get_list_req = url_path == url
is_get_req = url_path == url_with_id_templ
assert is_get_req or is_get_list_req
if is_get_list_req:
assert body_params == {}
assert query_params == {QueryParams.FILTER: 'name:testObject', 'limit': 10, 'offset': 0}
elif is_get_req:
assert body_params == {}
assert query_params == {}
assert path_params == {'objId': obj_id}
return {
ResponseParams.SUCCESS: True,
ResponseParams.RESPONSE: {
'items': [
{'name': 'testObject', 'value': old_value, 'type': 'object', 'id': obj_id,
'version': version}
]
}
}
elif http_method == HTTPMethod.PUT:
assert url_path == url_with_id_templ
raise FtdServerError(error_msg, error_code)
else:
assert False
operations = {
'getObjectList': {'method': HTTPMethod.GET, 'modelName': 'Object', 'url': url, 'returnMultipleItems': True},
'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': url},
'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': url_with_id_templ},
'otherObjectOperation': {
'method': HTTPMethod.GET,
'modelName': 'Object',
'url': url_with_id_templ,
'returnMultipleItems': False}
}
def get_operation_spec(name):
return operations[name]
connection_mock.get_operation_spec = get_operation_spec
connection_mock.get_operation_specs_by_model_name.return_value = operations
connection_mock.send_request = request_handler
result = self._resource_execute_operation_with_expected_failure(
expected_exception_class=FtdServerError,
params=params, connection=connection_mock)
assert result.code == error_code
assert result.response == error_msg
def test_module_should_fail_when_upsert_operation_and_invalid_data_for_create_operation(self, connection_mock):
new_value = '0000'
params = {
'operation': 'upsertObject',
'data': {'name': 'testObject', 'value': new_value, 'type': 'object'},
'register_as': 'test_var'
}
connection_mock.send_request.assert_not_called()
operations = {
'getObjectList': {
'method': HTTPMethod.GET,
'modelName': 'Object',
'url': 'sd',
'returnMultipleItems': True},
'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': 'sdf'},
'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': 'sadf'},
'otherObjectOperation': {
'method': HTTPMethod.GET,
'modelName': 'Object',
'url': 'sdfs',
'returnMultipleItems': False}
}
def get_operation_spec(name):
return operations[name]
connection_mock.get_operation_spec = get_operation_spec
connection_mock.get_operation_specs_by_model_name.return_value = operations
report = {
'required': ['objects[0].type'],
'invalid_type': [
{
'path': 'objects[3].id',
'expected_type': 'string',
'actually_value': 1
}
]
}
connection_mock.validate_data.return_value = (False, json.dumps(report, sort_keys=True, indent=4))
key = 'Invalid data provided'
result = self._resource_execute_operation_with_expected_failure(
expected_exception_class=ValidationError,
params=params, connection=connection_mock)
assert len(result.args) == 1
assert key in result.args[0]
assert json.loads(result.args[0][key]) == {
'invalid_type': [{'actually_value': 1, 'expected_type': 'string', 'path': 'objects[3].id'}],
'required': ['objects[0].type']
}
def test_module_should_fail_when_upsert_operation_and_few_objects_found_by_filter(self, connection_mock):
url = '/test'
url_with_id_templ = '{0}/{1}'.format(url, '{objId}')
sample_obj = {'name': 'testObject', 'value': '3333', 'type': 'object'}
params = {
'operation': 'upsertObject',
'data': sample_obj,
'register_as': 'test_var'
}
def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
if http_method == HTTPMethod.POST:
assert url_path == url
assert body_params == params['data']
assert query_params == {}
assert path_params == {}
return {
ResponseParams.SUCCESS: False,
ResponseParams.RESPONSE: DUPLICATE_NAME_ERROR_MESSAGE,
ResponseParams.STATUS_CODE: UNPROCESSABLE_ENTITY_STATUS
}
elif http_method == HTTPMethod.GET:
assert url_path == url
assert body_params == {}
assert query_params == {QueryParams.FILTER: 'name:testObject', 'limit': 10, 'offset': 0}
assert path_params == {}
return {
ResponseParams.SUCCESS: True,
ResponseParams.RESPONSE: {
'items': [sample_obj, sample_obj]
}
}
else:
assert False
operations = {
'getObjectList': {'method': HTTPMethod.GET, 'modelName': 'Object', 'url': url, 'returnMultipleItems': True},
'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': url},
'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': url_with_id_templ},
'otherObjectOperation': {
'method': HTTPMethod.GET,
'modelName': 'Object',
'url': url_with_id_templ,
'returnMultipleItems': False}
}
def get_operation_spec(name):
return operations[name]
connection_mock.get_operation_spec = get_operation_spec
connection_mock.get_operation_specs_by_model_name.return_value = operations
connection_mock.send_request = request_handler
result = self._resource_execute_operation_with_expected_failure(
expected_exception_class=FtdConfigurationError,
params=params, connection=connection_mock)
assert result.msg == MULTIPLE_DUPLICATES_FOUND_ERROR
assert result.obj is None
@staticmethod
def _resource_execute_operation(params, connection):
with mock.patch.object(BaseConfigurationResource, '_fetch_system_info') as fetch_system_info_mock:
fetch_system_info_mock.return_value = {
'databaseInfo': {
'buildVersion': '6.3.0'
}
}
resource = BaseConfigurationResource(connection)
op_name = params['operation']
resp = resource.execute_operation(op_name, params)
return resp
def _resource_execute_operation_with_expected_failure(self, expected_exception_class, params, connection):
with pytest.raises(expected_exception_class) as ex:
self._resource_execute_operation(params, connection)
# 'ex' here is an instance of '_pytest._code.code.ExceptionInfo', not of <expected_exception_class>;
# the actual <expected_exception_class> instance is stored in its 'value' attribute. That is why we return
# 'ex.value' here, so the raised exception can be inspected by the calling test (a short standalone
# illustration of this pattern follows the helper).
return ex.value
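The comment in the helper above relies on a general pytest behaviour: pytest.raises yields an ExceptionInfo wrapper, and the exception object itself is exposed through its value attribute. A minimal standalone illustration, using a hypothetical FancyError that is not part of these tests:
import pytest
class FancyError(Exception):
    pass
def test_exception_is_available_on_value():
    with pytest.raises(FancyError) as ex:
        raise FancyError('boom')
    # 'ex' is an ExceptionInfo object; the raised FancyError lives in ex.value.
    assert isinstance(ex.value, FancyError)
    assert 'boom' in str(ex.value)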

View file

@ -1,176 +0,0 @@
# Copyright (c) 2017 Citrix Systems
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.tests.unit.compat.mock import Mock
from ansible_collections.community.general.plugins.module_utils.network.netscaler.netscaler import (ConfigProxy, get_immutables_intersection,
ensure_feature_is_enabled, log, loglines)
class TestNetscalerConfigProxy(unittest.TestCase):
def test_values_copied_to_actual(self):
actual = Mock()
client = Mock()
values = {
'some_key': 'some_value',
}
ConfigProxy(
actual=actual,
client=client,
attribute_values_dict=values,
readwrite_attrs=['some_key']
)
self.assertEqual(actual.some_key, values['some_key'], msg='Failed to pass correct value from values dict')
def test_none_values_not_copied_to_actual(self):
actual = Mock()
client = Mock()
actual.key_for_none = 'initial'
print('actual %s' % actual.key_for_none)
values = {
'key_for_none': None,
}
print('value %s' % actual.key_for_none)
ConfigProxy(
actual=actual,
client=client,
attribute_values_dict=values,
readwrite_attrs=['key_for_none']
)
self.assertEqual(actual.key_for_none, 'initial')
def test_missing_from_values_dict_not_copied_to_actual(self):
actual = Mock()
client = Mock()
values = {
'irrelevant_key': 'irrelevant_value',
}
print('value %s' % actual.key_for_none)
ConfigProxy(
actual=actual,
client=client,
attribute_values_dict=values,
readwrite_attrs=['key_for_none']
)
print('none %s' % getattr(actual, 'key_for_none'))
self.assertIsInstance(actual.key_for_none, Mock)
def test_bool_yes_no_transform(self):
actual = Mock()
client = Mock()
values = {
'yes_key': True,
'no_key': False,
}
transforms = {
'yes_key': ['bool_yes_no'],
'no_key': ['bool_yes_no']
}
ConfigProxy(
actual=actual,
client=client,
attribute_values_dict=values,
readwrite_attrs=['yes_key', 'no_key'],
transforms=transforms,
)
actual_values = [actual.yes_key, actual.no_key]
self.assertListEqual(actual_values, ['YES', 'NO'])
def test_bool_on_off_transform(self):
actual = Mock()
client = Mock()
values = {
'on_key': True,
'off_key': False,
}
transforms = {
'on_key': ['bool_on_off'],
'off_key': ['bool_on_off']
}
ConfigProxy(
actual=actual,
client=client,
attribute_values_dict=values,
readwrite_attrs=['on_key', 'off_key'],
transforms=transforms,
)
actual_values = [actual.on_key, actual.off_key]
self.assertListEqual(actual_values, ['ON', 'OFF'])
def test_callable_transform(self):
actual = Mock()
client = Mock()
values = {
'transform_key': 'hello',
'transform_chain': 'hello',
}
transforms = {
'transform_key': [lambda v: v.upper()],
'transform_chain': [lambda v: v.upper(), lambda v: v[:4]]
}
ConfigProxy(
actual=actual,
client=client,
attribute_values_dict=values,
readwrite_attrs=['transform_key', 'transform_chain'],
transforms=transforms,
)
actual_values = [actual.transform_key, actual.transform_chain]
self.assertListEqual(actual_values, ['HELLO', 'HELL'])
class TestNetscalerModuleUtils(unittest.TestCase):
def test_immutables_intersection(self):
actual = Mock()
client = Mock()
values = {
'mutable_key': 'some value',
'immutable_key': 'some other value',
}
proxy = ConfigProxy(
actual=actual,
client=client,
attribute_values_dict=values,
readwrite_attrs=['mutable_key', 'immutable_key'],
immutable_attrs=['immutable_key'],
)
keys_to_check = ['mutable_key', 'immutable_key', 'non_existent_key']
result = get_immutables_intersection(proxy, keys_to_check)
self.assertListEqual(result, ['immutable_key'])
def test_ensure_feature_is_enabled(self):
client = Mock()
attrs = {'get_enabled_features.return_value': ['GSLB']}
client.configure_mock(**attrs)
ensure_feature_is_enabled(client, 'GSLB')
ensure_feature_is_enabled(client, 'LB')
client.enable_features.assert_called_once_with('LB')
def test_log_function(self):
messages = [
'First message',
'Second message',
]
log(messages[0])
log(messages[1])
self.assertListEqual(messages, loglines, msg='Log messages not recorded correctly')
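Taken together, the transform tests above pin down a small mapping: 'bool_yes_no' turns True/False into 'YES'/'NO', 'bool_on_off' turns them into 'ON'/'OFF', and callable transforms are applied left to right. A rough sketch of that behaviour, using a hypothetical apply_transforms helper rather than the real ConfigProxy internals:
def apply_transforms(value, transforms):
    # Apply each transform in order; the named transforms map booleans to the
    # string values asserted in the tests above.
    for transform in transforms:
        if transform == 'bool_yes_no':
            value = 'YES' if value else 'NO'
        elif transform == 'bool_on_off':
            value = 'ON' if value else 'OFF'
        elif callable(transform):
            value = transform(value)
    return value
assert apply_transforms(True, ['bool_yes_no']) == 'YES'
assert apply_transforms('hello', [lambda v: v.upper(), lambda v: v[:4]]) == 'HELL'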

View file

@ -1,148 +0,0 @@
#
# (c) 2018 Extreme Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from mock import MagicMock, patch, call
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.module_utils.network.nos import nos
class TestPluginCLIConfNOS(unittest.TestCase):
""" Test class for NOS CLI Conf Methods
"""
def test_get_connection_established(self):
""" Test get_connection with established connection
"""
module = MagicMock()
connection = nos.get_connection(module)
self.assertEqual(connection, module.nos_connection)
@patch('ansible_collections.community.general.plugins.module_utils.network.nos.nos.Connection')
def test_get_connection_new(self, connection):
""" Test get_connection with new connection
"""
socket_path = "little red riding hood"
module = MagicMock(spec=[
'fail_json',
])
module._socket_path = socket_path
connection().get_capabilities.return_value = '{"network_api": "cliconf"}'
returned_connection = nos.get_connection(module)
connection.assert_called_with(socket_path)
self.assertEqual(returned_connection, module.nos_connection)
@patch('ansible_collections.community.general.plugins.module_utils.network.nos.nos.Connection')
def test_get_connection_incorrect_network_api(self, connection):
""" Test get_connection with incorrect network_api response
"""
socket_path = "little red riding hood"
module = MagicMock(spec=[
'fail_json',
])
module._socket_path = socket_path
module.fail_json.side_effect = TypeError
connection().get_capabilities.return_value = '{"network_api": "nope"}'
with self.assertRaises(TypeError):
nos.get_connection(module)
@patch('ansible_collections.community.general.plugins.module_utils.network.nos.nos.Connection')
def test_get_capabilities(self, connection):
""" Test get_capabilities
"""
socket_path = "little red riding hood"
module = MagicMock(spec=[
'fail_json',
])
module._socket_path = socket_path
module.fail_json.side_effect = TypeError
capabilities = {'network_api': 'cliconf'}
connection().get_capabilities.return_value = json.dumps(capabilities)
capabilities_returned = nos.get_capabilities(module)
self.assertEqual(capabilities, capabilities_returned)
@patch('ansible_collections.community.general.plugins.module_utils.network.nos.nos.Connection')
def test_run_commands(self, connection):
""" Test get_capabilities
"""
module = MagicMock()
commands = [
'hello',
'dolly',
'well hello',
'dolly',
'its so nice to have you back',
'where you belong',
]
responses = [
'Dolly, never go away again1',
'Dolly, never go away again2',
'Dolly, never go away again3',
'Dolly, never go away again4',
'Dolly, never go away again5',
'Dolly, never go away again6',
]
module.nos_connection.get.side_effect = responses
run_command_responses = nos.run_commands(module, commands)
calls = []
for command in commands:
calls.append(call(
command,
None,
None
))
module.nos_connection.get.assert_has_calls(calls)
self.assertEqual(responses, run_command_responses)
@patch('ansible_collections.community.general.plugins.module_utils.network.nos.nos.Connection')
def test_load_config(self, connection):
""" Test load_config
"""
module = MagicMock()
commands = [
'what does it take',
'to be',
'number one?',
'two is not a winner',
'and three nobody remember',
]
nos.load_config(module, commands)
module.nos_connection.edit_config.assert_called_once_with(commands)
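The run_commands test above asserts exactly one connection.get(command, None, None) call per command, in order, and that the replies come back as a list. A hedged sketch of the loop those assertions imply (illustrative only, not the actual module_utils code; the real helper may handle prompts and answers differently):
def run_commands_sketch(module, commands):
    # Issue each command over the cached persistent connection and collect the replies.
    responses = []
    for command in commands:
        # The test expects positional (command, prompt, answer) with prompt and answer as None.
        responses.append(module.nos_connection.get(command, None, None))
    return responses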

View file

@ -1,659 +0,0 @@
# Copyright (c) 2017 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
import json
from ansible_collections.community.general.tests.unit.compat.mock import patch
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.module_utils.network.nso import nso
MODULE_PREFIX_MAP = '''
{
"ansible-nso": "an",
"test": "test",
"tailf-ncs": "ncs"
}
'''
SCHEMA_DATA = {
'/an:id-name-leaf': '''
{
"meta": {
"prefix": "an",
"namespace": "http://github.com/ansible/nso",
"types": {
"http://github.com/ansible/nso:id-name-t": [
{
"name": "http://github.com/ansible/nso:id-name-t",
"enumeration": [
{
"label": "id-one"
},
{
"label": "id-two"
}
]
},
{
"name": "identityref"
}
]
},
"keypath": "/an:id-name-leaf"
},
"data": {
"kind": "leaf",
"type": {
"namespace": "http://github.com/ansible/nso",
"name": "id-name-t"
},
"name": "id-name-leaf",
"qname": "an:id-name-leaf"
}
}''',
'/an:id-name-values': '''
{
"meta": {
"prefix": "an",
"namespace": "http://github.com/ansible/nso",
"types": {},
"keypath": "/an:id-name-values"
},
"data": {
"kind": "container",
"name": "id-name-values",
"qname": "an:id-name-values",
"children": [
{
"kind": "list",
"name": "id-name-value",
"qname": "an:id-name-value",
"key": [
"name"
]
}
]
}
}
''',
'/an:id-name-values/id-name-value': '''
{
"meta": {
"prefix": "an",
"namespace": "http://github.com/ansible/nso",
"types": {
"http://github.com/ansible/nso:id-name-t": [
{
"name": "http://github.com/ansible/nso:id-name-t",
"enumeration": [
{
"label": "id-one"
},
{
"label": "id-two"
}
]
},
{
"name": "identityref"
}
]
},
"keypath": "/an:id-name-values/id-name-value"
},
"data": {
"kind": "list",
"name": "id-name-value",
"qname": "an:id-name-value",
"key": [
"name"
],
"children": [
{
"kind": "key",
"name": "name",
"qname": "an:name",
"type": {
"namespace": "http://github.com/ansible/nso",
"name": "id-name-t"
}
},
{
"kind": "leaf",
"type": {
"primitive": true,
"name": "string"
},
"name": "value",
"qname": "an:value"
}
]
}
}
''',
'/test:test': '''
{
"meta": {
"types": {
"http://example.com/test:t15": [
{
"leaf_type":[
{
"name":"string"
}
],
"list_type":[
{
"name":"http://example.com/test:t15",
"leaf-list":true
}
]
}
]
}
},
"data": {
"kind": "list",
"name":"test",
"qname":"test:test",
"key":["name"],
"children": [
{
"kind": "key",
"name": "name",
"qname": "test:name",
"type": {"name":"string","primitive":true}
},
{
"kind": "choice",
"name": "test-choice",
"qname": "test:test-choice",
"cases": [
{
"kind": "case",
"name": "direct-child-case",
"qname":"test:direct-child-case",
"children":[
{
"kind": "leaf",
"name": "direct-child",
"qname": "test:direct-child",
"type": {"name":"string","primitive":true}
}
]
},
{
"kind":"case","name":"nested-child-case","qname":"test:nested-child-case",
"children": [
{
"kind": "choice",
"name": "nested-choice",
"qname": "test:nested-choice",
"cases": [
{
"kind":"case","name":"nested-child","qname":"test:nested-child",
"children": [
{
"kind": "leaf",
"name":"nested-child",
"qname":"test:nested-child",
"type":{"name":"string","primitive":true}}
]
}
]
}
]
}
]
},
{
"kind":"leaf-list",
"name":"device-list",
"qname":"test:device-list",
"type": {
"namespace":"http://example.com/test",
"name":"t15"
}
}
]
}
}
''',
'/test:test/device-list': '''
{
"meta": {
"types": {
"http://example.com/test:t15": [
{
"leaf_type":[
{
"name":"string"
}
],
"list_type":[
{
"name":"http://example.com/test:t15",
"leaf-list":true
}
]
}
]
}
},
"data": {
"kind":"leaf-list",
"name":"device-list",
"qname":"test:device-list",
"type": {
"namespace":"http://example.com/test",
"name":"t15"
}
}
}
''',
'/test:deps': '''
{
"meta": {
},
"data": {
"kind":"container",
"name":"deps",
"qname":"test:deps",
"children": [
{
"kind": "leaf",
"type": {
"primitive": true,
"name": "string"
},
"name": "a",
"qname": "test:a",
"deps": ["/test:deps/c"]
},
{
"kind": "leaf",
"type": {
"primitive": true,
"name": "string"
},
"name": "b",
"qname": "test:b",
"deps": ["/test:deps/a"]
},
{
"kind": "leaf",
"type": {
"primitive": true,
"name": "string"
},
"name": "c",
"qname": "test:c"
}
]
}
}
'''
}
class MockResponse(object):
def __init__(self, method, params, code, body, headers=None):
if headers is None:
headers = {}
self.method = method
self.params = params
self.code = code
self.body = body
self.headers = dict(headers)
def read(self):
return self.body
def mock_call(calls, url, timeout, validate_certs, data=None, headers=None, method=None):
result = calls[0]
del calls[0]
request = json.loads(data)
if result.method != request['method']:
raise ValueError('expected method {0}({1}), got {2}({3})'.format(
result.method, result.params,
request['method'], request['params']))
for key, value in result.params.items():
if key not in request['params']:
raise ValueError('{0} not in parameters'.format(key))
if value != request['params'][key]:
raise ValueError('expected {0} to be {1}, got {2}'.format(
key, value, request['params'][key]))
return result
def get_schema_response(path):
return MockResponse(
'get_schema', {'path': path}, 200, '{{"result": {0}}}'.format(
SCHEMA_DATA[path]))
class TestJsonRpc(unittest.TestCase):
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
def test_exists(self, open_url_mock):
calls = [
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
MockResponse('exists', {'path': '/exists'}, 200, '{"result": {"exists": true}}'),
MockResponse('exists', {'path': '/not-exists'}, 200, '{"result": {"exists": false}}')
]
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
client = nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False)
self.assertEqual(True, client.exists('/exists'))
self.assertEqual(False, client.exists('/not-exists'))
self.assertEqual(0, len(calls))
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
def test_exists_data_not_found(self, open_url_mock):
calls = [
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
MockResponse('exists', {'path': '/list{missing-parent}/list{child}'}, 200, '{"error":{"type":"data.not_found"}}')
]
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
client = nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False)
self.assertEqual(False, client.exists('/list{missing-parent}/list{child}'))
self.assertEqual(0, len(calls))
class TestValueBuilder(unittest.TestCase):
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
def test_identityref_leaf(self, open_url_mock):
calls = [
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
get_schema_response('/an:id-name-leaf'),
MockResponse('get_module_prefix_map', {}, 200, '{{"result": {0}}}'.format(MODULE_PREFIX_MAP))
]
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
parent = "/an:id-name-leaf"
schema_data = json.loads(
SCHEMA_DATA['/an:id-name-leaf'])
schema = schema_data['data']
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
vb.build(parent, None, 'ansible-nso:id-two', schema)
values = list(vb.values)
self.assertEqual(1, len(values))
value = values[0]
self.assertEqual(parent, value.path)
self.assertEqual('set', value.state)
self.assertEqual('an:id-two', value.value)
self.assertEqual(0, len(calls))
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
def test_identityref_key(self, open_url_mock):
calls = [
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
get_schema_response('/an:id-name-values/id-name-value'),
MockResponse('get_module_prefix_map', {}, 200, '{{"result": {0}}}'.format(MODULE_PREFIX_MAP)),
MockResponse('exists', {'path': '/an:id-name-values/id-name-value{an:id-one}'}, 200, '{"result": {"exists": true}}')
]
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
parent = "/an:id-name-values"
schema_data = json.loads(
SCHEMA_DATA['/an:id-name-values/id-name-value'])
schema = schema_data['data']
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
vb.build(parent, 'id-name-value', [{'name': 'ansible-nso:id-one', 'value': '1'}], schema)
values = list(vb.values)
self.assertEqual(1, len(values))
value = values[0]
self.assertEqual('{0}/id-name-value{{an:id-one}}/value'.format(parent), value.path)
self.assertEqual('set', value.state)
self.assertEqual('1', value.value)
self.assertEqual(0, len(calls))
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
def test_nested_choice(self, open_url_mock):
calls = [
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
get_schema_response('/test:test'),
MockResponse('exists', {'path': '/test:test{direct}'}, 200, '{"result": {"exists": true}}'),
MockResponse('exists', {'path': '/test:test{nested}'}, 200, '{"result": {"exists": true}}')
]
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
parent = "/test:test"
schema_data = json.loads(
SCHEMA_DATA['/test:test'])
schema = schema_data['data']
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
vb.build(parent, None, [{'name': 'direct', 'direct-child': 'direct-value'},
{'name': 'nested', 'nested-child': 'nested-value'}], schema)
values = list(vb.values)
self.assertEqual(2, len(values))
value = values[0]
self.assertEqual('{0}{{direct}}/direct-child'.format(parent), value.path)
self.assertEqual('set', value.state)
self.assertEqual('direct-value', value.value)
value = values[1]
self.assertEqual('{0}{{nested}}/nested-child'.format(parent), value.path)
self.assertEqual('set', value.state)
self.assertEqual('nested-value', value.value)
self.assertEqual(0, len(calls))
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
def test_leaf_list_type(self, open_url_mock):
calls = [
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.4"}'),
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
get_schema_response('/test:test')
]
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
parent = "/test:test"
schema_data = json.loads(
SCHEMA_DATA['/test:test'])
schema = schema_data['data']
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
vb.build(parent, None, {'device-list': ['one', 'two']}, schema)
values = list(vb.values)
self.assertEqual(1, len(values))
value = values[0]
self.assertEqual('{0}/device-list'.format(parent), value.path)
self.assertEqual(['one', 'two'], value.value)
self.assertEqual(0, len(calls))
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
def test_leaf_list_type_45(self, open_url_mock):
calls = [
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
get_schema_response('/test:test/device-list')
]
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
parent = "/test:test"
schema_data = json.loads(
SCHEMA_DATA['/test:test'])
schema = schema_data['data']
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
vb.build(parent, None, {'device-list': ['one', 'two']}, schema)
values = list(vb.values)
self.assertEqual(3, len(values))
value = values[0]
self.assertEqual('{0}/device-list'.format(parent), value.path)
self.assertEqual(nso.State.ABSENT, value.state)
value = values[1]
self.assertEqual('{0}/device-list{{one}}'.format(parent), value.path)
self.assertEqual(nso.State.PRESENT, value.state)
value = values[2]
self.assertEqual('{0}/device-list{{two}}'.format(parent), value.path)
self.assertEqual(nso.State.PRESENT, value.state)
self.assertEqual(0, len(calls))
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
def test_sort_by_deps(self, open_url_mock):
calls = [
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
get_schema_response('/test:deps')
]
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
parent = "/test:deps"
schema_data = json.loads(
SCHEMA_DATA['/test:deps'])
schema = schema_data['data']
values = {
'a': '1',
'b': '2',
'c': '3',
}
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
vb.build(parent, None, values, schema)
values = list(vb.values)
self.assertEqual(3, len(values))
value = values[0]
self.assertEqual('{0}/c'.format(parent), value.path)
self.assertEqual('3', value.value)
value = values[1]
self.assertEqual('{0}/a'.format(parent), value.path)
self.assertEqual('1', value.value)
value = values[2]
self.assertEqual('{0}/b'.format(parent), value.path)
self.assertEqual('2', value.value)
self.assertEqual(0, len(calls))
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
def test_sort_by_deps_not_included(self, open_url_mock):
calls = [
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
get_schema_response('/test:deps')
]
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
parent = "/test:deps"
schema_data = json.loads(
SCHEMA_DATA['/test:deps'])
schema = schema_data['data']
values = {
'a': '1',
'b': '2'
}
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
vb.build(parent, None, values, schema)
values = list(vb.values)
self.assertEqual(2, len(values))
value = values[0]
self.assertEqual('{0}/a'.format(parent), value.path)
self.assertEqual('1', value.value)
value = values[1]
self.assertEqual('{0}/b'.format(parent), value.path)
self.assertEqual('2', value.value)
self.assertEqual(0, len(calls))
class TestVerifyVersion(unittest.TestCase):
def test_valid_versions(self):
self.assertTrue(nso.verify_version_str('5.0', [(4, 6), (4, 5, 1)]))
self.assertTrue(nso.verify_version_str('5.1.1', [(4, 6), (4, 5, 1)]))
self.assertTrue(nso.verify_version_str('5.1.1.2', [(4, 6), (4, 5, 1)]))
self.assertTrue(nso.verify_version_str('4.6', [(4, 6), (4, 5, 1)]))
self.assertTrue(nso.verify_version_str('4.6.2', [(4, 6), (4, 5, 1)]))
self.assertTrue(nso.verify_version_str('4.6.2.1', [(4, 6), (4, 5, 1)]))
self.assertTrue(nso.verify_version_str('4.5.1', [(4, 6), (4, 5, 1)]))
self.assertTrue(nso.verify_version_str('4.5.2', [(4, 6), (4, 5, 1)]))
self.assertTrue(nso.verify_version_str('4.5.1.2', [(4, 6), (4, 5, 1)]))
def test_invalid_versions(self):
self.assertFalse(nso.verify_version_str('4.4', [(4, 6), (4, 5, 1)]))
self.assertFalse(nso.verify_version_str('4.4.1', [(4, 6), (4, 5, 1)]))
self.assertFalse(nso.verify_version_str('4.4.1.2', [(4, 6), (4, 5, 1)]))
self.assertFalse(nso.verify_version_str('4.5.0', [(4, 6), (4, 5, 1)]))
class TestValueSort(unittest.TestCase):
def test_sort_parent_depend(self):
values = [
nso.ValueBuilder.Value('/test/list{entry}', '/test/list', 'CREATE', ['']),
nso.ValueBuilder.Value('/test/list{entry}/description', '/test/list/description', 'TEST', ['']),
nso.ValueBuilder.Value('/test/entry', '/test/entry', 'VALUE', ['/test/list', '/test/list/name'])
]
result = [v.path for v in nso.ValueBuilder.sort_values(values)]
self.assertEqual(['/test/list{entry}', '/test/entry', '/test/list{entry}/description'], result)
def test_sort_break_direct_cycle(self):
values = [
nso.ValueBuilder.Value('/test/a', '/test/a', 'VALUE', ['/test/c']),
nso.ValueBuilder.Value('/test/b', '/test/b', 'VALUE', ['/test/a']),
nso.ValueBuilder.Value('/test/c', '/test/c', 'VALUE', ['/test/a'])
]
result = [v.path for v in nso.ValueBuilder.sort_values(values)]
self.assertEqual(['/test/a', '/test/b', '/test/c'], result)
def test_sort_break_indirect_cycle(self):
values = [
nso.ValueBuilder.Value('/test/c', '/test/c', 'VALUE', ['/test/a']),
nso.ValueBuilder.Value('/test/a', '/test/a', 'VALUE', ['/test/b']),
nso.ValueBuilder.Value('/test/b', '/test/b', 'VALUE', ['/test/c'])
]
result = [v.path for v in nso.ValueBuilder.sort_values(values)]
self.assertEqual(['/test/a', '/test/c', '/test/b'], result)
def test_sort_depend_on_self(self):
values = [
nso.ValueBuilder.Value('/test/a', '/test/a', 'VALUE', ['/test/a']),
nso.ValueBuilder.Value('/test/b', '/test/b', 'VALUE', [])
]
result = [v.path for v in nso.ValueBuilder.sort_values(values)]
self.assertEqual(['/test/a', '/test/b'], result)

View file

@ -1,148 +0,0 @@
#
# (c) 2018 Extreme Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from mock import MagicMock, patch, call
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.module_utils.network.slxos import slxos
class TestPluginCLIConfSLXOS(unittest.TestCase):
""" Test class for SLX-OS CLI Conf Methods
"""
def test_get_connection_established(self):
""" Test get_connection with established connection
"""
module = MagicMock()
connection = slxos.get_connection(module)
self.assertEqual(connection, module.slxos_connection)
@patch('ansible_collections.community.general.plugins.module_utils.network.slxos.slxos.Connection')
def test_get_connection_new(self, connection):
""" Test get_connection with new connection
"""
socket_path = "little red riding hood"
module = MagicMock(spec=[
'fail_json',
])
module._socket_path = socket_path
connection().get_capabilities.return_value = '{"network_api": "cliconf"}'
returned_connection = slxos.get_connection(module)
connection.assert_called_with(socket_path)
self.assertEqual(returned_connection, module.slxos_connection)
@patch('ansible_collections.community.general.plugins.module_utils.network.slxos.slxos.Connection')
def test_get_connection_incorrect_network_api(self, connection):
""" Test get_connection with incorrect network_api response
"""
socket_path = "little red riding hood"
module = MagicMock(spec=[
'fail_json',
])
module._socket_path = socket_path
module.fail_json.side_effect = TypeError
connection().get_capabilities.return_value = '{"network_api": "nope"}'
with self.assertRaises(TypeError):
slxos.get_connection(module)
@patch('ansible_collections.community.general.plugins.module_utils.network.slxos.slxos.Connection')
def test_get_capabilities(self, connection):
""" Test get_capabilities
"""
socket_path = "little red riding hood"
module = MagicMock(spec=[
'fail_json',
])
module._socket_path = socket_path
module.fail_json.side_effect = TypeError
capabilities = {'network_api': 'cliconf'}
connection().get_capabilities.return_value = json.dumps(capabilities)
capabilities_returned = slxos.get_capabilities(module)
self.assertEqual(capabilities, capabilities_returned)
@patch('ansible_collections.community.general.plugins.module_utils.network.slxos.slxos.Connection')
def test_run_commands(self, connection):
""" Test get_capabilities
"""
module = MagicMock()
commands = [
'hello',
'dolly',
'well hello',
'dolly',
'its so nice to have you back',
'where you belong',
]
responses = [
'Dolly, never go away again1',
'Dolly, never go away again2',
'Dolly, never go away again3',
'Dolly, never go away again4',
'Dolly, never go away again5',
'Dolly, never go away again6',
]
module.slxos_connection.get.side_effect = responses
run_command_responses = slxos.run_commands(module, commands)
calls = []
for command in commands:
calls.append(call(
command,
None,
None
))
module.slxos_connection.get.assert_has_calls(calls)
self.assertEqual(responses, run_command_responses)
@patch('ansible_collections.community.general.plugins.module_utils.network.slxos.slxos.Connection')
def test_load_config(self, connection):
""" Test load_config
"""
module = MagicMock()
commands = [
'what does it take',
'to be',
'number one?',
'two is not a winner',
'and three nobody remember',
]
slxos.load_config(module, commands)
module.slxos_connection.edit_config.assert_called_once_with(commands)