[wip] Remove network content (#84)
* rebase
* remove broken symlinks
* more deletes
* restore cs_* integration tests
* More deletes - from Felix
* cs_common
* Remove some more ignores
parent 8d203225d3
commit c313c825f4
2215 changed files with 0 additions and 333978 deletions
@@ -1,54 +0,0 @@
Cisco IOS XE Software, Version 16.06.01
Cisco IOS Software [Everest], Virtual XE Software (X86_64_LINUX_IOSD-UNIVERSALK9-M), Version 16.6.1, RELEASE SOFTWARE (fc2)
Technical Support: http://www.cisco.com/techsupport
Copyright (c) 1986-2017 by Cisco Systems, Inc.
Compiled Sat 22-Jul-17 05:51 by mcpre


Cisco IOS-XE software, Copyright (c) 2005-2017 by cisco Systems, Inc.
All rights reserved. Certain components of Cisco IOS-XE software are
licensed under the GNU General Public License ("GPL") Version 2.0. The
software code licensed under GPL Version 2.0 is free software that comes
with ABSOLUTELY NO WARRANTY. You can redistribute and/or modify such
GPL code under the terms of GPL Version 2.0. For more details, see the
documentation or "License Notice" file accompanying the IOS-XE software,
or the applicable URL provided on the flyer accompanying the IOS-XE
software.


ROM: IOS-XE ROMMON

an-csr-01 uptime is 1 day, 16 hours, 15 minutes
Uptime for this control processor is 1 day, 16 hours, 16 minutes
System returned to ROM by reload
System image file is "bootflash:packages.conf"
Last reload reason: Reload Command



This product contains cryptographic features and is subject to United
States and local country laws governing import, export, transfer and
use. Delivery of Cisco cryptographic products does not imply
third-party authority to import, export, distribute or use encryption.
Importers, exporters, distributors and users are responsible for
compliance with U.S. and local country laws. By using this product you
agree to comply with applicable laws and regulations. If you are unable
to comply with U.S. and local laws, return this product immediately.

A summary of U.S. laws governing Cisco cryptographic products may be found at:
http://www.cisco.com/wwl/export/crypto/tool/stqrg.html

If you require further assistance please contact us by sending email to
export@cisco.com.

License Level: ax
License Type: Default. No valid license found.
Next reload license Level: ax

cisco CSR1000V (VXE) processor (revision VXE) with 1225511K/3075K bytes of memory.
Processor board ID 9I5BX4UHSO4
3 Gigabit Ethernet interfaces
32768K bytes of non-volatile configuration memory.
3018776K bytes of physical memory.
16162815K bytes of virtual hard disk at bootflash:.
0K bytes of WebUI ODM Files at webui:.
@@ -1,30 +0,0 @@

Chassis Name: BR-VDX6740
switchType: 131

FAN Unit: 1
Time Awake: 0 days

FAN Unit: 2
Time Awake: 0 days

POWER SUPPLY Unit: 1
Factory Part Num: 23-1000043-01
Factory Serial Num:
Time Awake: 0 days

POWER SUPPLY Unit: 2
Factory Part Num: 23-1000043-01
Factory Serial Num:
Time Awake: 0 days

CHASSIS/WWN Unit: 1
Power Consume Factor: 0
Factory Part Num: 40-1000927-06
Factory Serial Num: CPL2541K01E
Manufacture: Day: 11 Month: 8 Year: 14
Update: Day: 18 Month: 7 Year: 2018
Time Alive: 1116 days
Time Awake: 0 days

Airflow direction : Port side INTAKE
@@ -1,549 +0,0 @@
|
|||
diag post rbridge-id 104 enable
|
||||
ntp server 10.10.10.1 use-vrf mgmt-vrf
|
||||
logging raslog console INFO
|
||||
logging auditlog class SECURITY
|
||||
logging auditlog class CONFIGURATION
|
||||
logging auditlog class FIRMWARE
|
||||
logging syslog-facility local LOG_LOCAL7
|
||||
logging syslog-client localip CHASSIS_IP
|
||||
switch-attributes 104
|
||||
chassis-name VDX6740
|
||||
host-name LEAF4
|
||||
!
|
||||
no support autoupload enable
|
||||
line vty
|
||||
exec-timeout 10
|
||||
!
|
||||
zoning enabled-configuration cfg-name ""
|
||||
zoning enabled-configuration default-zone-access allaccess
|
||||
zoning enabled-configuration cfg-action cfg-save
|
||||
dpod 104/0/1
|
||||
reserve
|
||||
!
|
||||
dpod 104/0/2
|
||||
!
|
||||
dpod 104/0/3
|
||||
!
|
||||
dpod 104/0/4
|
||||
!
|
||||
dpod 104/0/5
|
||||
!
|
||||
dpod 104/0/6
|
||||
!
|
||||
dpod 104/0/7
|
||||
!
|
||||
dpod 104/0/8
|
||||
!
|
||||
dpod 104/0/9
|
||||
!
|
||||
dpod 104/0/10
|
||||
!
|
||||
dpod 104/0/11
|
||||
!
|
||||
dpod 104/0/12
|
||||
!
|
||||
dpod 104/0/13
|
||||
!
|
||||
dpod 104/0/14
|
||||
!
|
||||
dpod 104/0/15
|
||||
!
|
||||
dpod 104/0/16
|
||||
!
|
||||
dpod 104/0/17
|
||||
!
|
||||
dpod 104/0/18
|
||||
!
|
||||
dpod 104/0/19
|
||||
!
|
||||
dpod 104/0/20
|
||||
!
|
||||
dpod 104/0/21
|
||||
!
|
||||
dpod 104/0/22
|
||||
!
|
||||
dpod 104/0/23
|
||||
!
|
||||
dpod 104/0/24
|
||||
!
|
||||
dpod 104/0/25
|
||||
!
|
||||
dpod 104/0/26
|
||||
!
|
||||
dpod 104/0/27
|
||||
!
|
||||
dpod 104/0/28
|
||||
!
|
||||
dpod 104/0/29
|
||||
!
|
||||
dpod 104/0/30
|
||||
!
|
||||
dpod 104/0/31
|
||||
!
|
||||
dpod 104/0/32
|
||||
!
|
||||
dpod 104/0/33
|
||||
!
|
||||
dpod 104/0/34
|
||||
!
|
||||
dpod 104/0/35
|
||||
!
|
||||
dpod 104/0/36
|
||||
!
|
||||
dpod 104/0/37
|
||||
!
|
||||
dpod 104/0/38
|
||||
!
|
||||
dpod 104/0/39
|
||||
!
|
||||
dpod 104/0/40
|
||||
!
|
||||
dpod 104/0/41
|
||||
!
|
||||
dpod 104/0/42
|
||||
!
|
||||
dpod 104/0/43
|
||||
!
|
||||
dpod 104/0/44
|
||||
!
|
||||
dpod 104/0/45
|
||||
!
|
||||
dpod 104/0/46
|
||||
!
|
||||
dpod 104/0/47
|
||||
!
|
||||
dpod 104/0/48
|
||||
!
|
||||
dpod 104/0/49
|
||||
!
|
||||
dpod 104/0/50
|
||||
!
|
||||
dpod 104/0/51
|
||||
!
|
||||
dpod 104/0/52
|
||||
!
|
||||
role name admin desc Administrator
|
||||
role name user desc User
|
||||
aaa authentication login local
|
||||
aaa accounting exec default start-stop none
|
||||
aaa accounting commands default start-stop none
|
||||
service password-encryption
|
||||
username admin password "BwrsDbB+tABWGWpINOVKoQ==\n" encryption-level 7 role admin desc Administrator
|
||||
username user password "BwrsDbB+tABWGWpINOVKoQ==\n" encryption-level 7 role user desc User
|
||||
ip access-list extended test
|
||||
seq 10 permit ip host 1.1.1.1 any log
|
||||
!
|
||||
snmp-server contact "Field Support."
|
||||
snmp-server location "End User Premise."
|
||||
snmp-server sys-descr "Extreme VDX Switch."
|
||||
snmp-server enable trap
|
||||
snmp-server community private groupname admin
|
||||
snmp-server community public groupname user
|
||||
snmp-server view All 1 included
|
||||
snmp-server group admin v1 read All write All notify All
|
||||
snmp-server group public v1 read All
|
||||
snmp-server group public v2c read All
|
||||
snmp-server group user v1 read All
|
||||
snmp-server group user v2c read All
|
||||
hardware
|
||||
connector-group 104/0/1
|
||||
speed LowMixed
|
||||
!
|
||||
connector-group 104/0/3
|
||||
speed LowMixed
|
||||
!
|
||||
connector-group 104/0/5
|
||||
speed LowMixed
|
||||
!
|
||||
connector-group 104/0/6
|
||||
speed LowMixed
|
||||
!
|
||||
!
|
||||
cee-map default
|
||||
precedence 1
|
||||
priority-group-table 1 weight 40 pfc on
|
||||
priority-group-table 15.0 pfc off
|
||||
priority-group-table 15.1 pfc off
|
||||
priority-group-table 15.2 pfc off
|
||||
priority-group-table 15.3 pfc off
|
||||
priority-group-table 15.4 pfc off
|
||||
priority-group-table 15.5 pfc off
|
||||
priority-group-table 15.6 pfc off
|
||||
priority-group-table 15.7 pfc off
|
||||
priority-group-table 2 weight 60 pfc off
|
||||
priority-table 2 2 2 1 2 2 2 15.0
|
||||
remap fabric-priority priority 0
|
||||
remap lossless-priority priority 0
|
||||
!
|
||||
fcoe
|
||||
fabric-map default
|
||||
vlan 1002
|
||||
san-mode local
|
||||
priority 3
|
||||
virtual-fabric 128
|
||||
fcmap 0E:FC:00
|
||||
advertisement interval 8000
|
||||
keep-alive timeout
|
||||
!
|
||||
!
|
||||
interface Vlan 1
|
||||
!
|
||||
fabric route mcast rbridge-id 104
|
||||
!
|
||||
protocol lldp
|
||||
advertise dcbx-fcoe-app-tlv
|
||||
advertise dcbx-fcoe-logical-link-tlv
|
||||
advertise dcbx-tlv
|
||||
advertise bgp-auto-nbr-tlv
|
||||
advertise optional-tlv management-address
|
||||
advertise optional-tlv system-name
|
||||
system-description Extreme-VDX-VCS 120
|
||||
!
|
||||
vlan dot1q tag native
|
||||
port-profile UpgradedVlanProfile
|
||||
vlan-profile
|
||||
switchport
|
||||
switchport mode trunk
|
||||
switchport trunk allowed vlan all
|
||||
!
|
||||
!
|
||||
port-profile default
|
||||
vlan-profile
|
||||
switchport
|
||||
switchport mode trunk
|
||||
switchport trunk native-vlan 1
|
||||
!
|
||||
!
|
||||
port-profile-domain default
|
||||
port-profile UpgradedVlanProfile
|
||||
!
|
||||
class-map cee
|
||||
!
|
||||
class-map default
|
||||
!
|
||||
rbridge-id 104
|
||||
switch-attributes chassis-name VDX6740
|
||||
switch-attributes host-name LEAF4
|
||||
vrf mgmt-vrf
|
||||
address-family ipv4 unicast
|
||||
ip route 0.0.0.0/0 10.26.0.1
|
||||
!
|
||||
address-family ipv6 unicast
|
||||
!
|
||||
!
|
||||
system-monitor fan threshold marginal-threshold 1 down-threshold 2
|
||||
system-monitor fan alert state removed action raslog
|
||||
system-monitor power threshold marginal-threshold 1 down-threshold 2
|
||||
system-monitor power alert state removed action raslog
|
||||
system-monitor temp threshold marginal-threshold 1 down-threshold 2
|
||||
system-monitor cid-card threshold marginal-threshold 1 down-threshold 2
|
||||
system-monitor cid-card alert state none action none
|
||||
system-monitor sfp alert state none action none
|
||||
system-monitor compact-flash threshold marginal-threshold 1 down-threshold 0
|
||||
system-monitor MM threshold marginal-threshold 1 down-threshold 0
|
||||
system-monitor LineCard threshold marginal-threshold 1 down-threshold 2
|
||||
system-monitor LineCard alert state none action none
|
||||
system-monitor SFM threshold marginal-threshold 1 down-threshold 2
|
||||
resource-monitor cpu enable
|
||||
resource-monitor memory enable threshold 100 action raslog
|
||||
resource-monitor process memory enable alarm 500 critical 600
|
||||
no protocol vrrp
|
||||
no protocol vrrp-extended
|
||||
hardware-profile tcam default
|
||||
hardware-profile route-table default maximum_paths 8 openflow off
|
||||
hardware-profile kap default
|
||||
fabric neighbor-discovery
|
||||
clock timezone America/Los_Angeles
|
||||
ag
|
||||
enable
|
||||
counter reliability 25
|
||||
timeout fnm 120
|
||||
pg 0
|
||||
modes lb
|
||||
rename pg0
|
||||
!
|
||||
!
|
||||
telnet server use-vrf default-vrf
|
||||
telnet server use-vrf mgmt-vrf
|
||||
ssh server key rsa 2048
|
||||
ssh server key ecdsa 256
|
||||
ssh server key dsa
|
||||
ssh server use-vrf default-vrf
|
||||
ssh server use-vrf mgmt-vrf
|
||||
http server use-vrf default-vrf
|
||||
http server use-vrf mgmt-vrf
|
||||
fcoe
|
||||
fcoe-enodes 0
|
||||
!
|
||||
!
|
||||
interface Management 104/0
|
||||
no tcp burstrate
|
||||
ip icmp echo-reply
|
||||
no ip address dhcp
|
||||
ip address 10.26.7.226/17
|
||||
ipv6 icmpv6 echo-reply
|
||||
no ipv6 address autoconfig
|
||||
no ipv6 address dhcp
|
||||
vrf forwarding mgmt-vrf
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/1
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/2
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/3
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/4
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/5
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/6
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/7
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/8
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/9
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/10
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/11
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/12
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/13
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/14
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/15
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/16
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/17
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/18
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/19
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/20
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/21
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/22
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/23
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/24
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/25
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/26
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/27
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/28
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/29
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/30
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/31
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/32
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/33
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/34
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/35
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/36
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/37
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/38
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/39
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/40
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/41
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/42
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/43
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/44
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/45
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/46
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/47
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface TenGigabitEthernet 104/0/48
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface FortyGigabitEthernet 104/0/49
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface FortyGigabitEthernet 104/0/50
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface FortyGigabitEthernet 104/0/51
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
||||
interface FortyGigabitEthernet 104/0/52
|
||||
fabric isl enable
|
||||
fabric trunk enable
|
||||
no shutdown
|
||||
!
|
|
@@ -1,17 +0,0 @@
Network Operating System Software
Network Operating System Version: 7.2.0
Copyright (c) 1995-2017 Brocade Communications Systems, Inc.
Firmware name: 7.2.0
Build Time: 10:52:47 Jul 10, 2017
Install Time: 01:32:03 Jan 5, 2018
Kernel: 2.6.34.6

BootProm: 1.0.1
Control Processor: e500mc with 4096 MB of memory

Slot    Name    Primary/Secondary Versions    Status
---------------------------------------------------------------------------
SW/0    NOS     7.2.0                         ACTIVE*
                7.2.0
SW/1    NOS     7.2.0                         STANDBY
                7.2.0
@@ -1,40 +0,0 @@

Chassis Name: BR-SLX9140
switchType: 3001

FAN Unit: 1
Time Awake: 36 days

FAN Unit: 2
Time Awake: 36 days

FAN Unit: 3
Time Awake: 36 days

FAN Unit: 5
Time Awake: 36 days

FAN Unit: 6
Time Awake: 36 days

POWER SUPPLY Unit: 1
Factory Part Num: 11-1111111-11
Factory Serial Num: ASERIALNUMB
Time Awake: 36 days

POWER SUPPLY Unit: 2
Factory Part Num: 11-1111111-11
Factory Serial Num: ASERIALNUMB
Time Awake: 36 days

CHASSIS/WWN Unit: 1
Power Consume Factor: 0
Factory Part Num: 11-1111111-11
Factory Serial Num: ASERIALNUMB
Manufacture: Day: 12 Month: 1 Year: 2017
Update: Day: 5 Month: 4 Year: 2018
Time Alive: 277 days
Time Awake: 36 days

Airflow direction : Port side INTAKE

@@ -1,624 +0,0 @@
|
|||
root enable
|
||||
host-table aging-mode conversational
|
||||
clock timezone Europe/Warsaw
|
||||
hardware
|
||||
profile tcam default
|
||||
profile overlay-visibility default
|
||||
profile route-table default maximum_paths 8
|
||||
system-mode default
|
||||
!
|
||||
http server use-vrf default-vrf
|
||||
http server use-vrf mgmt-vrf
|
||||
node-id 1
|
||||
!
|
||||
ntp server 172.16.10.2 use-vrf mgmt-vrf
|
||||
!
|
||||
logging raslog console INFO
|
||||
logging syslog-server 10.1.5.11 use-vrf mgmt-vrf
|
||||
!
|
||||
logging auditlog class SECURITY
|
||||
logging auditlog class CONFIGURATION
|
||||
logging auditlog class FIRMWARE
|
||||
logging syslog-facility local LOG_LOCAL0
|
||||
logging syslog-client localip CHASSIS_IP
|
||||
switch-attributes chassis-name SLX9140-LEAF2
|
||||
switch-attributes host-name DC2LEAF2
|
||||
no support autoupload enable
|
||||
support ffdc
|
||||
resource-monitor cpu enable threshold 90 action raslog
|
||||
resource-monitor memory enable threshold 100 action raslog
|
||||
resource-monitor process memory enable alarm 1000 critical 1200
|
||||
system-monitor fan threshold marginal-threshold 1 down-threshold 2
|
||||
system-monitor fan alert state removed action raslog
|
||||
system-monitor power threshold marginal-threshold 1 down-threshold 2
|
||||
system-monitor power alert state removed action raslog
|
||||
system-monitor temp threshold marginal-threshold 1 down-threshold 2
|
||||
system-monitor cid-card threshold marginal-threshold 1 down-threshold 2
|
||||
system-monitor cid-card alert state none action none
|
||||
system-monitor compact-flash threshold marginal-threshold 1 down-threshold 0
|
||||
system-monitor MM threshold marginal-threshold 1 down-threshold 0
|
||||
system-monitor LineCard threshold marginal-threshold 1 down-threshold 2
|
||||
system-monitor LineCard alert state none action none
|
||||
system-monitor SFM threshold marginal-threshold 1 down-threshold 2
|
||||
telemetry server use-vrf mgmt-vrf
|
||||
transport tcp
|
||||
port 50051
|
||||
activate
|
||||
!
|
||||
telemetry profile system-utilization default_system_utilization_statistics
|
||||
interval 60
|
||||
add total-system-memory
|
||||
add total-used-memory
|
||||
add total-free-memory
|
||||
add cached-memory
|
||||
add buffers
|
||||
add user-free-memory
|
||||
add kernel-free-memory
|
||||
add total-swap-memory
|
||||
add total-free-swap-memory
|
||||
add total-used-swap-memory
|
||||
add user-process
|
||||
add system-process
|
||||
add niced-process
|
||||
add iowait
|
||||
add hw-interrupt
|
||||
add sw-interrupt
|
||||
add idle-state
|
||||
add steal-time
|
||||
add uptime
|
||||
!
|
||||
telemetry profile interface default_interface_statistics
|
||||
interval 30
|
||||
add out-pkts
|
||||
add in-pkts
|
||||
add out-unicast-pkts
|
||||
add in-unicast-pkts
|
||||
add out-broadcast-pkts
|
||||
add in-broadcast-pkts
|
||||
add out-multicast-pkts
|
||||
add in-multicast-pkts
|
||||
add out-pkts-per-second
|
||||
add in-pkts-per-second
|
||||
add out-bandwidth
|
||||
add in-bandwidth
|
||||
add out-octets
|
||||
add in-octets
|
||||
add out-errors
|
||||
add in-errors
|
||||
add out-crc-errors
|
||||
add in-crc-errors
|
||||
add out-discards
|
||||
add in-discards
|
||||
!
|
||||
line vty
|
||||
exec-timeout 10
|
||||
!
|
||||
threshold-monitor Buffer limit 70
|
||||
vrf mgmt-vrf
|
||||
address-family ipv4 unicast
|
||||
ip route 0.0.0.0/0 172.168.192.1
|
||||
!
|
||||
address-family ipv6 unicast
|
||||
!
|
||||
!
|
||||
ssh server key rsa 2048
|
||||
ssh server key ecdsa 256
|
||||
ssh server key dsa
|
||||
ssh server use-vrf default-vrf
|
||||
ssh server use-vrf mgmt-vrf
|
||||
telnet server use-vrf default-vrf
|
||||
telnet server use-vrf mgmt-vrf
|
||||
role name admin desc Administrator
|
||||
role name user desc User
|
||||
aaa authentication login local
|
||||
aaa accounting exec default start-stop none
|
||||
aaa accounting commands default start-stop none
|
||||
service password-encryption
|
||||
username admin password "AINTNOPARTYLIKEAHOTELPARTYCAUSEAHOTELPARTYDONTSLEEPNOONEWOULDEVERACTUALLYTYPETHISWHYAREYOUHERE\n" encryption-level 7 role admin desc Administrator
|
||||
cee-map default
|
||||
precedence 1
|
||||
priority-group-table 1 weight 40 pfc on
|
||||
priority-group-table 15.0 pfc off
|
||||
priority-group-table 15.1 pfc off
|
||||
priority-group-table 15.2 pfc off
|
||||
priority-group-table 15.3 pfc off
|
||||
priority-group-table 15.4 pfc off
|
||||
priority-group-table 15.5 pfc off
|
||||
priority-group-table 15.6 pfc off
|
||||
priority-group-table 15.7 pfc off
|
||||
priority-group-table 2 weight 60 pfc off
|
||||
priority-table 2 2 2 1 2 2 2 15.0
|
||||
remap lossless-priority priority 0
|
||||
!
|
||||
mac access-list extended M1
|
||||
seq 10 permit any any
|
||||
!
|
||||
vlan 1
|
||||
ip igmp snooping startup-query-interval 100
|
||||
ipv6 mld snooping startup-query-interval 100
|
||||
!
|
||||
vlan 100
|
||||
!
|
||||
vlan 200
|
||||
!
|
||||
vlan 1001
|
||||
router-interface Ve 1001
|
||||
description Thomas-Test-Cluster
|
||||
!
|
||||
qos map cos-mutation all-zero-map
|
||||
map cos 0 to cos 0
|
||||
map cos 1 to cos 0
|
||||
map cos 2 to cos 0
|
||||
map cos 3 to cos 0
|
||||
map cos 4 to cos 0
|
||||
map cos 5 to cos 0
|
||||
map cos 6 to cos 0
|
||||
map cos 7 to cos 0
|
||||
!
|
||||
qos map cos-mutation default
|
||||
map cos 0 to cos 0
|
||||
map cos 1 to cos 1
|
||||
map cos 2 to cos 2
|
||||
map cos 3 to cos 3
|
||||
map cos 4 to cos 4
|
||||
map cos 5 to cos 5
|
||||
map cos 6 to cos 6
|
||||
map cos 7 to cos 7
|
||||
!
|
||||
qos map cos-traffic-class all-zero-map
|
||||
map cos 0 to traffic-class 0
|
||||
map cos 1 to traffic-class 0
|
||||
map cos 2 to traffic-class 0
|
||||
map cos 3 to traffic-class 0
|
||||
map cos 4 to traffic-class 0
|
||||
map cos 5 to traffic-class 0
|
||||
map cos 6 to traffic-class 0
|
||||
map cos 7 to traffic-class 0
|
||||
!
|
||||
qos map cos-traffic-class default
|
||||
map cos 0 to traffic-class 1
|
||||
map cos 1 to traffic-class 0
|
||||
map cos 2 to traffic-class 2
|
||||
map cos 3 to traffic-class 3
|
||||
map cos 4 to traffic-class 4
|
||||
map cos 5 to traffic-class 5
|
||||
map cos 6 to traffic-class 6
|
||||
map cos 7 to traffic-class 7
|
||||
!
|
||||
qos map cos-dscp all-zero-map
|
||||
map cos 0 to dscp 0
|
||||
map cos 1 to dscp 0
|
||||
map cos 2 to dscp 0
|
||||
map cos 3 to dscp 0
|
||||
map cos 4 to dscp 0
|
||||
map cos 5 to dscp 0
|
||||
map cos 6 to dscp 0
|
||||
map cos 7 to dscp 0
|
||||
!
|
||||
qos map cos-dscp default
|
||||
map cos 0 to dscp 0
|
||||
map cos 1 to dscp 8
|
||||
map cos 2 to dscp 16
|
||||
map cos 3 to dscp 24
|
||||
map cos 4 to dscp 32
|
||||
map cos 5 to dscp 40
|
||||
map cos 6 to dscp 48
|
||||
map cos 7 to dscp 56
|
||||
!
|
||||
qos map traffic-class-cos all-zero-map
|
||||
map traffic-class 0 to cos 0
|
||||
map traffic-class 1 to cos 0
|
||||
map traffic-class 2 to cos 0
|
||||
map traffic-class 3 to cos 0
|
||||
map traffic-class 4 to cos 0
|
||||
map traffic-class 5 to cos 0
|
||||
map traffic-class 6 to cos 0
|
||||
map traffic-class 7 to cos 0
|
||||
!
|
||||
qos map traffic-class-cos default
|
||||
map traffic-class 0 to cos 0
|
||||
map traffic-class 1 to cos 1
|
||||
map traffic-class 2 to cos 2
|
||||
map traffic-class 3 to cos 3
|
||||
map traffic-class 4 to cos 4
|
||||
map traffic-class 5 to cos 5
|
||||
map traffic-class 6 to cos 6
|
||||
map traffic-class 7 to cos 7
|
||||
!
|
||||
qos map traffic-class-mutation all-zero-map
|
||||
map traffic-class 0 to traffic-class 0
|
||||
map traffic-class 1 to traffic-class 0
|
||||
map traffic-class 2 to traffic-class 0
|
||||
map traffic-class 3 to traffic-class 0
|
||||
map traffic-class 4 to traffic-class 0
|
||||
map traffic-class 5 to traffic-class 0
|
||||
map traffic-class 6 to traffic-class 0
|
||||
map traffic-class 7 to traffic-class 0
|
||||
!
|
||||
qos map traffic-class-mutation default
|
||||
map traffic-class 0 to traffic-class 0
|
||||
map traffic-class 1 to traffic-class 1
|
||||
map traffic-class 2 to traffic-class 2
|
||||
map traffic-class 3 to traffic-class 3
|
||||
map traffic-class 4 to traffic-class 4
|
||||
map traffic-class 5 to traffic-class 5
|
||||
map traffic-class 6 to traffic-class 6
|
||||
map traffic-class 7 to traffic-class 7
|
||||
!
|
||||
qos map traffic-class-dscp all-zero-map
|
||||
map traffic-class 0 to dscp 0
|
||||
map traffic-class 1 to dscp 0
|
||||
map traffic-class 2 to dscp 0
|
||||
map traffic-class 3 to dscp 0
|
||||
map traffic-class 4 to dscp 0
|
||||
map traffic-class 5 to dscp 0
|
||||
map traffic-class 6 to dscp 0
|
||||
map traffic-class 7 to dscp 0
|
||||
!
|
||||
qos map traffic-class-dscp default
|
||||
map traffic-class 0 to dscp 0
|
||||
map traffic-class 1 to dscp 8
|
||||
map traffic-class 2 to dscp 16
|
||||
map traffic-class 3 to dscp 24
|
||||
map traffic-class 4 to dscp 32
|
||||
map traffic-class 5 to dscp 40
|
||||
map traffic-class 6 to dscp 48
|
||||
map traffic-class 7 to dscp 56
|
||||
!
|
||||
qos map dscp-mutation all-zero-map
|
||||
map dscp 0-63 to dscp 0
|
||||
!
|
||||
qos map dscp-mutation default
|
||||
map dscp 0 to dscp 0
|
||||
map dscp 1 to dscp 1
|
||||
map dscp 10 to dscp 10
|
||||
map dscp 11 to dscp 11
|
||||
map dscp 12 to dscp 12
|
||||
map dscp 13 to dscp 13
|
||||
map dscp 14 to dscp 14
|
||||
map dscp 15 to dscp 15
|
||||
map dscp 16 to dscp 16
|
||||
map dscp 17 to dscp 17
|
||||
map dscp 18 to dscp 18
|
||||
map dscp 19 to dscp 19
|
||||
map dscp 2 to dscp 2
|
||||
map dscp 20 to dscp 20
|
||||
map dscp 21 to dscp 21
|
||||
map dscp 22 to dscp 22
|
||||
map dscp 23 to dscp 23
|
||||
map dscp 24 to dscp 24
|
||||
map dscp 25 to dscp 25
|
||||
map dscp 26 to dscp 26
|
||||
map dscp 27 to dscp 27
|
||||
map dscp 28 to dscp 28
|
||||
map dscp 29 to dscp 29
|
||||
map dscp 3 to dscp 3
|
||||
map dscp 30 to dscp 30
|
||||
map dscp 31 to dscp 31
|
||||
map dscp 32 to dscp 32
|
||||
map dscp 33 to dscp 33
|
||||
map dscp 34 to dscp 34
|
||||
map dscp 35 to dscp 35
|
||||
map dscp 36 to dscp 36
|
||||
map dscp 37 to dscp 37
|
||||
map dscp 38 to dscp 38
|
||||
map dscp 39 to dscp 39
|
||||
map dscp 4 to dscp 4
|
||||
map dscp 40 to dscp 40
|
||||
map dscp 41 to dscp 41
|
||||
map dscp 42 to dscp 42
|
||||
map dscp 43 to dscp 43
|
||||
map dscp 44 to dscp 44
|
||||
map dscp 45 to dscp 45
|
||||
map dscp 46 to dscp 46
|
||||
map dscp 47 to dscp 47
|
||||
map dscp 48 to dscp 48
|
||||
map dscp 49 to dscp 49
|
||||
map dscp 5 to dscp 5
|
||||
map dscp 50 to dscp 50
|
||||
map dscp 51 to dscp 51
|
||||
map dscp 52 to dscp 52
|
||||
map dscp 53 to dscp 53
|
||||
map dscp 54 to dscp 54
|
||||
map dscp 55 to dscp 55
|
||||
map dscp 56 to dscp 56
|
||||
map dscp 57 to dscp 57
|
||||
map dscp 58 to dscp 58
|
||||
map dscp 59 to dscp 59
|
||||
map dscp 6 to dscp 6
|
||||
map dscp 60 to dscp 60
|
||||
map dscp 61 to dscp 61
|
||||
map dscp 62 to dscp 62
|
||||
map dscp 63 to dscp 63
|
||||
map dscp 7 to dscp 7
|
||||
map dscp 8 to dscp 8
|
||||
map dscp 9 to dscp 9
|
||||
!
|
||||
qos map dscp-traffic-class all-zero-map
|
||||
map dscp 0-63 to traffic-class 0
|
||||
!
|
||||
qos map dscp-traffic-class default
|
||||
map dscp 0-7 to traffic-class 0
|
||||
map dscp 16-23 to traffic-class 2
|
||||
map dscp 24-31 to traffic-class 3
|
||||
map dscp 32-39 to traffic-class 4
|
||||
map dscp 40-47 to traffic-class 5
|
||||
map dscp 48-55 to traffic-class 6
|
||||
map dscp 56-63 to traffic-class 7
|
||||
map dscp 8-15 to traffic-class 1
|
||||
!
|
||||
qos map dscp-cos all-zero-map
|
||||
map dscp 0-63 to cos 0
|
||||
!
|
||||
qos map dscp-cos default
|
||||
map dscp 0-7 to cos 0
|
||||
map dscp 16-23 to cos 2
|
||||
map dscp 24-31 to cos 3
|
||||
map dscp 32-39 to cos 4
|
||||
map dscp 40-47 to cos 5
|
||||
map dscp 48-55 to cos 6
|
||||
map dscp 56-63 to cos 7
|
||||
map dscp 8-15 to cos 1
|
||||
!
|
||||
protocol lldp
|
||||
advertise optional-tlv management-address
|
||||
system-description Brocade BR-SLX9140 Router
|
||||
!
|
||||
vlan dot1q tag native
|
||||
police-remark-profile default
|
||||
!
|
||||
class-map BD-100
|
||||
!
|
||||
class-map C1
|
||||
match access-group M1
|
||||
!
|
||||
class-map cee
|
||||
!
|
||||
class-map default
|
||||
!
|
||||
policy-map P1
|
||||
class C1
|
||||
police cir 1000000
|
||||
!
|
||||
!
|
||||
policy-map P2
|
||||
class default
|
||||
police cir 12121212
|
||||
!
|
||||
!
|
||||
no protocol vrrp
|
||||
no protocol vrrp-extended
|
||||
router bgp
|
||||
local-as 65301
|
||||
capability as4-enable
|
||||
bfd interval 300 min-rx 300 multiplier 3
|
||||
neighbor leaf_group peer-group
|
||||
neighbor leaf_group remote-as 65500
|
||||
neighbor leaf_group bfd
|
||||
neighbor 10.220.4.3 remote-as 65500
|
||||
neighbor 10.220.4.3 peer-group leaf_group
|
||||
address-family ipv4 unicast
|
||||
network 172.32.252.5/32
|
||||
maximum-paths 8
|
||||
!
|
||||
address-family ipv6 unicast
|
||||
!
|
||||
address-family l2vpn evpn
|
||||
!
|
||||
!
|
||||
interface Loopback 1
|
||||
ip address 172.16.128.6/32
|
||||
no shutdown
|
||||
!
|
||||
interface Loopback 2
|
||||
ip address 172.16.129.5/32
|
||||
no shutdown
|
||||
!
|
||||
interface Management 0
|
||||
no tcp burstrate
|
||||
no shutdown
|
||||
vrf forwarding mgmt-vrf
|
||||
ip address dhcp
|
||||
!
|
||||
interface Ethernet 0/1
|
||||
speed 25000
|
||||
fec mode disabled
|
||||
switchport
|
||||
switchport mode access
|
||||
switchport access vlan 1
|
||||
no shutdown
|
||||
!
|
||||
interface Ethernet 0/2
|
||||
no shutdown
|
||||
!
|
||||
interface Ethernet 0/3
|
||||
speed 25000
|
||||
fec mode RS-FEC
|
||||
no shutdown
|
||||
!
|
||||
interface Ethernet 0/4
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/5
|
||||
service-policy in P1
|
||||
no shutdown
|
||||
!
|
||||
interface Ethernet 0/6
|
||||
mtu 1548
|
||||
description L2 Interface
|
||||
no shutdown
|
||||
!
|
||||
interface Ethernet 0/7
|
||||
mtu 1548
|
||||
description L2 Interface
|
||||
no shutdown
|
||||
!
|
||||
interface Ethernet 0/8
|
||||
switchport
|
||||
switchport mode trunk
|
||||
switchport trunk allowed vlan add 100,200
|
||||
switchport trunk tag native-vlan
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/9
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/10
|
||||
no shutdown
|
||||
!
|
||||
interface Ethernet 0/11
|
||||
no shutdown
|
||||
!
|
||||
interface Ethernet 0/12
|
||||
no shutdown
|
||||
!
|
||||
interface Ethernet 0/13
|
||||
no shutdown
|
||||
!
|
||||
interface Ethernet 0/14
|
||||
no shutdown
|
||||
!
|
||||
interface Ethernet 0/15
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/16
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/17
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/18
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/19
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/20
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/21
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/22
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/23
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/24
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/25
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/26
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/27
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/28
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/29
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/30
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/31
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/32
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/33
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/34
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/35
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/36
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/37
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/38
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/39
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/40
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/41
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/42
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/43
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/44
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/45
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/46
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/47
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/48
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/49
|
||||
shutdown
|
||||
!
|
||||
interface Ethernet 0/50
|
||||
fec mode RS-FEC
|
||||
no shutdown
|
||||
!
|
||||
interface Ethernet 0/51
|
||||
fec mode RS-FEC
|
||||
no shutdown
|
||||
!
|
||||
interface Ethernet 0/52
|
||||
fec mode RS-FEC
|
||||
no shutdown
|
||||
!
|
||||
interface Ethernet 0/53
|
||||
fec mode RS-FEC
|
||||
no shutdown
|
||||
!
|
||||
interface Ethernet 0/54
|
||||
fec mode disabled
|
||||
no shutdown
|
||||
!
|
||||
interface Port-channel 200
|
||||
switchport
|
||||
switchport mode access
|
||||
switchport access vlan 200
|
||||
shutdown
|
||||
!
|
||||
interface Port-channel 1024
|
||||
insight enable
|
||||
no shutdown
|
||||
!
|
||||
monitor session 1
|
||||
source ethernet 0/1 destination port-channel 1024 direction both
|
||||
!
|
||||
monitor session 2
|
||||
!
|
||||
bridge-domain 100 p2mp
|
||||
!
|
||||
cluster MCT1 1
|
||||
!
|
|
@@ -1,18 +0,0 @@
SLX-OS Operating System Software
SLX-OS Operating System Version: 17s.1.02
Copyright (c) 1995-2018 Brocade Communications Systems, Inc.
Firmware name: 17s.1.02
Build Time: 00:06:59 Sep 28, 2017
Install Time: 15:58:29 Feb 9, 2018
Kernel: 2.6.34.6
Host Version: Ubuntu 14.04 LTS
Host Kernel: Linux 3.14.17

Control Processor: QEMU Virtual CPU version 2.0.0

System Uptime: 34days 4hrs 41mins 53secs

Slot    Name    Primary/Secondary Versions    Status
---------------------------------------------------------------------------
SW/0    SLX-OS  17s.1.02                      ACTIVE*
                17s.1.02
@@ -1,137 +0,0 @@
#
# (c) 2018 Extreme Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from os import path
import json

from mock import MagicMock, call

from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.cliconf import nos

FIXTURE_DIR = b'%s/fixtures/nos' % (
    path.dirname(path.abspath(__file__)).encode('utf-8')
)


def _connection_side_effect(*args, **kwargs):
    try:
        if args:
            value = args[0]
        else:
            value = kwargs.get('command')

        fixture_path = path.abspath(
            b'%s/%s' % (FIXTURE_DIR, b'_'.join(value.split(b' ')))
        )
        with open(fixture_path, 'rb') as file_desc:
            return file_desc.read()
    except (OSError, IOError):
        if args:
            value = args[0]
            return value
        elif kwargs.get('command'):
            value = kwargs.get('command')
            return value

        return 'Nope'


class TestPluginCLIConfNOS(unittest.TestCase):
    """ Test class for NOS CLI Conf Methods
    """
    def setUp(self):
        self._mock_connection = MagicMock()
        self._mock_connection.send.side_effect = _connection_side_effect
        self._cliconf = nos.Cliconf(self._mock_connection)
        self.maxDiff = None

    def tearDown(self):
        pass

    def test_get_device_info(self):
        """ Test get_device_info
        """
        device_info = self._cliconf.get_device_info()

        mock_device_info = {
            'network_os': 'nos',
            'network_os_model': 'BR-VDX6740',
            'network_os_version': '7.2.0',
        }

        self.assertEqual(device_info, mock_device_info)

    def test_get_config(self):
        """ Test get_config
        """
        running_config = self._cliconf.get_config()

        fixture_path = path.abspath(b'%s/show_running-config' % FIXTURE_DIR)
        with open(fixture_path, 'rb') as file_desc:
            mock_running_config = file_desc.read()
        self.assertEqual(running_config, mock_running_config)

    def test_edit_config(self):
        """ Test edit_config
        """
        test_config_command = b'this\nis\nthe\nsong\nthat\nnever\nends'

        self._cliconf.edit_config(test_config_command)

        send_calls = []

        for command in [b'configure terminal', test_config_command, b'end']:
            send_calls.append(call(
                command=command,
                prompt_retry_check=False,
                sendonly=False,
                newline=True,
                check_all=False
            ))

        self._mock_connection.send.assert_has_calls(send_calls)

    def test_get_capabilities(self):
        """ Test get_capabilities
        """
        capabilities = json.loads(self._cliconf.get_capabilities())
        mock_capabilities = {
            'network_api': 'cliconf',
            'rpc': [
                'get_config',
                'edit_config',
                'get_capabilities',
                'get',
                'enable_response_logging',
                'disable_response_logging'
            ],
            'device_info': {
                'network_os_model': 'BR-VDX6740',
                'network_os_version': '7.2.0',
                'network_os': 'nos'
            }
        }

        self.assertEqual(
            mock_capabilities,
            capabilities
        )
@ -1,144 +0,0 @@
#
# (c) 2018 Extreme Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from os import path
import json

from mock import MagicMock, call

from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.plugins.cliconf import slxos

FIXTURE_DIR = b'%s/fixtures/slxos' % (
    path.dirname(path.abspath(__file__)).encode('utf-8')
)


def _connection_side_effect(*args, **kwargs):
    try:
        if args:
            value = args[0]
        else:
            value = kwargs.get('command')

        fixture_path = path.abspath(
            b'%s/%s' % (FIXTURE_DIR, b'_'.join(value.split(b' ')))
        )
        with open(fixture_path, 'rb') as file_desc:
            return file_desc.read()
    except (OSError, IOError):
        if args:
            value = args[0]
            return value
        elif kwargs.get('command'):
            value = kwargs.get('command')
            return value

        return 'Nope'


class TestPluginCLIConfSLXOS(unittest.TestCase):
    """ Test class for SLX-OS CLI Conf Methods
    """
    def setUp(self):
        self._mock_connection = MagicMock()
        self._mock_connection.send.side_effect = _connection_side_effect
        self._cliconf = slxos.Cliconf(self._mock_connection)
        self.maxDiff = None

    def tearDown(self):
        pass

    def test_get_device_info(self):
        """ Test get_device_info
        """
        device_info = self._cliconf.get_device_info()

        mock_device_info = {
            'network_os': 'slxos',
            'network_os_model': 'BR-SLX9140',
            'network_os_version': '17s.1.02',
        }

        self.assertEqual(device_info, mock_device_info)

    def test_get_config(self):
        """ Test get_config
        """
        running_config = self._cliconf.get_config()

        fixture_path = path.abspath(b'%s/show_running-config' % FIXTURE_DIR)
        with open(fixture_path, 'rb') as file_desc:
            mock_running_config = file_desc.read()
            self.assertEqual(running_config, mock_running_config)

        startup_config = self._cliconf.get_config()

        fixture_path = path.abspath(b'%s/show_running-config' % FIXTURE_DIR)
        with open(fixture_path, 'rb') as file_desc:
            mock_startup_config = file_desc.read()
            self.assertEqual(startup_config, mock_startup_config)

    def test_edit_config(self):
        """ Test edit_config
        """
        test_config_command = b'this\nis\nthe\nsong\nthat\nnever\nends'

        self._cliconf.edit_config(test_config_command)

        send_calls = []

        for command in [b'configure terminal', test_config_command, b'end']:
            send_calls.append(call(
                command=command,
                prompt_retry_check=False,
                sendonly=False,
                newline=True,
                check_all=False
            ))

        self._mock_connection.send.assert_has_calls(send_calls)

    def test_get_capabilities(self):
        """ Test get_capabilities
        """
        capabilities = json.loads(self._cliconf.get_capabilities())
        mock_capabilities = {
            'network_api': 'cliconf',
            'rpc': [
                'get_config',
                'edit_config',
                'get_capabilities',
                'get',
                'enable_response_logging',
                'disable_response_logging'
            ],
            'device_info': {
                'network_os_model': 'BR-SLX9140',
                'network_os_version': '17s.1.02',
                'network_os': 'slxos'
            }
        }

        self.assertEqual(
            mock_capabilities,
            capabilities
        )
@ -1,413 +0,0 @@
# Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#

import json

from ansible.module_utils.six.moves.urllib.error import HTTPError
from ansible_collections.community.general.tests.unit.compat import mock
from ansible_collections.community.general.tests.unit.compat import unittest
from ansible_collections.community.general.tests.unit.compat.mock import mock_open, patch

from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils.connection import ConnectionError
from ansible_collections.community.general.plugins.module_utils.network.ftd.common import HTTPMethod, ResponseParams
from ansible_collections.community.general.plugins.module_utils.network.ftd.fdm_swagger_client import SpecProp, FdmSwaggerParser
from ansible.module_utils.six import BytesIO, PY3, StringIO
from ansible_collections.community.general.plugins.httpapi.ftd import HttpApi, BASE_HEADERS, TOKEN_PATH_TEMPLATE, DEFAULT_API_VERSIONS


if PY3:
    BUILTINS_NAME = 'builtins'
else:
    BUILTINS_NAME = '__builtin__'


class FakeFtdHttpApiPlugin(HttpApi):
    def __init__(self, conn):
        super(FakeFtdHttpApiPlugin, self).__init__(conn)
        self.hostvars = {
            'token_path': '/testLoginUrl',
            'spec_path': '/testSpecUrl'
        }

    def get_option(self, var):
        return self.hostvars[var]

    def set_option(self, var, val):
        self.hostvars[var] = val


class TestFtdHttpApi(unittest.TestCase):

    def setUp(self):
        self.connection_mock = mock.Mock()
        self.ftd_plugin = FakeFtdHttpApiPlugin(self.connection_mock)
        self.ftd_plugin.access_token = 'ACCESS_TOKEN'
        self.ftd_plugin._load_name = 'httpapi'

    def test_login_should_request_tokens_when_no_refresh_token(self):
        self.connection_mock.send.return_value = self._connection_response(
            {'access_token': 'ACCESS_TOKEN', 'refresh_token': 'REFRESH_TOKEN'}
        )

        self.ftd_plugin.login('foo', 'bar')

        assert 'ACCESS_TOKEN' == self.ftd_plugin.access_token
        assert 'REFRESH_TOKEN' == self.ftd_plugin.refresh_token
        assert {'Authorization': 'Bearer ACCESS_TOKEN'} == self.ftd_plugin.connection._auth
        expected_body = json.dumps({'grant_type': 'password', 'username': 'foo', 'password': 'bar'})
        self.connection_mock.send.assert_called_once_with(mock.ANY, expected_body, headers=mock.ANY, method=mock.ANY)

    def test_login_should_update_tokens_when_refresh_token_exists(self):
        self.ftd_plugin.refresh_token = 'REFRESH_TOKEN'
        self.connection_mock.send.return_value = self._connection_response(
            {'access_token': 'NEW_ACCESS_TOKEN', 'refresh_token': 'NEW_REFRESH_TOKEN'}
        )

        self.ftd_plugin.login('foo', 'bar')

        assert 'NEW_ACCESS_TOKEN' == self.ftd_plugin.access_token
        assert 'NEW_REFRESH_TOKEN' == self.ftd_plugin.refresh_token
        assert {'Authorization': 'Bearer NEW_ACCESS_TOKEN'} == self.ftd_plugin.connection._auth
        expected_body = json.dumps({'grant_type': 'refresh_token', 'refresh_token': 'REFRESH_TOKEN'})
        self.connection_mock.send.assert_called_once_with(mock.ANY, expected_body, headers=mock.ANY, method=mock.ANY)

    def test_login_should_use_env_variable_when_set(self):
        temp_token_path = self.ftd_plugin.hostvars['token_path']
        self.ftd_plugin.hostvars['token_path'] = '/testFakeLoginUrl'
        self.connection_mock.send.return_value = self._connection_response(
            {'access_token': 'ACCESS_TOKEN', 'refresh_token': 'REFRESH_TOKEN'}
        )

        self.ftd_plugin.login('foo', 'bar')

        self.connection_mock.send.assert_called_once_with('/testFakeLoginUrl', mock.ANY, headers=mock.ANY,
                                                          method=mock.ANY)
        self.ftd_plugin.hostvars['token_path'] = temp_token_path

    def test_login_raises_exception_when_no_refresh_token_and_no_credentials(self):
        with self.assertRaises(AnsibleConnectionFailure) as res:
            self.ftd_plugin.login(None, None)
        assert 'Username and password are required' in str(res.exception)

    def test_login_raises_exception_when_invalid_response(self):
        self.connection_mock.send.return_value = self._connection_response(
            {'no_access_token': 'ACCESS_TOKEN'}
        )

        with self.assertRaises(ConnectionError) as res:
            self.ftd_plugin.login('foo', 'bar')

        assert 'Server returned response without token info during connection authentication' in str(res.exception)

    def test_login_raises_exception_when_http_error(self):
        self.connection_mock.send.side_effect = HTTPError('http://testhost.com', 400, '', {},
                                                          StringIO('{"message": "Failed to authenticate user"}'))

        with self.assertRaises(ConnectionError) as res:
            self.ftd_plugin.login('foo', 'bar')

        assert 'Failed to authenticate user' in str(res.exception)

    def test_logout_should_revoke_tokens(self):
        self.ftd_plugin.access_token = 'ACCESS_TOKEN_TO_REVOKE'
        self.ftd_plugin.refresh_token = 'REFRESH_TOKEN_TO_REVOKE'
        self.connection_mock.send.return_value = self._connection_response(None)

        self.ftd_plugin.logout()

        assert self.ftd_plugin.access_token is None
        assert self.ftd_plugin.refresh_token is None
        expected_body = json.dumps({'grant_type': 'revoke_token', 'access_token': 'ACCESS_TOKEN_TO_REVOKE',
                                    'token_to_revoke': 'REFRESH_TOKEN_TO_REVOKE'})
        self.connection_mock.send.assert_called_once_with(mock.ANY, expected_body, headers=mock.ANY, method=mock.ANY)

    def test_send_request_should_send_correct_request(self):
        exp_resp = {'id': '123', 'name': 'foo'}
        self.connection_mock.send.return_value = self._connection_response(exp_resp)

        resp = self.ftd_plugin.send_request('/test/{objId}', HTTPMethod.PUT,
                                            body_params={'name': 'foo'},
                                            path_params={'objId': '123'},
                                            query_params={'at': 0})

        assert {ResponseParams.SUCCESS: True, ResponseParams.STATUS_CODE: 200,
                ResponseParams.RESPONSE: exp_resp} == resp
        self.connection_mock.send.assert_called_once_with('/test/123?at=0', '{"name": "foo"}', method=HTTPMethod.PUT,
                                                          headers=BASE_HEADERS)

    def test_send_request_should_return_empty_dict_when_no_response_data(self):
        self.connection_mock.send.return_value = self._connection_response(None)

        resp = self.ftd_plugin.send_request('/test', HTTPMethod.GET)

        assert {ResponseParams.SUCCESS: True, ResponseParams.STATUS_CODE: 200, ResponseParams.RESPONSE: {}} == resp
        self.connection_mock.send.assert_called_once_with('/test', None, method=HTTPMethod.GET,
                                                          headers=BASE_HEADERS)

    def test_send_request_should_return_error_info_when_http_error_raises(self):
        self.connection_mock.send.side_effect = HTTPError('http://testhost.com', 500, '', {},
                                                          StringIO('{"errorMessage": "ERROR"}'))

        resp = self.ftd_plugin.send_request('/test', HTTPMethod.GET)

        assert {ResponseParams.SUCCESS: False, ResponseParams.STATUS_CODE: 500,
                ResponseParams.RESPONSE: {'errorMessage': 'ERROR'}} == resp

    def test_send_request_raises_exception_when_invalid_response(self):
        self.connection_mock.send.return_value = self._connection_response('nonValidJson')

        with self.assertRaises(ConnectionError) as res:
            self.ftd_plugin.send_request('/test', HTTPMethod.GET)

        assert 'Invalid JSON response' in str(res.exception)

    def test_handle_httperror_should_update_tokens_and_retry_on_auth_errors(self):
        self.ftd_plugin.refresh_token = 'REFRESH_TOKEN'
        self.connection_mock.send.return_value = self._connection_response(
            {'access_token': 'NEW_ACCESS_TOKEN', 'refresh_token': 'NEW_REFRESH_TOKEN'}
        )

        retry = self.ftd_plugin.handle_httperror(HTTPError('http://testhost.com', 401, '', {}, None))

        assert retry
        assert 'NEW_ACCESS_TOKEN' == self.ftd_plugin.access_token
        assert 'NEW_REFRESH_TOKEN' == self.ftd_plugin.refresh_token

    def test_handle_httperror_should_not_retry_on_non_auth_errors(self):
        assert not self.ftd_plugin.handle_httperror(HTTPError('http://testhost.com', 500, '', {}, None))

    def test_handle_httperror_should_not_retry_when_ignoring_http_errors(self):
        self.ftd_plugin._ignore_http_errors = True
        assert not self.ftd_plugin.handle_httperror(HTTPError('http://testhost.com', 401, '', {}, None))

    @patch('os.path.isdir', mock.Mock(return_value=False))
    def test_download_file(self):
        self.connection_mock.send.return_value = self._connection_response('File content')

        open_mock = mock_open()
        with patch('%s.open' % BUILTINS_NAME, open_mock):
            self.ftd_plugin.download_file('/files/1', '/tmp/test.txt')

        open_mock.assert_called_once_with('/tmp/test.txt', 'wb')
        open_mock().write.assert_called_once_with(b'File content')

    @patch('os.path.isdir', mock.Mock(return_value=True))
    def test_download_file_should_extract_filename_from_headers(self):
        filename = 'test_file.txt'
        response = mock.Mock()
        response.info.return_value = {'Content-Disposition': 'attachment; filename="%s"' % filename}
        dummy, response_data = self._connection_response('File content')
        self.connection_mock.send.return_value = response, response_data

        open_mock = mock_open()
        with patch('%s.open' % BUILTINS_NAME, open_mock):
            self.ftd_plugin.download_file('/files/1', '/tmp/')

        open_mock.assert_called_once_with('/tmp/%s' % filename, 'wb')
        open_mock().write.assert_called_once_with(b'File content')

    @patch('os.path.basename', mock.Mock(return_value='test.txt'))
    @patch('ansible_collections.community.general.plugins.httpapi.ftd.encode_multipart_formdata',
           mock.Mock(return_value=('--Encoded data--', 'multipart/form-data')))
    def test_upload_file(self):
        self.connection_mock.send.return_value = self._connection_response({'id': '123'})

        open_mock = mock_open()
        with patch('%s.open' % BUILTINS_NAME, open_mock):
            resp = self.ftd_plugin.upload_file('/tmp/test.txt', '/files')

        assert {'id': '123'} == resp
        exp_headers = dict(BASE_HEADERS)
        exp_headers['Content-Length'] = len('--Encoded data--')
        exp_headers['Content-Type'] = 'multipart/form-data'
        self.connection_mock.send.assert_called_once_with('/files', data='--Encoded data--',
                                                          headers=exp_headers, method=HTTPMethod.POST)
        open_mock.assert_called_once_with('/tmp/test.txt', 'rb')

    @patch('os.path.basename', mock.Mock(return_value='test.txt'))
    @patch('ansible_collections.community.general.plugins.httpapi.ftd.encode_multipart_formdata',
           mock.Mock(return_value=('--Encoded data--', 'multipart/form-data')))
    def test_upload_file_raises_exception_when_invalid_response(self):
        self.connection_mock.send.return_value = self._connection_response('invalidJsonResponse')

        open_mock = mock_open()
        with patch('%s.open' % BUILTINS_NAME, open_mock):
            with self.assertRaises(ConnectionError) as res:
                self.ftd_plugin.upload_file('/tmp/test.txt', '/files')

        assert 'Invalid JSON response' in str(res.exception)

    @patch.object(FdmSwaggerParser, 'parse_spec')
    def test_get_operation_spec(self, parse_spec_mock):
        self.connection_mock.send.return_value = self._connection_response(None)
        parse_spec_mock.return_value = {
            SpecProp.OPERATIONS: {'testOp': 'Specification for testOp'}
        }

        assert 'Specification for testOp' == self.ftd_plugin.get_operation_spec('testOp')
        assert self.ftd_plugin.get_operation_spec('nonExistingTestOp') is None

    @patch.object(FdmSwaggerParser, 'parse_spec')
    def test_get_model_spec(self, parse_spec_mock):
        self.connection_mock.send.return_value = self._connection_response(None)
        parse_spec_mock.return_value = {
            SpecProp.MODELS: {'TestModel': 'Specification for TestModel'}
        }

        assert 'Specification for TestModel' == self.ftd_plugin.get_model_spec('TestModel')
        assert self.ftd_plugin.get_model_spec('NonExistingTestModel') is None

    @patch.object(FdmSwaggerParser, 'parse_spec')
    def test_get_operation_spec_by_model_name(self, parse_spec_mock):
        self.connection_mock.send.return_value = self._connection_response(None)
        operation1 = {'modelName': 'TestModel'}
        op_model_name_is_none = {'modelName': None}
        op_without_model_name = {'url': 'testUrl'}

        parse_spec_mock.return_value = {
            SpecProp.MODEL_OPERATIONS: {
                'TestModel': {
                    'testOp1': operation1,
                    'testOp2': 'spec2'
                },
                'TestModel2': {
                    'testOp10': 'spec10',
                    'testOp20': 'spec20'
                }
            },
            SpecProp.OPERATIONS: {
                'testOp1': operation1,
                'testOp10': {
                    'modelName': 'TestModel2'
                },
                'testOpWithoutModelName': op_without_model_name,
                'testOpModelNameIsNone': op_model_name_is_none
            }
        }

        assert {'testOp1': operation1, 'testOp2': 'spec2'} == self.ftd_plugin.get_operation_specs_by_model_name(
            'TestModel')
        assert None is self.ftd_plugin.get_operation_specs_by_model_name(
            'testOpModelNameIsNone')

        assert None is self.ftd_plugin.get_operation_specs_by_model_name(
            'testOpWithoutModelName')

        assert self.ftd_plugin.get_operation_specs_by_model_name('nonExistingOperation') is None

    @staticmethod
    def _connection_response(response, status=200):
        response_mock = mock.Mock()
        response_mock.getcode.return_value = status
        response_text = json.dumps(response) if type(response) is dict else response
        response_data = BytesIO(response_text.encode() if response_text else ''.encode())
        return response_mock, response_data

    def test_get_list_of_supported_api_versions_with_failed_http_request(self):
        error_msg = "Invalid Credentials"
        fp = mock.MagicMock()
        fp.read.return_value = '{{"error-msg": "{0}"}}'.format(error_msg)
        send_mock = mock.MagicMock(side_effect=HTTPError('url', 400, 'msg', 'hdrs', fp))
        with mock.patch.object(self.ftd_plugin.connection, 'send', send_mock):
            with self.assertRaises(ConnectionError) as res:
                self.ftd_plugin._get_supported_api_versions()

        assert error_msg in str(res.exception)

    def test_get_list_of_supported_api_versions_with_buggy_response(self):
        error_msg = "Non JSON value"
        http_response_mock = mock.MagicMock()
        http_response_mock.getvalue.return_value = error_msg

        send_mock = mock.MagicMock(return_value=(None, http_response_mock))

        with mock.patch.object(self.ftd_plugin.connection, 'send', send_mock):
            with self.assertRaises(ConnectionError) as res:
                self.ftd_plugin._get_supported_api_versions()
        assert error_msg in str(res.exception)

    def test_get_list_of_supported_api_versions_with_positive_response(self):
        http_response_mock = mock.MagicMock()
        http_response_mock.getvalue.return_value = '{"supportedVersions": ["v1"]}'

        send_mock = mock.MagicMock(return_value=(None, http_response_mock))
        with mock.patch.object(self.ftd_plugin.connection, 'send', send_mock):
            supported_versions = self.ftd_plugin._get_supported_api_versions()
        assert supported_versions == ['v1']

    @patch('ansible_collections.community.general.plugins.httpapi.ftd.HttpApi._get_api_token_path', mock.MagicMock(return_value=None))
    @patch('ansible_collections.community.general.plugins.httpapi.ftd.HttpApi._get_known_token_paths')
    def test_lookup_login_url_with_empty_response(self, get_known_token_paths_mock):
        payload = mock.MagicMock()
        get_known_token_paths_mock.return_value = []
        self.assertRaises(
            ConnectionError,
            self.ftd_plugin._lookup_login_url,
            payload
        )

    @patch('ansible_collections.community.general.plugins.httpapi.ftd.HttpApi._get_known_token_paths')
    @patch('ansible_collections.community.general.plugins.httpapi.ftd.HttpApi._send_login_request')
    def test_lookup_login_url_with_failed_request(self, api_request_mock, get_known_token_paths_mock):
        payload = mock.MagicMock()
        url = mock.MagicMock()
        get_known_token_paths_mock.return_value = [url]
        api_request_mock.side_effect = ConnectionError('Error message')
        with mock.patch.object(self.ftd_plugin.connection, 'queue_message') as display_mock:
            self.assertRaises(
                ConnectionError,
                self.ftd_plugin._lookup_login_url,
                payload
            )
            assert display_mock.called

    @patch('ansible_collections.community.general.plugins.httpapi.ftd.HttpApi._get_api_token_path', mock.MagicMock(return_value=None))
    @patch('ansible_collections.community.general.plugins.httpapi.ftd.HttpApi._get_known_token_paths')
    @patch('ansible_collections.community.general.plugins.httpapi.ftd.HttpApi._send_login_request')
    @patch('ansible_collections.community.general.plugins.httpapi.ftd.HttpApi._set_api_token_path')
    def test_lookup_login_url_with_positive_result(self, set_api_token_mock, api_request_mock,
                                                   get_known_token_paths_mock):
        payload = mock.MagicMock()
        url = mock.MagicMock()
        get_known_token_paths_mock.return_value = [url]
        response_mock = mock.MagicMock()
        api_request_mock.return_value = response_mock

        resp = self.ftd_plugin._lookup_login_url(payload)

        set_api_token_mock.assert_called_once_with(url)
        assert resp == response_mock

    @patch('ansible_collections.community.general.plugins.httpapi.ftd.HttpApi._get_supported_api_versions')
    def test_get_known_token_paths_with_positive_response(self, get_list_of_supported_api_versions_mock):
        test_versions = ['v1', 'v2']
        get_list_of_supported_api_versions_mock.return_value = test_versions
        result = self.ftd_plugin._get_known_token_paths()
        assert result == [TOKEN_PATH_TEMPLATE.format(version) for version in test_versions]

    @patch('ansible_collections.community.general.plugins.httpapi.ftd.HttpApi._get_supported_api_versions')
    def test_get_known_token_paths_with_failed_api_call(self, get_list_of_supported_api_versions_mock):
        get_list_of_supported_api_versions_mock.side_effect = ConnectionError('test error message')
        result = self.ftd_plugin._get_known_token_paths()
        assert result == [TOKEN_PATH_TEMPLATE.format(version) for version in DEFAULT_API_VERSIONS]

    def test_set_api_token_path(self):
        url = mock.MagicMock()
        self.ftd_plugin._set_api_token_path(url)
        assert self.ftd_plugin._get_api_token_path() == url
@ -1,104 +0,0 @@
{
    "mock_single_obj": {
        "_last_modified": "",
        "cloud_ref": "https://192.0.2.132/api/cloud/cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
        "dhcp_enabled": true,
        "exclude_discovered_subnets": false,
        "name": "PG-123",
        "synced_from_se": true,
        "tenant_ref": "https://192.0.2.132/api/tenant/admin",
        "url": "https://192.0.2.132/api/network/dvportgroup-2084-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
        "uuid": "dvportgroup-2084-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
        "vcenter_dvs": true,
        "vimgrnw_ref": "https://192.0.2.132/api/vimgrnwruntime/dvportgroup-2084-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
        "vrf_context_ref": "https://192.0.2.132/api/vrfcontext/vrfcontext-31f1b55f-319c-44eb-862f-69d79ffdf295"
    },
    "mock_multiple_obj": {
        "results": [
            {
                "_last_modified": "",
                "cloud_ref": "https://192.0.2.132/api/cloud/cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "dhcp_enabled": true,
                "exclude_discovered_subnets": false,
                "name": "J-PG-0682",
                "synced_from_se": true,
                "tenant_ref": "https://192.0.2.132/api/tenant/admin",
                "url": "https://192.0.2.132/api/network/dvportgroup-2084-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "uuid": "dvportgroup-2084-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "vcenter_dvs": true,
                "vimgrnw_ref": "https://192.0.2.132/api/vimgrnwruntime/dvportgroup-2084-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "vrf_context_ref": "https://192.0.2.132/api/vrfcontext/vrfcontext-31f1b55f-319c-44eb-862f-69d79ffdf295"
            },
            {
                "_last_modified": "",
                "cloud_ref": "https://192.0.2.132/api/cloud/cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "dhcp_enabled": true,
                "exclude_discovered_subnets": false,
                "name": "J-PG-0231",
                "synced_from_se": true,
                "tenant_ref": "https://192.0.2.132/api/tenant/admin",
                "url": "https://192.0.2.132/api/network/dvportgroup-1627-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "uuid": "dvportgroup-1627-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "vcenter_dvs": true,
                "vimgrnw_ref": "https://192.0.2.132/api/vimgrnwruntime/dvportgroup-1627-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "vrf_context_ref": "https://192.0.2.132/api/vrfcontext/vrfcontext-31f1b55f-319c-44eb-862f-69d79ffdf295"
            },
            {
                "_last_modified": "",
                "cloud_ref": "https://192.0.2.132/api/cloud/cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "dhcp_enabled": true,
                "exclude_discovered_subnets": false,
                "name": "J-PG-0535",
                "synced_from_se": true,
                "tenant_ref": "https://192.0.2.132/api/tenant/admin",
                "url": "https://192.0.2.132/api/network/dvportgroup-1934-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "uuid": "dvportgroup-1934-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "vcenter_dvs": true,
                "vimgrnw_ref": "https://192.0.2.132/api/vimgrnwruntime/dvportgroup-1934-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "vrf_context_ref": "https://192.0.2.132/api/vrfcontext/vrfcontext-31f1b55f-319c-44eb-862f-69d79ffdf295"
            },
            {
                "_last_modified": "",
                "cloud_ref": "https://192.0.2.132/api/cloud/cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "dhcp_enabled": true,
                "exclude_discovered_subnets": false,
                "name": "J-PG-0094",
                "synced_from_se": true,
                "tenant_ref": "https://192.0.2.132/api/tenant/admin",
                "url": "https://192.0.2.132/api/network/dvportgroup-1458-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "uuid": "dvportgroup-1458-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "vcenter_dvs": true,
                "vimgrnw_ref": "https://192.0.2.132/api/vimgrnwruntime/dvportgroup-1458-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "vrf_context_ref": "https://192.0.2.132/api/vrfcontext/vrfcontext-31f1b55f-319c-44eb-862f-69d79ffdf295"
            },
            {
                "_last_modified": "",
                "cloud_ref": "https://192.0.2.132/api/cloud/cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "dhcp_enabled": true,
                "exclude_discovered_subnets": false,
                "name": "J-PG-0437",
                "synced_from_se": true,
                "tenant_ref": "https://192.0.2.132/api/tenant/admin",
                "url": "https://192.0.2.132/api/network/dvportgroup-1836-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "uuid": "dvportgroup-1836-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "vcenter_dvs": true,
                "vimgrnw_ref": "https://192.0.2.132/api/vimgrnwruntime/dvportgroup-1836-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "vrf_context_ref": "https://192.0.2.132/api/vrfcontext/vrfcontext-31f1b55f-319c-44eb-862f-69d79ffdf295"
            },
            {
                "_last_modified": "",
                "cloud_ref": "https://192.0.2.132/api/cloud/cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "dhcp_enabled": true,
                "exclude_discovered_subnets": false,
                "name": "J-PG-0673",
                "synced_from_se": true,
                "tenant_ref": "https://192.0.2.132/api/tenant/admin",
                "url": "https://192.0.2.132/api/network/dvportgroup-2075-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "uuid": "dvportgroup-2075-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "vcenter_dvs": true,
                "vimgrnw_ref": "https://192.0.2.132/api/vimgrnwruntime/dvportgroup-2075-cloud-4d063be1-99c2-44cf-8b28-977bd970524c",
                "vrf_context_ref": "https://192.0.2.132/api/vrfcontext/vrfcontext-31f1b55f-319c-44eb-862f-69d79ffdf295"
            }
        ]
    }
}
@ -1,92 +0,0 @@
# -*- coding: utf-8 -*-
# (c) 2019, Sandeep Bandi <sandeepb@avinetworks.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import pytest
import json

from ansible_collections.community.general.tests.unit.compat.mock import patch, MagicMock

from ansible.errors import AnsibleError
from ansible.plugins.loader import lookup_loader
from ansible_collections.community.general.plugins.lookup import avi


try:
    import builtins as __builtin__
except ImportError:
    import __builtin__


fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')

with open(fixture_path + '/avi.json') as json_file:
    data = json.load(json_file)


@pytest.fixture
def dummy_credentials():
    dummy_credentials = {}
    dummy_credentials['controller'] = "192.0.2.13"
    dummy_credentials['username'] = "admin"
    dummy_credentials['password'] = "password"
    dummy_credentials['api_version'] = "17.2.14"
    dummy_credentials['tenant'] = 'admin'
    return dummy_credentials


@pytest.fixture
def super_switcher(scope="function", autouse=True):
    # Mocking the inbuilt super as it is used in ApiSession initialization
    original_super = __builtin__.super
    __builtin__.super = MagicMock()
    yield
    # Revert the super to default state
    __builtin__.super = original_super


def test_lookup_multiple_obj(dummy_credentials):
    avi_lookup = lookup_loader.get('community.general.avi')
    avi_mock = MagicMock()
    avi_mock.return_value.get.return_value.json.return_value = data["mock_multiple_obj"]
    with patch.object(avi, 'ApiSession', avi_mock):
        retval = avi_lookup.run([], {}, avi_credentials=dummy_credentials,
                                obj_type="network")
        assert retval == data["mock_multiple_obj"]["results"]


def test_lookup_single_obj(dummy_credentials):
    avi_lookup = lookup_loader.get('community.general.avi')
    avi_mock = MagicMock()
    avi_mock.return_value.get_object_by_name.return_value = data["mock_single_obj"]
    with patch.object(avi, 'ApiSession', avi_mock):
        retval = avi_lookup.run([], {}, avi_credentials=dummy_credentials,
                                obj_type="network", obj_name='PG-123')
        assert retval[0] == data["mock_single_obj"]


def test_invalid_lookup(dummy_credentials):
    avi_lookup = lookup_loader.get('community.general.avi')
    avi_mock = MagicMock()
    with pytest.raises(AnsibleError):
        with patch.object(avi, 'ApiSession', avi_mock):
            avi_lookup.run([], {}, avi_credentials=dummy_credentials)
@ -1,328 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Copyright: (c) 2017, Dag Wieers (@dagwieers) <dag@wieers.com>
|
||||
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import sys
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.cisco.aci.plugins.module_utils.aci import ACIModule
|
||||
from ansible.module_utils.six import PY2
|
||||
from ansible.module_utils._text import to_native
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
class AltModule():
|
||||
params = dict(
|
||||
hostname='dummy',
|
||||
port=123,
|
||||
protocol='https',
|
||||
state='present',
|
||||
)
|
||||
|
||||
|
||||
class AltACIModule(ACIModule):
|
||||
def __init__(self):
|
||||
self.result = dict(changed=False)
|
||||
self.module = AltModule
|
||||
self.params = self.module.params
|
||||
|
||||
|
||||
aci = AltACIModule()
|
||||
|
||||
|
||||
try:
|
||||
from lxml import etree
|
||||
if sys.version_info >= (2, 7):
|
||||
from xmljson import cobra
|
||||
except ImportError:
|
||||
pytestmark = pytest.mark.skip("ACI Ansible modules require the lxml and xmljson Python libraries")
|
||||
|
||||
|
||||
class AciRest(unittest.TestCase):
|
||||
|
||||
def test_invalid_aci_login(self):
|
||||
self.maxDiff = None
|
||||
|
||||
error = dict(
|
||||
code='401',
|
||||
text='Username or password is incorrect - FAILED local authentication',
|
||||
)
|
||||
|
||||
imdata = [{
|
||||
'error': {
|
||||
'attributes': {
|
||||
'code': '401',
|
||||
'text': 'Username or password is incorrect - FAILED local authentication',
|
||||
},
|
||||
},
|
||||
}]
|
||||
|
||||
totalCount = 1
|
||||
|
||||
json_response = '{"totalCount":"1","imdata":[{"error":{"attributes":{"code":"401","text":"Username or password is incorrect - FAILED local authentication"}}}]}' # NOQA
|
||||
json_result = dict()
|
||||
aci.response_json(json_response)
|
||||
self.assertEqual(aci.error, error)
|
||||
self.assertEqual(aci.imdata, imdata)
|
||||
self.assertEqual(aci.totalCount, totalCount)
|
||||
|
||||
# Python 2.7+ is needed for xmljson
|
||||
if sys.version_info < (2, 7):
|
||||
return
|
||||
|
||||
xml_response = '''<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1">
|
||||
<error code="401" text="Username or password is incorrect - FAILED local authentication"/>
|
||||
</imdata>
|
||||
'''
|
||||
xml_result = dict()
|
||||
aci.response_xml(xml_response)
|
||||
self.assertEqual(aci.error, error)
|
||||
self.assertEqual(aci.imdata, imdata)
|
||||
self.assertEqual(aci.totalCount, totalCount)
|
||||
|
||||
def test_valid_aci_login(self):
|
||||
self.maxDiff = None
|
||||
|
||||
imdata = [{
|
||||
'aaaLogin': {
|
||||
'attributes': {
|
||||
'token': 'ZldYAsoO9d0FfAQM8xaEVWvQPSOYwpnqzhwpIC1r4MaToknJjlIuAt9+TvXqrZ8lWYIGPj6VnZkWiS8nJfaiaX/AyrdD35jsSxiP3zydh+849xym7ALCw/fFNsc7b5ik1HaMuSUtdrN8fmCEUy7Pq/QNpGEqkE8m7HaxAuHpmvXgtdW1bA+KKJu2zY1c/tem', # NOQA
|
||||
'siteFingerprint': 'NdxD72K/uXaUK0wn',
|
||||
'refreshTimeoutSeconds': '600',
|
||||
'maximumLifetimeSeconds': '86400',
|
||||
'guiIdleTimeoutSeconds': '1200',
|
||||
'restTimeoutSeconds': '90',
|
||||
'creationTime': '1500134817',
|
||||
'firstLoginTime': '1500134817',
|
||||
'userName': 'admin',
|
||||
'remoteUser': 'false',
|
||||
'unixUserId': '15374',
|
||||
'sessionId': 'o7hObsqNTfCmDGcZI5c4ng==',
|
||||
'lastName': '',
|
||||
'firstName': '',
|
||||
'version': '2.0(2f)',
|
||||
'buildTime': 'Sat Aug 20 23:07:07 PDT 2016',
|
||||
'node': 'topology/pod-1/node-1',
|
||||
},
|
||||
'children': [{
|
||||
'aaaUserDomain': {
|
||||
'attributes': {
|
||||
'name': 'all',
|
||||
'rolesR': 'admin',
|
||||
'rolesW': 'admin',
|
||||
},
|
||||
'children': [{
|
||||
'aaaReadRoles': {
|
||||
'attributes': {},
|
||||
},
|
||||
}, {
|
||||
'aaaWriteRoles': {
|
||||
'attributes': {},
|
||||
'children': [{
|
||||
'role': {
|
||||
'attributes': {
|
||||
'name': 'admin',
|
||||
},
|
||||
},
|
||||
}],
|
||||
},
|
||||
}],
|
||||
},
|
||||
}, {
|
||||
'DnDomainMapEntry': {
|
||||
'attributes': {
|
||||
'dn': 'uni/tn-common',
|
||||
'readPrivileges': 'admin',
|
||||
'writePrivileges': 'admin',
|
||||
},
|
||||
},
|
||||
}, {
|
||||
'DnDomainMapEntry': {
|
||||
'attributes': {
|
||||
'dn': 'uni/tn-infra',
|
||||
'readPrivileges': 'admin',
|
||||
'writePrivileges': 'admin',
|
||||
},
|
||||
},
|
||||
}, {
|
||||
'DnDomainMapEntry': {
|
||||
'attributes': {
|
||||
'dn': 'uni/tn-mgmt',
|
||||
'readPrivileges': 'admin',
|
||||
'writePrivileges': 'admin',
|
||||
},
|
||||
},
|
||||
}],
|
||||
},
|
||||
}]
|
||||
|
||||
totalCount = 1
|
||||
|
||||
json_response = '{"totalCount":"1","imdata":[{"aaaLogin":{"attributes":{"token":"ZldYAsoO9d0FfAQM8xaEVWvQPSOYwpnqzhwpIC1r4MaToknJjlIuAt9+TvXqrZ8lWYIGPj6VnZkWiS8nJfaiaX/AyrdD35jsSxiP3zydh+849xym7ALCw/fFNsc7b5ik1HaMuSUtdrN8fmCEUy7Pq/QNpGEqkE8m7HaxAuHpmvXgtdW1bA+KKJu2zY1c/tem","siteFingerprint":"NdxD72K/uXaUK0wn","refreshTimeoutSeconds":"600","maximumLifetimeSeconds":"86400","guiIdleTimeoutSeconds":"1200","restTimeoutSeconds":"90","creationTime":"1500134817","firstLoginTime":"1500134817","userName":"admin","remoteUser":"false","unixUserId":"15374","sessionId":"o7hObsqNTfCmDGcZI5c4ng==","lastName":"","firstName":"","version":"2.0(2f)","buildTime":"Sat Aug 20 23:07:07 PDT 2016","node":"topology/pod-1/node-1"},"children":[{"aaaUserDomain":{"attributes":{"name":"all","rolesR":"admin","rolesW":"admin"},"children":[{"aaaReadRoles":{"attributes":{}}},{"aaaWriteRoles":{"attributes":{},"children":[{"role":{"attributes":{"name":"admin"}}}]}}]}},{"DnDomainMapEntry":{"attributes":{"dn":"uni/tn-common","readPrivileges":"admin","writePrivileges":"admin"}}},{"DnDomainMapEntry":{"attributes":{"dn":"uni/tn-infra","readPrivileges":"admin","writePrivileges":"admin"}}},{"DnDomainMapEntry":{"attributes":{"dn":"uni/tn-mgmt","readPrivileges":"admin","writePrivileges":"admin"}}}]}}]}' # NOQA
|
||||
json_result = dict()
|
||||
aci.response_json(json_response)
|
||||
self.assertEqual(aci.imdata, imdata)
|
||||
self.assertEqual(aci.totalCount, totalCount)
|
||||
|
||||
# Python 2.7+ is needed for xmljson
|
||||
if sys.version_info < (2, 7):
|
||||
return
|
||||
|
||||
xml_response = '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1">\n<aaaLogin token="ZldYAsoO9d0FfAQM8xaEVWvQPSOYwpnqzhwpIC1r4MaToknJjlIuAt9+TvXqrZ8lWYIGPj6VnZkWiS8nJfaiaX/AyrdD35jsSxiP3zydh+849xym7ALCw/fFNsc7b5ik1HaMuSUtdrN8fmCEUy7Pq/QNpGEqkE8m7HaxAuHpmvXgtdW1bA+KKJu2zY1c/tem" siteFingerprint="NdxD72K/uXaUK0wn" refreshTimeoutSeconds="600" maximumLifetimeSeconds="86400" guiIdleTimeoutSeconds="1200" restTimeoutSeconds="90" creationTime="1500134817" firstLoginTime="1500134817" userName="admin" remoteUser="false" unixUserId="15374" sessionId="o7hObsqNTfCmDGcZI5c4ng==" lastName="" firstName="" version="2.0(2f)" buildTime="Sat Aug 20 23:07:07 PDT 2016" node="topology/pod-1/node-1">\n<aaaUserDomain name="all" rolesR="admin" rolesW="admin">\n<aaaReadRoles/>\n<aaaWriteRoles>\n<role name="admin"/>\n</aaaWriteRoles>\n</aaaUserDomain>\n<DnDomainMapEntry dn="uni/tn-common" readPrivileges="admin" writePrivileges="admin"/>\n<DnDomainMapEntry dn="uni/tn-infra" readPrivileges="admin" writePrivileges="admin"/>\n<DnDomainMapEntry dn="uni/tn-mgmt" readPrivileges="admin" writePrivileges="admin"/>\n</aaaLogin></imdata>\n''' # NOQA
|
||||
xml_result = dict()
|
||||
aci.response_xml(xml_response)
|
||||
self.assertEqual(aci.imdata, imdata)
|
||||
self.assertEqual(aci.totalCount, totalCount)
|
||||
|
||||
def test_invalid_input(self):
|
||||
self.maxDiff = None
|
||||
|
||||
error = dict(
|
||||
code='401',
|
||||
text='Username or password is incorrect - FAILED local authentication',
|
||||
)
|
||||
|
||||
imdata = [{
|
||||
'error': {
|
||||
'attributes': {
|
||||
'code': '401',
|
||||
'text': 'Username or password is incorrect - FAILED local authentication',
|
||||
},
|
||||
},
|
||||
}]
|
||||
|
||||
totalCount = 1
|
||||
|
||||
json_response = '{"totalCount":"1","imdata":[{"error":{"attributes":{"code":"401","text":"Username or password is incorrect - FAILED local authentication"}}}]}' # NOQA
|
||||
json_result = dict()
|
||||
aci.response_json(json_response)
|
||||
self.assertEqual(aci.error, error)
|
||||
self.assertEqual(aci.imdata, imdata)
|
||||
self.assertEqual(aci.totalCount, totalCount)
|
||||
|
||||
# Python 2.7+ is needed for xmljson
|
||||
if sys.version_info < (2, 7):
|
||||
return
|
||||
|
||||
xml_response = '''<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1">
|
||||
<error code="401" text="Username or password is incorrect - FAILED local authentication"/>
|
||||
</imdata>
|
||||
'''
|
||||
xml_result = dict()
|
||||
aci.response_xml(xml_response)
|
||||
self.assertEqual(aci.error, error)
|
||||
self.assertEqual(aci.imdata, imdata)
|
||||
self.assertEqual(aci.totalCount, totalCount)
|
||||
|
||||
def test_empty_response(self):
|
||||
self.maxDiffi = None
|
||||
|
||||
if PY2:
|
||||
error_text = "Unable to parse output as JSON, see 'raw' output. No JSON object could be decoded"
|
||||
else:
|
||||
error_text = "Unable to parse output as JSON, see 'raw' output. Expecting value: line 1 column 1 (char 0)"
|
||||
|
||||
error = dict(
|
||||
code=-1,
|
||||
text=error_text,
|
||||
)
|
||||
raw = ''
|
||||
|
||||
json_response = ''
|
||||
json_result = dict()
|
||||
aci.response_json(json_response)
|
||||
self.assertEqual(aci.error, error)
|
||||
self.assertEqual(aci.result['raw'], raw)
|
||||
|
||||
# Python 2.7+ is needed for xmljson
|
||||
if sys.version_info < (2, 7):
|
||||
return
|
||||
|
||||
elif etree.LXML_VERSION < (3, 3, 0, 0):
|
||||
error_text = "Unable to parse output as XML, see 'raw' output. None",
|
||||
elif etree.LXML_VERSION < (4, 0, 0, 0):
|
||||
error_text = to_native(u"Unable to parse output as XML, see 'raw' output. None (line 0)", errors='surrogate_or_strict')
|
||||
elif PY2:
|
||||
error_text = "Unable to parse output as XML, see 'raw' output. Document is empty, line 1, column 1 (line 1)"
|
||||
else:
|
||||
error_text = None
|
||||
|
||||
xml_response = ''
|
||||
aci.response_xml(xml_response)
|
||||
|
||||
if error_text is None:
|
||||
# errors vary on Python 3.8+ for unknown reasons
|
||||
# accept any of the following error messages
|
||||
errors = (
|
||||
"Unable to parse output as XML, see 'raw' output. None (line 0)",
|
||||
"Unable to parse output as XML, see 'raw' output. Document is empty, line 1, column 1 (<string>, line 1)",
|
||||
)
|
||||
|
||||
for error in errors:
|
||||
if error in aci.error['text']:
|
||||
error_text = error
|
||||
break
|
||||
|
||||
error = dict(
|
||||
code=-1,
|
||||
text=error_text,
|
||||
)
|
||||
|
||||
raw = ''
|
||||
|
||||
self.assertEqual(aci.error, error)
|
||||
self.assertEqual(aci.result['raw'], raw)
|
||||
|
||||
def test_invalid_response(self):
|
||||
self.maxDiff = None
|
||||
|
||||
if sys.version_info < (2, 7):
|
||||
error_text = "Unable to parse output as JSON, see 'raw' output. Expecting object: line 1 column 8 (char 8)"
|
||||
elif PY2:
|
||||
error_text = "Unable to parse output as JSON, see 'raw' output. No JSON object could be decoded"
|
||||
else:
|
||||
error_text = "Unable to parse output as JSON, see 'raw' output. Expecting value: line 1 column 9 (char 8)"
|
||||
|
||||
error = dict(
|
||||
code=-1,
|
||||
text=error_text,
|
||||
)
|
||||
|
||||
raw = '{ "aaa":'
|
||||
|
||||
json_response = '{ "aaa":'
|
||||
json_result = dict()
|
||||
aci.response_json(json_response)
|
||||
self.assertEqual(aci.error, error)
|
||||
self.assertEqual(aci.result['raw'], raw)
|
||||
|
||||
# Python 2.7+ is needed for xmljson
|
||||
if sys.version_info < (2, 7):
|
||||
return
|
||||
|
||||
elif etree.LXML_VERSION < (3, 3, 0, 0):
|
||||
error_text = "Unable to parse output as XML, see 'raw' output. Couldn't find end of Start Tag aaa line 1, line 1, column 5" # NOQA
|
||||
|
||||
elif PY2:
|
||||
error_text = "Unable to parse output as XML, see 'raw' output. Couldn't find end of Start Tag aaa line 1, line 1, column 6 (line 1)" # NOQA
|
||||
|
||||
else:
|
||||
error_text = "Unable to parse output as XML, see 'raw' output. Couldn't find end of Start Tag aaa line 1, line 1, column 6 (<string>, line 1)" # NOQA
|
||||
|
||||
error = dict(
|
||||
code=-1,
|
||||
text=error_text,
|
||||
)
|
||||
|
||||
raw = '<aaa '
|
||||
|
||||
xml_response = '<aaa '
|
||||
xml_result = dict()
|
||||
aci.response_xml(xml_response)
|
||||
self.assertEqual(aci.error, error)
|
||||
self.assertEqual(aci.result['raw'], raw)
|
|
@ -1,699 +0,0 @@
|
|||
'''
|
||||
Created on Aug 16, 2016
|
||||
|
||||
@author: grastogi
|
||||
'''
|
||||
import unittest
|
||||
from ansible_collections.community.general.plugins.module_utils.network.avi.ansible_utils import \
|
||||
cleanup_absent_fields, avi_obj_cmp
|
||||
|
||||
|
||||
class TestAviApiUtils(unittest.TestCase):
|
||||
|
||||
def test_avi_obj_cmp(self):
|
||||
obj = {'name': 'testpool'}
|
||||
existing_obj = {
|
||||
'lb_algorithm': 'LB_ALGORITHM_LEAST_CONNECTIONS',
|
||||
'use_service_port': False,
|
||||
'server_auto_scale': False,
|
||||
'host_check_enabled': False,
|
||||
'enabled': True,
|
||||
'capacity_estimation': False,
|
||||
'fewest_tasks_feedback_delay': 10,
|
||||
'_last_modified': '1471377748747040',
|
||||
'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-afe8bf2c-9821-4272-9bc6-67634c84bec9',
|
||||
'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-0e8ce760-fed2-4650-9397-5b3e4966376e',
|
||||
'inline_health_monitor': True,
|
||||
'default_server_port': 80,
|
||||
'request_queue_depth': 128,
|
||||
'graceful_disable_timeout': 1,
|
||||
'server_count': 0,
|
||||
'sni_enabled': True,
|
||||
'request_queue_enabled': False,
|
||||
'name': 'testpool',
|
||||
'max_concurrent_connections_per_server': 0,
|
||||
'url': 'https://192.0.2.42/api/pool/pool-20084ee1-872e-4103-98e1-899103e2242a',
|
||||
'tenant_ref': 'https://192.0.2.42/api/tenant/admin',
|
||||
'uuid': 'pool-20084ee1-872e-4103-98e1-899103e2242a',
|
||||
'connection_ramp_duration': 10}
|
||||
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert diff
|
||||
|
||||
def test_avi_obj_cmp_w_refs(self):
|
||||
obj = {'name': 'testpool',
|
||||
'health_monitor_refs': ['/api/healthmonitor?name=System-HTTP'],
|
||||
'enabled': True}
|
||||
existing_obj = {
|
||||
'lb_algorithm': 'LB_ALGORITHM_LEAST_CONNECTIONS',
|
||||
'use_service_port': False,
|
||||
'server_auto_scale': False,
|
||||
'host_check_enabled': False,
|
||||
'enabled': True,
|
||||
'capacity_estimation': False,
|
||||
'fewest_tasks_feedback_delay': 10,
|
||||
'_last_modified': '1471377748747040',
|
||||
'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-afe8bf2c-9821-4272-9bc6-67634c84bec9',
|
||||
'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-0e8ce760-fed2-4650-9397-5b3e4966376e',
|
||||
'inline_health_monitor': True,
|
||||
'default_server_port': 80,
|
||||
'request_queue_depth': 128,
|
||||
'graceful_disable_timeout': 1,
|
||||
'server_count': 0,
|
||||
'sni_enabled': True,
|
||||
'request_queue_enabled': False,
|
||||
'name': 'testpool',
|
||||
'max_concurrent_connections_per_server': 0,
|
||||
'url': 'https://192.0.2.42/api/pool/pool-20084ee1-872e-4103-98e1-899103e2242a',
|
||||
'tenant_ref': 'https://192.0.2.42/api/tenant/admin',
|
||||
'uuid': 'pool-20084ee1-872e-4103-98e1-899103e2242a',
|
||||
'connection_ramp_duration': 10,
|
||||
'health_monitor_refs': [
|
||||
"https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9#System-HTTP"],
|
||||
}
|
||||
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert diff
|
||||
|
||||
obj = {'name': 'testpool',
|
||||
'health_monitor_refs': ['/api/healthmonitor?name=System-HTTP'],
|
||||
'server_count': 1}
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert not diff
|
||||
|
||||
obj = {'name': 'testpool',
|
||||
'health_monitor_refs': ['api/healthmonitor?name=System-HTTP'],
|
||||
'server_count': 0}
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert not diff
|
||||
obj = {'name': 'testpool',
|
||||
'health_monitor_refs': ['healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9'],
|
||||
'server_count': 0}
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert diff
|
||||
obj = {'name': 'testpool#asdfasf',
|
||||
'health_monitor_refs': ['api/healthmonitor?name=System-HTTP'],
|
||||
'server_count': 0}
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert not diff
|
||||
obj = {'name': 'testpool',
|
||||
'health_monitor_refs': ['/api/healthmonitor?name=System-HTTP#'],
|
||||
'server_count': 0}
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert not diff
|
||||
|
||||
def test_avi_obj_cmp_empty_list(self):
|
||||
obj = {'name': 'testpool',
|
||||
'health_monitor_refs': [],
|
||||
'enabled': True}
|
||||
existing_obj = {
|
||||
'lb_algorithm': 'LB_ALGORITHM_LEAST_CONNECTIONS',
|
||||
'use_service_port': False,
|
||||
'server_auto_scale': False,
|
||||
'host_check_enabled': False,
|
||||
'enabled': True,
|
||||
'capacity_estimation': False,
|
||||
'fewest_tasks_feedback_delay': 10,
|
||||
'_last_modified': '1471377748747040',
|
||||
'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-afe8bf2c-9821-4272-9bc6-67634c84bec9',
|
||||
'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-0e8ce760-fed2-4650-9397-5b3e4966376e',
|
||||
'inline_health_monitor': True,
|
||||
'default_server_port': 80,
|
||||
'request_queue_depth': 128,
|
||||
'graceful_disable_timeout': 1,
|
||||
'server_count': 0,
|
||||
'sni_enabled': True,
|
||||
'request_queue_enabled': False,
|
||||
'name': 'testpool',
|
||||
'max_concurrent_connections_per_server': 0,
|
||||
'url': 'https://192.0.2.42/api/pool/pool-20084ee1-872e-4103-98e1-899103e2242a',
|
||||
'tenant_ref': 'https://192.0.2.42/api/tenant/admin',
|
||||
'uuid': 'pool-20084ee1-872e-4103-98e1-899103e2242a',
|
||||
'connection_ramp_duration': 10
|
||||
}
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert diff
|
||||
|
||||
def test_avi_obj_cmp_w_refs_n_name(self):
|
||||
existing_obj = {
|
||||
'use_service_port': False,
|
||||
'server_auto_scale': False,
|
||||
'host_check_enabled': False,
|
||||
'enabled': True,
|
||||
'capacity_estimation': False,
|
||||
'fewest_tasks_feedback_delay': 10,
|
||||
'_last_modified': '1471377748747040',
|
||||
'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-afe8bf2c-9821-4272-9bc6-67634c84bec9',
|
||||
'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-0e8ce760-fed2-4650-9397-5b3e4966376e',
|
||||
'inline_health_monitor': True,
|
||||
'default_server_port': 80,
|
||||
'request_queue_depth': 128,
|
||||
'graceful_disable_timeout': 1,
|
||||
'server_count': 0,
|
||||
'sni_enabled': True,
|
||||
'request_queue_enabled': False,
|
||||
'name': 'testpool',
|
||||
'max_concurrent_connections_per_server': 0,
|
||||
'url': 'https://192.0.2.42/api/pool/pool-20084ee1-872e-4103-98e1-899103e2242a',
|
||||
'tenant_ref': 'https://192.0.2.42/api/tenant/admin',
|
||||
'uuid': 'pool-20084ee1-872e-4103-98e1-899103e2242a',
|
||||
'connection_ramp_duration': 10,
|
||||
'health_monitor_refs': [
|
||||
"https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9#System-HTTP",
|
||||
"https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc8",
|
||||
],
|
||||
}
|
||||
|
||||
obj = {'name': 'testpool',
|
||||
'health_monitor_refs': ['https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9',
|
||||
"https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc8"],
|
||||
'server_count': 0}
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert diff
|
||||
|
||||
obj = {'name': 'testpool',
|
||||
'health_monitor_refs': [
|
||||
'https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9#System-HTTP',
|
||||
"https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc8"],
|
||||
'server_count': 0}
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert diff
|
||||
|
||||
obj = {'name': 'testpool',
|
||||
'health_monitor_refs': [
|
||||
'https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9#System-HTTP',
|
||||
"https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc8#System-HTTP2"],
|
||||
'server_count': 0,
|
||||
'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-afe8bf2c-9821-4272-9bc6-67634c84bec9#Default-Cloud',
|
||||
}
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert diff
|
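# Editorial note (inferred from the assertions above, not from the module's documentation): the
# comparisons succeed even though the existing *_ref URLs carry a trailing '#<name>' fragment while
# the desired ones do not, i.e. refs appear to be compared by their base URL only. A minimal
# illustrative check of that assumption, using the same URLs as this test:
#     existing_ref = 'https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9#System-HTTP'
#     desired_ref = 'https://192.0.2.42/api/healthmonitor/healthmonitor-6d07b57f-126b-476c-baba-a8c8c8b06dc9'
#     assert existing_ref.split('#')[0] == desired_ref.split('#')[0]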
||||
|
||||
def test_avi_list_update(self):
|
||||
existing_obj = {
|
||||
'services': [
|
||||
{
|
||||
"enable_ssl": False,
|
||||
"port_range_end": 80,
|
||||
"port": 80
|
||||
},
|
||||
{
|
||||
"enable_ssl": False,
|
||||
"port_range_end": 443,
|
||||
"port": 443
|
||||
}
|
||||
],
|
||||
"name": "vs-health-test",
|
||||
"url": "https://192.0.2.42/api/virtualservice/virtualservice-526c55c2-df89-40b9-9de6-e45a472290aa",
|
||||
}
|
||||
|
||||
obj = {
|
||||
'services': [
|
||||
{
|
||||
"enable_ssl": False,
|
||||
"port_range_end": 80,
|
||||
"port": 80
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert not diff
|
||||
|
||||
obj = {
|
||||
'services': [
|
||||
{
|
||||
"enable_ssl": False,
|
||||
"port_range_end": 80,
|
||||
"port": 80
|
||||
},
|
||||
{
|
||||
"enable_ssl": False,
|
||||
"port_range_end": 443,
|
||||
"port": 80
|
||||
}
|
||||
],
|
||||
"name": "vs-health-test",
|
||||
"url": "https://192.0.2.42/api/virtualservice/virtualservice-526c55c2-df89-40b9-9de6-e45a472290aa",
|
||||
}
|
||||
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert not diff
|
||||
|
||||
def test_cleanup_absent(self):
|
||||
obj = {'x': 10,
|
||||
'y': {'state': 'absent'},
|
||||
'z': {'a': {'state': 'absent'}},
|
||||
'l': [{'y1': {'state': 'absent'}}],
|
||||
'z1': {'a': {'state': 'absent'}, 'b': {}, 'c': 42},
|
||||
'empty': []}
|
||||
|
||||
obj = cleanup_absent_fields(obj)
|
||||
|
||||
assert 'y' not in obj
|
||||
assert 'z' not in obj
|
||||
assert 'l' not in obj
|
||||
assert 'z1' in obj
|
||||
assert 'b' not in obj['z1']
|
||||
assert 'a' not in obj['z1']
|
||||
assert 'empty' not in obj
|
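# Editorial note (summarising the assertions above; the wording is ours, not the module's docs):
# cleanup_absent_fields appears to drop any key whose value is {'state': 'absent'}, recurse into
# nested dicts and lists, and remove containers that end up empty ('z', 'l', 'empty'), while a dict
# that still holds other data ('z1' with 'c': 42) is kept with only its absent/empty members pruned.
# A rough sketch of the membership check under that assumption:
#     def is_absent(value):
#         return isinstance(value, dict) and value.get('state') == 'absent'
#     assert is_absent({'state': 'absent'}) and not is_absent({'c': 42})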
||||
|
||||
def test_complex_obj(self):
|
||||
|
||||
obj = {
|
||||
'lb_algorithm': 'LB_ALGORITHM_ROUND_ROBIN',
|
||||
'use_service_port': False, 'server_auto_scale': False,
|
||||
'host_check_enabled': False,
|
||||
'tenant_ref': 'https://192.0.2.42/api/tenant/admin#admin',
|
||||
'capacity_estimation': False,
|
||||
'servers': [{
|
||||
'hostname': 'grastogi-server6', 'ratio': 1,
|
||||
'ip': {'type': 'V4', 'addr': '198.51.100.62'},
|
||||
'discovered_networks': [{
|
||||
'subnet': [{
|
||||
'ip_addr': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.0'
|
||||
},
|
||||
'mask': 24
|
||||
}],
|
||||
'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10#PG-964'
|
||||
}],
|
||||
'enabled': True, 'nw_ref': 'https://192.0.2.42/api/vimgrnwruntime/dvportgroup-53975-10.10.2.10#PG-964',
|
||||
'verify_network': False,
|
||||
'static': False,
|
||||
'resolve_server_by_dns': False,
|
||||
'external_uuid': 'vm-4230615e-bc0b-3d33-3929-1c7328575993',
|
||||
'vm_ref': 'https://192.0.2.42/api/vimgrvmruntime/vm-4230615e-bc0b-3d33-3929-1c7328575993#grastogi-server6'
|
||||
}, {
|
||||
'hostname': 'grastogi-server6',
|
||||
'ratio': 1,
|
||||
'ip': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.61'
|
||||
},
|
||||
'discovered_networks': [{
|
||||
'subnet': [{
|
||||
'ip_addr': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.0'
|
||||
},
|
||||
'mask': 24
|
||||
}],
|
||||
'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10#PG-964'
|
||||
}],
|
||||
'enabled': True,
|
||||
'nw_ref': 'https://192.0.2.42/api/vimgrnwruntime/dvportgroup-53975-10.10.2.10#PG-964',
|
||||
'verify_network': False,
|
||||
'static': False,
|
||||
'resolve_server_by_dns': False,
|
||||
'external_uuid': 'vm-4230615e-bc0b-3d33-3929-1c7328575993',
|
||||
'vm_ref': 'https://192.0.2.42/api/vimgrvmruntime/vm-4230615e-bc0b-3d33-3929-1c7328575993#grastogi-server6'
|
||||
}, {
|
||||
'hostname': 'grastogi-server6',
|
||||
'ratio': 1,
|
||||
'ip': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.65'
|
||||
},
|
||||
'discovered_networks': [{
|
||||
'subnet': [{
|
||||
'ip_addr': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.0'
|
||||
}, 'mask': 24
|
||||
}],
|
||||
'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10#PG-964'
|
||||
}],
|
||||
'enabled': True,
|
||||
'verify_network': False,
|
||||
'static': False,
|
||||
'resolve_server_by_dns': False
|
||||
}],
|
||||
'fewest_tasks_feedback_delay': 10,
|
||||
'_last_modified': '1473292763246107',
|
||||
'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-e0696a58-8b72-4026-923c-9a87c38a2489#Default-Cloud',
|
||||
'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-33dfbcd7-867c-4e3e-acf7-96bf679d5a0d#global',
|
||||
'inline_health_monitor': True,
|
||||
'default_server_port': 8000,
|
||||
'request_queue_depth': 128,
|
||||
'graceful_disable_timeout': 1,
|
||||
'sni_enabled': True,
|
||||
'server_count': 3,
|
||||
'uuid': 'pool-09201181-747e-41ea-872d-e9a7df71b726',
|
||||
'request_queue_enabled': False,
|
||||
'name': 'p1',
|
||||
'max_concurrent_connections_per_server': 0,
|
||||
'url': 'https://192.0.2.42/api/pool/pool-09201181-747e-41ea-872d-e9a7df71b726#p1',
|
||||
'enabled': True,
|
||||
'connection_ramp_duration': 10}
|
||||
|
||||
existing_obj = {
|
||||
'lb_algorithm': 'LB_ALGORITHM_ROUND_ROBIN',
|
||||
'use_service_port': False,
|
||||
'server_auto_scale': False,
|
||||
'host_check_enabled': False,
|
||||
'tenant_ref': 'https://192.0.2.42/api/tenant/admin',
|
||||
'capacity_estimation': False,
|
||||
'servers': [{
|
||||
'hostname': 'grastogi-server6', 'ratio': 1,
|
||||
'ip': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.62'
|
||||
},
|
||||
'discovered_networks': [{
|
||||
'subnet': [{
|
||||
'mask': 24,
|
||||
'ip_addr': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.0'
|
||||
}
|
||||
}],
|
||||
'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10'
|
||||
}],
|
||||
'enabled': True,
|
||||
'nw_ref': 'https://192.0.2.42/api/vimgrnwruntime/dvportgroup-53975-10.10.2.10',
|
||||
'verify_network': False,
|
||||
'static': False,
|
||||
'resolve_server_by_dns': False,
|
||||
'external_uuid': 'vm-4230615e-bc0b-3d33-3929-1c7328575993',
|
||||
'vm_ref': 'https://192.0.2.42/api/vimgrvmruntime/vm-4230615e-bc0b-3d33-3929-1c7328575993'
|
||||
}, {
|
||||
'hostname': 'grastogi-server6',
|
||||
'ratio': 1,
|
||||
'ip': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.61'
|
||||
},
|
||||
'discovered_networks': [{
|
||||
'subnet': [{
|
||||
'mask': 24,
|
||||
'ip_addr': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.0'
|
||||
}
|
||||
}],
|
||||
'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10'
|
||||
}],
|
||||
'enabled': True,
|
||||
'nw_ref': 'https://192.0.2.42/api/vimgrnwruntime/dvportgroup-53975-10.10.2.10',
|
||||
'verify_network': False,
|
||||
'static': False,
|
||||
'resolve_server_by_dns': False,
|
||||
'external_uuid': 'vm-4230615e-bc0b-3d33-3929-1c7328575993',
|
||||
'vm_ref': 'https://192.0.2.42/api/vimgrvmruntime/vm-4230615e-bc0b-3d33-3929-1c7328575993'
|
||||
}, {
|
||||
'hostname': 'grastogi-server6',
|
||||
'ratio': 1,
|
||||
'ip': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.65'
|
||||
},
|
||||
'discovered_networks': [{
|
||||
'subnet': [{
|
||||
'mask': 24,
|
||||
'ip_addr': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.0'
|
||||
}
|
||||
}],
|
||||
'network_ref': 'https://192.0.2.42/api/network/dvportgroup-53975-10.10.2.10'
|
||||
}],
|
||||
'enabled': True,
|
||||
'nw_ref': 'https://192.0.2.42/api/vimgrnwruntime/dvportgroup-53975-10.10.2.10',
|
||||
'verify_network': False,
|
||||
'static': False,
|
||||
'resolve_server_by_dns': False,
|
||||
'external_uuid': 'vm-4230615e-bc0b-3d33-3929-1c7328575993',
|
||||
'vm_ref': 'https://192.0.2.42/api/vimgrvmruntime/vm-4230615e-bc0b-3d33-3929-1c7328575993'
|
||||
}],
|
||||
'fewest_tasks_feedback_delay': 10,
|
||||
'cloud_ref': 'https://192.0.2.42/api/cloud/cloud-e0696a58-8b72-4026-923c-9a87c38a2489',
|
||||
'vrf_ref': 'https://192.0.2.42/api/vrfcontext/vrfcontext-33dfbcd7-867c-4e3e-acf7-96bf679d5a0d',
|
||||
'inline_health_monitor': True,
|
||||
'default_server_port': 8000,
|
||||
'request_queue_depth': 128,
|
||||
'graceful_disable_timeout': 1,
|
||||
'sni_enabled': True,
|
||||
'server_count': 3,
|
||||
'uuid': 'pool-09201181-747e-41ea-872d-e9a7df71b726',
|
||||
'request_queue_enabled': False,
|
||||
'name': 'p1',
|
||||
'max_concurrent_connections_per_server': 0,
|
||||
'url': 'https://192.0.2.42/api/pool/pool-09201181-747e-41ea-872d-e9a7df71b726',
|
||||
'enabled': True,
|
||||
'connection_ramp_duration': 10
|
||||
}
|
||||
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert diff
|
||||
|
||||
def testAWSVs(self):
|
||||
existing_obj = {
|
||||
'network_profile_ref': 'https://12.97.16.202/api/networkprofile/networkprofile-9a0a9896-6876-44c8-a3ee-512a968905f2#System-TCP-Proxy',
|
||||
'port_uuid': 'eni-4144e73c',
|
||||
'weight': 1,
|
||||
'availability_zone': 'us-west-2a',
|
||||
'enabled': True,
|
||||
'flow_dist': 'LOAD_AWARE',
|
||||
'subnet_uuid': 'subnet-91f0b6f4',
|
||||
'delay_fairness': False,
|
||||
'avi_allocated_vip': True,
|
||||
'vrf_context_ref': 'https://12.97.16.202/api/vrfcontext/vrfcontext-722b280d-b555-4d82-9b35-af9442c0cb86#global',
|
||||
'subnet': {
|
||||
'ip_addr': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.0'
|
||||
},
|
||||
'mask': 24
|
||||
},
|
||||
'cloud_type': 'CLOUD_AWS', 'uuid': 'virtualservice-a5f49b99-22c8-42e6-aa65-3ca5f1e36b9e',
|
||||
'network_ref': 'https://12.97.16.202/api/network/subnet-91f0b6f4',
|
||||
'cloud_ref': 'https://12.97.16.202/api/cloud/cloud-49829414-c704-43ca-9dff-05b9e8474dcb#AWS Cloud',
|
||||
'avi_allocated_fip': False,
|
||||
'se_group_ref': 'https://12.97.16.202/api/serviceenginegroup/serviceenginegroup-3bef6320-5a2d-4801-85c4-ef4f9841f235#Default-Group',
|
||||
'scaleout_ecmp': False,
|
||||
'max_cps_per_client': 0,
|
||||
'type': 'VS_TYPE_NORMAL',
|
||||
'analytics_profile_ref': 'https://12.97.16.202/api/analyticsprofile/analyticsprofile-70f8b06f-7b6a-4500-b829-c869bbca2009#System-Analytics-Profile',
|
||||
'use_bridge_ip_as_vip': False,
|
||||
'application_profile_ref': 'https://12.97.16.202/api/applicationprofile/applicationprofile-103cbc31-cac5-46ab-8e66-bbbb2c8f551f#System-HTTP',
|
||||
'auto_allocate_floating_ip': False,
|
||||
'services': [{
|
||||
'enable_ssl': False,
|
||||
'port_range_end': 80,
|
||||
'port': 80
|
||||
}],
|
||||
'active_standby_se_tag': 'ACTIVE_STANDBY_SE_1',
|
||||
'ip_address': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.33'
|
||||
},
|
||||
'ign_pool_net_reach': False,
|
||||
'east_west_placement': False,
|
||||
'limit_doser': False,
|
||||
'name': 'wwwawssit.ebiz.verizon.com',
|
||||
'url': 'https://12.97.16.202/api/virtualservice/virtualservice-a5f49b99-22c8-42e6-aa65-3ca5f1e36b9e#wwwawssit.ebiz.verizon.com',
|
||||
'ssl_sess_cache_avg_size': 1024,
|
||||
'enable_autogw': True,
|
||||
'auto_allocate_ip': True,
|
||||
'tenant_ref': 'https://12.97.16.202/api/tenant/tenant-f52f7a3e-6876-4bb9-b8f7-3cab636dadf2#Sales',
|
||||
'remove_listening_port_on_vs_down': False
|
||||
}
|
||||
obj = {'auto_allocate_ip': True, 'subnet_uuid': 'subnet-91f0b6f4', 'cloud_ref': '/api/cloud?name=AWS Cloud', 'services': [{'port': 80}],
|
||||
'name': 'wwwawssit.ebiz.verizon.com'}
|
||||
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert diff
|
||||
|
||||
def testhttppolicy(self):
|
||||
existing_obj = {
|
||||
"http_request_policy": {
|
||||
"rules": [{
|
||||
"enable": True,
|
||||
"index": 0,
|
||||
"match": {
|
||||
"path": {
|
||||
"match_case": "INSENSITIVE",
|
||||
"match_criteria": "CONTAINS",
|
||||
"match_str": ["xvz", "rst"]
|
||||
}
|
||||
},
|
||||
"name": "blah",
|
||||
"switching_action": {
|
||||
"action": "HTTP_SWITCHING_SELECT_POOL",
|
||||
"pool_ref": "https://12.97.16.202/api/pool/pool-d7f6f5e7-bd26-49ad-aeed-965719eb140b#abc",
|
||||
"status_code": "HTTP_LOCAL_RESPONSE_STATUS_CODE_200"
|
||||
}
|
||||
}]
|
||||
},
|
||||
"is_internal_policy": False,
|
||||
"name": "blah",
|
||||
"tenant_ref": "https://12.97.16.202/api/tenant/tenant-f52f7a3e-6876-4bb9-b8f7-3cab636dadf2#Sales",
|
||||
"url": "https://12.97.16.202/api/httppolicyset/httppolicyset-ffd8354b-671b-48d5-92cc-69a9057aad0c#blah",
|
||||
"uuid": "httppolicyset-ffd8354b-671b-48d5-92cc-69a9057aad0c"
|
||||
}
|
||||
|
||||
obj = {
|
||||
"http_request_policy": {
|
||||
"rules": [{
|
||||
"enable": True,
|
||||
"index": "0",
|
||||
"match": {
|
||||
"path": {
|
||||
"match_case": "INSENSITIVE",
|
||||
"match_criteria": "CONTAINS",
|
||||
"match_str": ["xvz", "rst"]
|
||||
}
|
||||
},
|
||||
"name": "blah",
|
||||
"switching_action": {
|
||||
"action": "HTTP_SWITCHING_SELECT_POOL",
|
||||
"pool_ref": "/api/pool?name=abc",
|
||||
"status_code": "HTTP_LOCAL_RESPONSE_STATUS_CODE_200"
|
||||
}
|
||||
}]
|
||||
},
|
||||
"is_internal_policy": False,
|
||||
"tenant": "Sales"
|
||||
}
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert diff
|
||||
|
||||
def testCleanupFields(self):
|
||||
obj = {'name': 'testpool',
|
||||
'scalar_field': {'state': 'absent'},
|
||||
'list_fields': [{'x': '1'}, {'y': {'state': 'absent'}}]}
|
||||
|
||||
cleanup_absent_fields(obj)
|
||||
assert 'scalar_field' not in obj
|
||||
for elem in obj['list_fields']:
|
||||
assert 'y' not in elem
|
||||
|
||||
def testGSLB(self):
|
||||
obj = {
|
||||
'domain_names': ['cloud5.avi.com', 'cloud6.avi.com'],
|
||||
'health_monitor_scope': 'GSLB_SERVICE_HEALTH_MONITOR_ALL_MEMBERS',
|
||||
'groups': [{
|
||||
'priority': 20,
|
||||
'members': [{
|
||||
'ip': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.1'
|
||||
},
|
||||
'enabled': True, 'ratio': 1
|
||||
}, {
|
||||
'ip': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.10'
|
||||
},
|
||||
'enabled': True,
|
||||
'ratio': 1
|
||||
}],
|
||||
'algorithm': 'GSLB_ALGORITHM_CONSISTENT_HASH',
|
||||
'name': 'sc'
|
||||
}, {
|
||||
'priority': 14,
|
||||
'members': [{
|
||||
'ip': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.2'
|
||||
},
|
||||
'enabled': True,
|
||||
'ratio': 1
|
||||
}],
|
||||
'algorithm': 'GSLB_ALGORITHM_ROUND_ROBIN',
|
||||
'name': 'cn'
|
||||
}, {
|
||||
'priority': 15,
|
||||
'members': [{
|
||||
'ip': {
|
||||
'type': 'V4',
|
||||
'addr': '198.51.100.3'
|
||||
},
|
||||
'enabled': True, 'ratio': 1
|
||||
}],
|
||||
'algorithm': 'GSLB_ALGORITHM_ROUND_ROBIN',
|
||||
'name': 'in'
|
||||
}],
|
||||
'name': 'gs-3',
|
||||
'num_dns_ip': 2
|
||||
}
|
||||
existing_obj = {
|
||||
u'controller_health_status_enabled': True,
|
||||
u'uuid': u'gslbservice-ab9b36bd-3e95-4c2e-80f8-92905c2eccb2',
|
||||
u'wildcard_match': False,
|
||||
u'url': u'https://192.0.2.42/api/gslbservice/gslbservice-ab9b36bd-3e95-4c2e-80f8-92905c2eccb2#gs-3',
|
||||
u'tenant_ref': u'https://192.0.2.42/api/tenant/admin#admin',
|
||||
u'enabled': True,
|
||||
u'domain_names': [u'cloud5.avi.com', u'cloud6.avi.com'],
|
||||
u'use_edns_client_subnet': True,
|
||||
u'groups': [{
|
||||
u'priority': 20,
|
||||
u'members': [{
|
||||
u'ip': {
|
||||
u'type': u'V4',
|
||||
u'addr': u'198.51.100.1'
|
||||
},
|
||||
u'ratio': 1,
|
||||
u'enabled': True
|
||||
}, {
|
||||
u'ip': {
|
||||
u'type': u'V4',
|
||||
u'addr': u'198.51.100.10'
|
||||
},
|
||||
u'ratio': 1,
|
||||
u'enabled': True
|
||||
}],
|
||||
u'name': u'sc',
|
||||
u'algorithm': u'GSLB_ALGORITHM_CONSISTENT_HASH'
|
||||
}, {
|
||||
u'priority': 14,
|
||||
u'members': [{
|
||||
u'ip': {
|
||||
u'type': u'V4',
|
||||
u'addr': u'198.51.100.2'
|
||||
},
|
||||
u'ratio': 1,
|
||||
u'enabled': True
|
||||
}],
|
||||
u'name': u'cn',
|
||||
u'algorithm': u'GSLB_ALGORITHM_ROUND_ROBIN'
|
||||
}, {
|
||||
u'priority': 15,
|
||||
u'members': [{
|
||||
u'ip': {
|
||||
u'type': u'V4',
|
||||
u'addr': u'198.51.100.3'
|
||||
},
|
||||
u'ratio': 1,
|
||||
u'enabled': True
|
||||
}],
|
||||
u'name': u'in',
|
||||
u'algorithm': u'GSLB_ALGORITHM_ROUND_ROBIN'
|
||||
}],
|
||||
u'num_dns_ip': 2,
|
||||
u'health_monitor_scope': u'GSLB_SERVICE_HEALTH_MONITOR_ALL_MEMBERS',
|
||||
u'name': u'gs-3'
|
||||
}
|
||||
diff = avi_obj_cmp(obj, existing_obj)
|
||||
assert diff
|
||||
|
||||
def testNoneParams(self):
|
||||
objwnone = {
|
||||
'name': 'testpool',
|
||||
'scalar_field': None,
|
||||
'list_fields': {
|
||||
'y': None,
|
||||
'z': 'zz'
|
||||
}
|
||||
}
|
||||
obj = {
|
||||
'name': 'testpool',
|
||||
'list_fields': {
|
||||
'z': 'zz'
|
||||
}
|
||||
}
|
||||
|
||||
result = avi_obj_cmp(objwnone, obj)
|
||||
assert result
|
|
@ -1,46 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# (c) 2017 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.parsing import Conditional
|
||||
|
||||
test_results = ['result_1', 'result_2', 'result_3']
|
||||
c1 = Conditional('result[1] == result_2')
|
||||
c2 = Conditional('result[2] not == result_2')
|
||||
c3 = Conditional('result[0] neq not result_1')
|
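# Editorial note (an illustrative reading of the expressions above, based on the assertions in the
# test class below rather than on the Conditional documentation): c1 resolves 'result[1]' against
# the passed result list and compares it with 'result_2'; c2 and c3 negate their comparison.
# Roughly equivalent checks against test_results:
#     assert (test_results[1] == 'result_2') is True            # c1
#     assert (not (test_results[2] == 'result_2')) is True      # c2 ('not ==')
#     assert (not (test_results[0] != 'result_1')) is True      # c3 ('neq' negated)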
||||
|
||||
|
||||
class TestNotKeyword(unittest.TestCase):
|
||||
def test_negate_instance_variable_assignment(self):
|
||||
assert c1.negate is False and c2.negate is True
|
||||
|
||||
def test_key_value_instance_variable_assignment(self):
|
||||
c1_assignments = c1.key == 'result[1]' and c1.value == 'result_2'
|
||||
c2_assignments = c2.key == 'result[2]' and c2.value == 'result_2'
|
||||
assert c1_assignments and c2_assignments
|
||||
|
||||
def test_conditionals_w_not_keyword(self):
|
||||
assert c1(test_results) and c2(test_results) and c3(test_results)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.main()
|
|
@ -1,213 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# (c) 2017 Red Hat, Inc.
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import absolute_import, division, print_function
|
||||
__metaclass__ = type
|
||||
|
||||
import pytest
|
||||
|
||||
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import to_list, sort_list
|
||||
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import dict_diff, dict_merge
|
||||
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import conditional, Template
|
||||
from ansible.module_utils.common.network import (
|
||||
to_masklen, to_netmask, to_subnet, to_ipv6_network, to_ipv6_subnet, is_masklen, is_netmask
|
||||
)
|
||||
|
||||
|
||||
def test_to_list():
|
||||
for scalar in ('string', 1, True, False, None):
|
||||
assert isinstance(to_list(scalar), list)
|
||||
|
||||
for container in ([1, 2, 3], {'one': 1}):
|
||||
assert isinstance(to_list(container), list)
|
||||
|
||||
test_list = [1, 2, 3]
|
||||
assert id(test_list) != id(to_list(test_list))
|
||||
|
||||
|
||||
def test_sort():
|
||||
data = [3, 1, 2]
|
||||
assert [1, 2, 3] == sort_list(data)
|
||||
|
||||
string_data = '123'
|
||||
assert string_data == sort_list(string_data)
|
||||
|
||||
|
||||
def test_dict_diff():
|
||||
base = dict(obj2=dict(), b1=True, b2=False, b3=False,
|
||||
one=1, two=2, three=3, obj1=dict(key1=1, key2=2),
|
||||
l1=[1, 3], l2=[1, 2, 3], l4=[4],
|
||||
nested=dict(n1=dict(n2=2)))
|
||||
|
||||
other = dict(b1=True, b2=False, b3=True, b4=True,
|
||||
one=1, three=4, four=4, obj1=dict(key1=2),
|
||||
l1=[2, 1], l2=[3, 2, 1], l3=[1],
|
||||
nested=dict(n1=dict(n2=2, n3=3)))
|
||||
|
||||
result = dict_diff(base, other)
|
||||
|
||||
# string assertions
|
||||
assert 'one' not in result
|
||||
assert 'two' not in result
|
||||
assert result['three'] == 4
|
||||
assert result['four'] == 4
|
||||
|
||||
# dict assertions
|
||||
assert 'obj1' in result
|
||||
assert 'key1' in result['obj1']
|
||||
assert 'key2' not in result['obj1']
|
||||
|
||||
# list assertions
|
||||
assert result['l1'] == [2, 1]
|
||||
assert 'l2' not in result
|
||||
assert result['l3'] == [1]
|
||||
assert 'l4' not in result
|
||||
|
||||
# nested assertions
|
||||
assert 'obj1' in result
|
||||
assert result['obj1']['key1'] == 2
|
||||
assert 'key2' not in result['obj1']
|
||||
|
||||
# bool assertions
|
||||
assert 'b1' not in result
|
||||
assert 'b2' not in result
|
||||
assert result['b3']
|
||||
assert result['b4']
|
||||
|
||||
|
||||
def test_dict_merge():
|
||||
base = dict(obj2=dict(), b1=True, b2=False, b3=False,
|
||||
one=1, two=2, three=3, obj1=dict(key1=1, key2=2),
|
||||
l1=[1, 3], l2=[1, 2, 3], l4=[4],
|
||||
nested=dict(n1=dict(n2=2)))
|
||||
|
||||
other = dict(b1=True, b2=False, b3=True, b4=True,
|
||||
one=1, three=4, four=4, obj1=dict(key1=2),
|
||||
l1=[2, 1], l2=[3, 2, 1], l3=[1],
|
||||
nested=dict(n1=dict(n2=2, n3=3)))
|
||||
|
||||
result = dict_merge(base, other)
|
||||
|
||||
# string assertions
|
||||
assert 'one' in result
|
||||
assert 'two' in result
|
||||
assert result['three'] == 4
|
||||
assert result['four'] == 4
|
||||
|
||||
# dict assertions
|
||||
assert 'obj1' in result
|
||||
assert 'key1' in result['obj1']
|
||||
assert 'key2' in result['obj1']
|
||||
|
||||
# list assertions
|
||||
assert result['l1'] == [1, 2, 3]
|
||||
assert 'l2' in result
|
||||
assert result['l3'] == [1]
|
||||
assert 'l4' in result
|
||||
|
||||
# nested assertions
|
||||
assert 'obj1' in result
|
||||
assert result['obj1']['key1'] == 2
|
||||
assert 'key2' in result['obj1']
|
||||
|
||||
# bool assertions
|
||||
assert 'b1' in result
|
||||
assert 'b2' in result
|
||||
assert result['b3']
|
||||
assert result['b4']
|
||||
|
||||
|
||||
def test_conditional():
|
||||
assert conditional(10, 10)
|
||||
assert conditional('10', '10')
|
||||
assert conditional('foo', 'foo')
|
||||
assert conditional(True, True)
|
||||
assert conditional(False, False)
|
||||
assert conditional(None, None)
|
||||
assert conditional("ge(1)", 1)
|
||||
assert conditional("gt(1)", 2)
|
||||
assert conditional("le(2)", 2)
|
||||
assert conditional("lt(3)", 2)
|
||||
assert conditional("eq(1)", 1)
|
||||
assert conditional("neq(0)", 1)
|
||||
assert conditional("min(1)", 1)
|
||||
assert conditional("max(1)", 1)
|
||||
assert conditional("exactly(1)", 1)
|
||||
|
||||
|
||||
def test_template():
|
||||
tmpl = Template()
|
||||
assert 'foo' == tmpl('{{ test }}', {'test': 'foo'})
|
||||
|
||||
|
||||
def test_to_masklen():
|
||||
assert 24 == to_masklen('255.255.255.0')
|
||||
|
||||
|
||||
def test_to_masklen_invalid():
|
||||
with pytest.raises(ValueError):
|
||||
to_masklen('255')
|
||||
|
||||
|
||||
def test_to_netmask():
|
||||
assert '255.0.0.0' == to_netmask(8)
|
||||
assert '255.0.0.0' == to_netmask('8')
|
||||
|
||||
|
||||
def test_to_netmask_invalid():
|
||||
with pytest.raises(ValueError):
|
||||
to_netmask(128)
|
||||
|
||||
|
||||
def test_to_subnet():
|
||||
result = to_subnet('192.168.1.1', 24)
|
||||
assert '192.168.1.0/24' == result
|
||||
|
||||
result = to_subnet('192.168.1.1', 24, dotted_notation=True)
|
||||
assert '192.168.1.0 255.255.255.0' == result
|
||||
|
||||
|
||||
def test_to_subnet_invalid():
|
||||
with pytest.raises(ValueError):
|
||||
to_subnet('foo', 'bar')
|
||||
|
||||
|
||||
def test_is_masklen():
|
||||
assert is_masklen(32)
|
||||
assert not is_masklen(33)
|
||||
assert not is_masklen('foo')
|
||||
|
||||
|
||||
def test_is_netmask():
|
||||
assert is_netmask('255.255.255.255')
|
||||
assert not is_netmask(24)
|
||||
assert not is_netmask('foo')
|
||||
|
||||
|
||||
def test_to_ipv6_network():
|
||||
assert '2001:db8::' == to_ipv6_network('2001:db8::')
|
||||
assert '2001:0db8:85a3::' == to_ipv6_network('2001:0db8:85a3:0000:0000:8a2e:0370:7334')
|
||||
assert '2001:0db8:85a3::' == to_ipv6_network('2001:0db8:85a3:0:0:8a2e:0370:7334')
|
||||
|
||||
|
||||
def test_to_ipv6_subnet():
|
||||
assert '2001:db8::' == to_ipv6_subnet('2001:db8::')
|
||||
assert '2001:0db8:85a3:4242::' == to_ipv6_subnet('2001:0db8:85a3:4242:0000:8a2e:0370:7334')
|
||||
assert '2001:0db8:85a3:4242::' == to_ipv6_subnet('2001:0db8:85a3:4242:0:8a2e:0370:7334')
|
|
@ -1,446 +0,0 @@
|
|||
# Copyright (c) 2018 Cisco and/or its affiliates.
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.network.ftd.common import equal_objects, delete_ref_duplicates, construct_ansible_facts
|
||||
|
||||
|
||||
# simple objects
|
||||
|
||||
def test_equal_objects_return_false_with_different_length():
|
||||
assert not equal_objects(
|
||||
{'foo': 1},
|
||||
{'foo': 1, 'bar': 2}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_false_with_different_fields():
|
||||
assert not equal_objects(
|
||||
{'foo': 1},
|
||||
{'bar': 1}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_false_with_different_value_types():
|
||||
assert not equal_objects(
|
||||
{'foo': 1},
|
||||
{'foo': '1'}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_false_with_different_values():
|
||||
assert not equal_objects(
|
||||
{'foo': 1},
|
||||
{'foo': 2}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_false_with_different_nested_values():
|
||||
assert not equal_objects(
|
||||
{'foo': {'bar': 1}},
|
||||
{'foo': {'bar': 2}}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_false_with_different_list_length():
|
||||
assert not equal_objects(
|
||||
{'foo': []},
|
||||
{'foo': ['bar']}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_true_with_equal_objects():
|
||||
assert equal_objects(
|
||||
{'foo': 1, 'bar': 2},
|
||||
{'bar': 2, 'foo': 1}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_true_with_equal_str_like_values():
|
||||
assert equal_objects(
|
||||
{'foo': b'bar'},
|
||||
{'foo': u'bar'}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_true_with_equal_nested_dicts():
|
||||
assert equal_objects(
|
||||
{'foo': {'bar': 1, 'buz': 2}},
|
||||
{'foo': {'buz': 2, 'bar': 1}}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_true_with_equal_lists():
|
||||
assert equal_objects(
|
||||
{'foo': ['bar']},
|
||||
{'foo': ['bar']}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_true_with_ignored_fields():
|
||||
assert equal_objects(
|
||||
{'foo': 1, 'version': '123', 'id': '123123'},
|
||||
{'foo': 1}
|
||||
)
|
||||
|
||||
|
||||
# objects with object references
|
||||
|
||||
def test_equal_objects_return_false_with_different_ref_ids():
|
||||
assert not equal_objects(
|
||||
{'foo': {'id': '1', 'type': 'network', 'ignored_field': 'foo'}},
|
||||
{'foo': {'id': '2', 'type': 'network', 'ignored_field': 'bar'}}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_false_with_different_ref_types():
|
||||
assert not equal_objects(
|
||||
{'foo': {'id': '1', 'type': 'network', 'ignored_field': 'foo'}},
|
||||
{'foo': {'id': '1', 'type': 'accessRule', 'ignored_field': 'bar'}}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_true_with_same_object_refs():
|
||||
assert equal_objects(
|
||||
{'foo': {'id': '1', 'type': 'network', 'ignored_field': 'foo'}},
|
||||
{'foo': {'id': '1', 'type': 'network', 'ignored_field': 'bar'}}
|
||||
)
|
||||
|
||||
|
||||
# objects with array of object references
|
||||
|
||||
def test_equal_objects_return_false_with_different_array_length():
|
||||
assert not equal_objects(
|
||||
{'foo': [
|
||||
{'id': '1', 'type': 'network', 'ignored_field': 'foo'}
|
||||
]},
|
||||
{'foo': []}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_false_with_different_array_order():
|
||||
assert not equal_objects(
|
||||
{'foo': [
|
||||
{'id': '1', 'type': 'network', 'ignored_field': 'foo'},
|
||||
{'id': '2', 'type': 'network', 'ignored_field': 'bar'}
|
||||
]},
|
||||
{'foo': [
|
||||
{'id': '2', 'type': 'network', 'ignored_field': 'foo'},
|
||||
{'id': '1', 'type': 'network', 'ignored_field': 'bar'}
|
||||
]}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_true_with_equal_ref_arrays():
|
||||
assert equal_objects(
|
||||
{'foo': [
|
||||
{'id': '1', 'type': 'network', 'ignored_field': 'foo'}
|
||||
]},
|
||||
{'foo': [
|
||||
{'id': '1', 'type': 'network', 'ignored_field': 'bar'}
|
||||
]}
|
||||
)
|
||||
|
||||
|
||||
# objects with nested structures and object references
|
||||
|
||||
def test_equal_objects_return_true_with_equal_nested_object_references():
|
||||
assert equal_objects(
|
||||
{
|
||||
'name': 'foo',
|
||||
'config': {
|
||||
'version': '1',
|
||||
'port': {
|
||||
'name': 'oldPortName',
|
||||
'type': 'port',
|
||||
'id': '123'
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
'name': 'foo',
|
||||
'config': {
|
||||
'version': '1',
|
||||
'port': {
|
||||
'name': 'newPortName',
|
||||
'type': 'port',
|
||||
'id': '123'
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_false_with_different_nested_object_references():
|
||||
assert not equal_objects(
|
||||
{
|
||||
'name': 'foo',
|
||||
'config': {
|
||||
'version': '1',
|
||||
'port': {
|
||||
'name': 'oldPortName',
|
||||
'type': 'port',
|
||||
'id': '123'
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
'name': 'foo',
|
||||
'config': {
|
||||
'version': '1',
|
||||
'port': {
|
||||
'name': 'oldPortName',
|
||||
'type': 'port',
|
||||
'id': '234'
|
||||
}
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_true_with_equal_nested_list_of_object_references():
|
||||
assert equal_objects(
|
||||
{
|
||||
'name': 'foo',
|
||||
'config': {
|
||||
'version': '1',
|
||||
'ports': [{
|
||||
'name': 'oldPortName',
|
||||
'type': 'port',
|
||||
'id': '123'
|
||||
}, {
|
||||
'name': 'oldPortName2',
|
||||
'type': 'port',
|
||||
'id': '234'
|
||||
}]
|
||||
}
|
||||
},
|
||||
{
|
||||
'name': 'foo',
|
||||
'config': {
|
||||
'version': '1',
|
||||
'ports': [{
|
||||
'name': 'newPortName',
|
||||
'type': 'port',
|
||||
'id': '123'
|
||||
}, {
|
||||
'name': 'newPortName2',
|
||||
'type': 'port',
|
||||
'id': '234',
|
||||
'extraField': 'foo'
|
||||
}]
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def test_equal_objects_return_true_with_reference_list_containing_duplicates():
|
||||
assert equal_objects(
|
||||
{
|
||||
'name': 'foo',
|
||||
'config': {
|
||||
'version': '1',
|
||||
'ports': [{
|
||||
'name': 'oldPortName',
|
||||
'type': 'port',
|
||||
'id': '123'
|
||||
}, {
|
||||
'name': 'oldPortName',
|
||||
'type': 'port',
|
||||
'id': '123'
|
||||
}, {
|
||||
'name': 'oldPortName2',
|
||||
'type': 'port',
|
||||
'id': '234'
|
||||
}]
|
||||
}
|
||||
},
|
||||
{
|
||||
'name': 'foo',
|
||||
'config': {
|
||||
'version': '1',
|
||||
'ports': [{
|
||||
'name': 'newPortName',
|
||||
'type': 'port',
|
||||
'id': '123'
|
||||
}, {
|
||||
'name': 'newPortName2',
|
||||
'type': 'port',
|
||||
'id': '234',
|
||||
'extraField': 'foo'
|
||||
}]
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def test_delete_ref_duplicates_with_none():
|
||||
assert delete_ref_duplicates(None) is None
|
||||
|
||||
|
||||
def test_delete_ref_duplicates_with_empty_dict():
|
||||
assert {} == delete_ref_duplicates({})
|
||||
|
||||
|
||||
def test_delete_ref_duplicates_with_simple_object():
|
||||
data = {
|
||||
'id': '123',
|
||||
'name': 'foo',
|
||||
'type': 'bar',
|
||||
'values': ['a', 'b']
|
||||
}
|
||||
assert data == delete_ref_duplicates(data)
|
||||
|
||||
|
||||
def test_delete_ref_duplicates_with_object_containing_refs():
|
||||
data = {
|
||||
'id': '123',
|
||||
'name': 'foo',
|
||||
'type': 'bar',
|
||||
'refs': [
|
||||
{'id': '123', 'type': 'baz'},
|
||||
{'id': '234', 'type': 'baz'},
|
||||
{'id': '234', 'type': 'foo'}
|
||||
]
|
||||
}
|
||||
assert data == delete_ref_duplicates(data)
|
||||
|
||||
|
||||
def test_delete_ref_duplicates_with_object_containing_duplicate_refs():
|
||||
data = {
|
||||
'id': '123',
|
||||
'name': 'foo',
|
||||
'type': 'bar',
|
||||
'refs': [
|
||||
{'id': '123', 'type': 'baz'},
|
||||
{'id': '123', 'type': 'baz'},
|
||||
{'id': '234', 'type': 'baz'},
|
||||
{'id': '234', 'type': 'baz'},
|
||||
{'id': '234', 'type': 'foo'}
|
||||
]
|
||||
}
|
||||
assert {
|
||||
'id': '123',
|
||||
'name': 'foo',
|
||||
'type': 'bar',
|
||||
'refs': [
|
||||
{'id': '123', 'type': 'baz'},
|
||||
{'id': '234', 'type': 'baz'},
|
||||
{'id': '234', 'type': 'foo'}
|
||||
]
|
||||
} == delete_ref_duplicates(data)
|
||||
|
||||
|
||||
def test_delete_ref_duplicates_with_object_containing_duplicate_refs_in_nested_object():
|
||||
data = {
|
||||
'id': '123',
|
||||
'name': 'foo',
|
||||
'type': 'bar',
|
||||
'children': {
|
||||
'refs': [
|
||||
{'id': '123', 'type': 'baz'},
|
||||
{'id': '123', 'type': 'baz'},
|
||||
{'id': '234', 'type': 'baz'},
|
||||
{'id': '234', 'type': 'baz'},
|
||||
{'id': '234', 'type': 'foo'}
|
||||
]
|
||||
}
|
||||
}
|
||||
assert {
|
||||
'id': '123',
|
||||
'name': 'foo',
|
||||
'type': 'bar',
|
||||
'children': {
|
||||
'refs': [
|
||||
{'id': '123', 'type': 'baz'},
|
||||
{'id': '234', 'type': 'baz'},
|
||||
{'id': '234', 'type': 'foo'}
|
||||
]
|
||||
}
|
||||
} == delete_ref_duplicates(data)
|
||||
|
||||
|
||||
def test_construct_ansible_facts_should_make_default_fact_with_name_and_type():
|
||||
response = {
|
||||
'id': '123',
|
||||
'name': 'foo',
|
||||
'type': 'bar'
|
||||
}
|
||||
|
||||
assert {'bar_foo': response} == construct_ansible_facts(response, {})
|
||||
|
||||
|
||||
def test_construct_ansible_facts_should_not_make_default_fact_with_no_name():
|
||||
response = {
|
||||
'id': '123',
|
||||
'name': 'foo'
|
||||
}
|
||||
|
||||
assert {} == construct_ansible_facts(response, {})
|
||||
|
||||
|
||||
def test_construct_ansible_facts_should_not_make_default_fact_with_no_type():
|
||||
response = {
|
||||
'id': '123',
|
||||
'type': 'bar'
|
||||
}
|
||||
|
||||
assert {} == construct_ansible_facts(response, {})
|
||||
|
||||
|
||||
def test_construct_ansible_facts_should_use_register_as_when_given():
|
||||
response = {
|
||||
'id': '123',
|
||||
'name': 'foo',
|
||||
'type': 'bar'
|
||||
}
|
||||
params = {'register_as': 'fact_name'}
|
||||
|
||||
assert {'fact_name': response} == construct_ansible_facts(response, params)
|
||||
|
||||
|
||||
def test_construct_ansible_facts_should_extract_items():
|
||||
response = {'items': [
|
||||
{
|
||||
'id': '123',
|
||||
'name': 'foo',
|
||||
'type': 'bar'
|
||||
}, {
|
||||
'id': '123',
|
||||
'name': 'foo',
|
||||
'type': 'bar'
|
||||
}
|
||||
]}
|
||||
params = {'register_as': 'fact_name'}
|
||||
|
||||
assert {'fact_name': response['items']} == construct_ansible_facts(response, params)
|
||||
|
||||
|
||||
def test_construct_ansible_facts_should_ignore_items_with_no_register_as():
|
||||
response = {'items': [
|
||||
{
|
||||
'id': '123',
|
||||
'name': 'foo',
|
||||
'type': 'bar'
|
||||
}, {
|
||||
'id': '123',
|
||||
'name': 'foo',
|
||||
'type': 'bar'
|
||||
}
|
||||
]}
|
||||
|
||||
assert {} == construct_ansible_facts(response, {})
|
|
@ -1,588 +0,0 @@
|
|||
# Copyright (c) 2018 Cisco and/or its affiliates.
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
|
||||
import json
|
||||
import unittest
|
||||
|
||||
import pytest
|
||||
from ansible_collections.community.general.tests.unit.compat import mock
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import call, patch
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.network.ftd.common import HTTPMethod, FtdUnexpectedResponse
|
||||
from ansible_collections.community.general.plugins.module_utils.network.ftd.configuration import iterate_over_pageable_resource, BaseConfigurationResource, \
|
||||
OperationChecker, OperationNamePrefix, ParamName, QueryParams
|
||||
from ansible_collections.community.general.plugins.module_utils.network.ftd.fdm_swagger_client import ValidationError, OperationField
|
||||
|
||||
|
||||
class TestBaseConfigurationResource(object):
|
||||
@pytest.fixture
|
||||
def connection_mock(self, mocker):
|
||||
connection_class_mock = mocker.patch('ansible_collections.community.general.plugins.modules.network.ftd.ftd_configuration.Connection')
|
||||
connection_instance = connection_class_mock.return_value
|
||||
connection_instance.validate_data.return_value = True, None
|
||||
connection_instance.validate_query_params.return_value = True, None
|
||||
connection_instance.validate_path_params.return_value = True, None
|
||||
|
||||
return connection_instance
|
||||
|
||||
@patch.object(BaseConfigurationResource, '_fetch_system_info')
|
||||
@patch.object(BaseConfigurationResource, '_send_request')
|
||||
def test_get_objects_by_filter_with_multiple_filters(self, send_request_mock, fetch_system_info_mock,
|
||||
connection_mock):
|
||||
objects = [
|
||||
{'name': 'obj1', 'type': 1, 'foo': {'bar': 'buzz'}},
|
||||
{'name': 'obj2', 'type': 1, 'foo': {'bar': 'buz'}},
|
||||
{'name': 'obj3', 'type': 2, 'foo': {'bar': 'buzz'}}
|
||||
]
|
||||
|
||||
fetch_system_info_mock.return_value = {
|
||||
'databaseInfo': {
|
||||
'buildVersion': '6.3.0'
|
||||
}
|
||||
}
|
||||
|
||||
connection_mock.get_operation_spec.return_value = {
|
||||
'method': HTTPMethod.GET,
|
||||
'url': '/object/'
|
||||
}
|
||||
resource = BaseConfigurationResource(connection_mock, False)
|
||||
|
||||
send_request_mock.side_effect = [{'items': objects}, {'items': []}]
|
||||
# resource.get_objects_by_filter returns a generator, so to compare the generated list with the expected list
|
||||
# we need to evaluate it first.
|
||||
assert objects == list(resource.get_objects_by_filter('test', {}))
|
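# Editorial note: a minimal standalone sketch of the point made in the comment above; the
# generator below is hypothetical and not part of the module under test:
#     def pages():
#         yield {'name': 'obj1'}
#         yield {'name': 'obj2'}
#     assert list(pages()) == [{'name': 'obj1'}, {'name': 'obj2'}]  # list() drains the generator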
||||
send_request_mock.assert_has_calls(
|
||||
[
|
||||
mock.call('/object/', 'get', {}, {}, {'limit': 10, 'offset': 0})
|
||||
]
|
||||
)
|
||||
|
||||
send_request_mock.reset_mock()
|
||||
send_request_mock.side_effect = [{'items': objects}, {'items': []}]
|
||||
# resource.get_objects_by_filter returns a generator, so to compare the generated list with the expected list
|
||||
# we need to evaluate it first.
|
||||
assert [objects[0]] == list(resource.get_objects_by_filter('test', {ParamName.FILTERS: {'name': 'obj1'}}))
|
||||
send_request_mock.assert_has_calls(
|
||||
[
|
||||
mock.call('/object/', 'get', {}, {}, {QueryParams.FILTER: 'name:obj1', 'limit': 10, 'offset': 0})
|
||||
]
|
||||
)
|
||||
|
||||
send_request_mock.reset_mock()
|
||||
send_request_mock.side_effect = [{'items': objects}, {'items': []}]
|
||||
# resource.get_objects_by_filter returns a generator, so to compare the generated list with the expected list
|
||||
# we need to evaluate it first.
|
||||
assert [objects[1]] == list(resource.get_objects_by_filter(
|
||||
'test',
|
||||
{ParamName.FILTERS: {'name': 'obj2', 'type': 1, 'foo': {'bar': 'buz'}}}))
|
||||
|
||||
send_request_mock.assert_has_calls(
|
||||
[
|
||||
mock.call('/object/', 'get', {}, {}, {QueryParams.FILTER: 'name:obj2', 'limit': 10, 'offset': 0})
|
||||
]
|
||||
)
|
||||
|
||||
@patch.object(BaseConfigurationResource, '_fetch_system_info')
|
||||
@patch.object(BaseConfigurationResource, '_send_request')
|
||||
def test_get_objects_by_filter_with_multiple_responses(self, send_request_mock, fetch_system_info_mock,
|
||||
connection_mock):
|
||||
send_request_mock.side_effect = [
|
||||
{'items': [
|
||||
{'name': 'obj1', 'type': 'foo'},
|
||||
{'name': 'obj2', 'type': 'bar'}
|
||||
]},
|
||||
{'items': [
|
||||
{'name': 'obj3', 'type': 'foo'}
|
||||
]},
|
||||
{'items': []}
|
||||
]
|
||||
fetch_system_info_mock.return_value = {
|
||||
'databaseInfo': {
|
||||
'buildVersion': '6.3.0'
|
||||
}
|
||||
}
|
||||
connection_mock.get_operation_spec.return_value = {
|
||||
'method': HTTPMethod.GET,
|
||||
'url': '/object/'
|
||||
}
|
||||
resource = BaseConfigurationResource(connection_mock, False)
|
||||
assert [{'name': 'obj1', 'type': 'foo'}] == list(resource.get_objects_by_filter(
|
||||
'test',
|
||||
{ParamName.FILTERS: {'type': 'foo'}}))
|
||||
send_request_mock.assert_has_calls(
|
||||
[
|
||||
mock.call('/object/', 'get', {}, {}, {'limit': 10, 'offset': 0})
|
||||
]
|
||||
)
|
||||
|
||||
send_request_mock.reset_mock()
|
||||
send_request_mock.side_effect = [
|
||||
{'items': [
|
||||
{'name': 'obj1', 'type': 'foo'},
|
||||
{'name': 'obj2', 'type': 'bar'}
|
||||
]},
|
||||
{'items': [
|
||||
{'name': 'obj3', 'type': 'foo'}
|
||||
]},
|
||||
{'items': []}
|
||||
]
|
||||
resp = list(resource.get_objects_by_filter(
|
||||
'test',
|
||||
{
|
||||
ParamName.FILTERS: {'type': 'foo'},
|
||||
ParamName.QUERY_PARAMS: {'limit': 2}
|
||||
}))
|
||||
assert [{'name': 'obj1', 'type': 'foo'}, {'name': 'obj3', 'type': 'foo'}] == resp
|
||||
send_request_mock.assert_has_calls(
|
||||
[
|
||||
mock.call('/object/', 'get', {}, {}, {'limit': 2, 'offset': 0}),
|
||||
mock.call('/object/', 'get', {}, {}, {'limit': 2, 'offset': 2})
|
||||
]
|
||||
)
|
||||
|
||||
def test_module_should_fail_if_validation_error_in_data(self, connection_mock):
|
||||
connection_mock.get_operation_spec.return_value = {'method': HTTPMethod.POST, 'url': '/test'}
|
||||
report = {
|
||||
'required': ['objects[0].type'],
|
||||
'invalid_type': [
|
||||
{
|
||||
'path': 'objects[3].id',
|
||||
'expected_type': 'string',
|
||||
'actually_value': 1
|
||||
}
|
||||
]
|
||||
}
|
||||
connection_mock.validate_data.return_value = (False, json.dumps(report, sort_keys=True, indent=4))
|
||||
|
||||
with pytest.raises(ValidationError) as e_info:
|
||||
resource = BaseConfigurationResource(connection_mock, False)
|
||||
resource.crud_operation('addTest', {'data': {}})
|
||||
|
||||
result = e_info.value.args[0]
|
||||
key = 'Invalid data provided'
|
||||
assert result[key]
|
||||
result[key] = json.loads(result[key])
|
||||
assert result == {key: {
|
||||
'invalid_type': [{'actually_value': 1, 'expected_type': 'string', 'path': 'objects[3].id'}],
|
||||
'required': ['objects[0].type']
|
||||
}}
|
||||
|
||||
def test_module_should_fail_if_validation_error_in_query_params(self, connection_mock):
|
||||
connection_mock.get_operation_spec.return_value = {'method': HTTPMethod.GET, 'url': '/test',
|
||||
'returnMultipleItems': False}
|
||||
report = {
|
||||
'required': ['objects[0].type'],
|
||||
'invalid_type': [
|
||||
{
|
||||
'path': 'objects[3].id',
|
||||
'expected_type': 'string',
|
||||
'actually_value': 1
|
||||
}
|
||||
]
|
||||
}
|
||||
connection_mock.validate_query_params.return_value = (False, json.dumps(report, sort_keys=True, indent=4))
|
||||
|
||||
with pytest.raises(ValidationError) as e_info:
|
||||
resource = BaseConfigurationResource(connection_mock, False)
|
||||
resource.crud_operation('getTestList', {'data': {}})
|
||||
|
||||
result = e_info.value.args[0]
|
||||
|
||||
key = 'Invalid query_params provided'
|
||||
assert result[key]
|
||||
result[key] = json.loads(result[key])
|
||||
|
||||
assert result == {key: {
|
||||
'invalid_type': [{'actually_value': 1, 'expected_type': 'string', 'path': 'objects[3].id'}],
|
||||
'required': ['objects[0].type']}}
|
||||
|
||||
def test_module_should_fail_if_validation_error_in_path_params(self, connection_mock):
|
||||
connection_mock.get_operation_spec.return_value = {'method': HTTPMethod.GET, 'url': '/test',
|
||||
'returnMultipleItems': False}
|
||||
report = {
|
||||
'path_params': {
|
||||
'required': ['objects[0].type'],
|
||||
'invalid_type': [
|
||||
{
|
||||
'path': 'objects[3].id',
|
||||
'expected_type': 'string',
|
||||
'actually_value': 1
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
connection_mock.validate_path_params.return_value = (False, json.dumps(report, sort_keys=True, indent=4))
|
||||
|
||||
with pytest.raises(ValidationError) as e_info:
|
||||
resource = BaseConfigurationResource(connection_mock, False)
|
||||
resource.crud_operation('putTest', {'data': {}})
|
||||
|
||||
result = e_info.value.args[0]
|
||||
|
||||
key = 'Invalid path_params provided'
|
||||
assert result[key]
|
||||
result[key] = json.loads(result[key])
|
||||
|
||||
assert result == {key: {
|
||||
'path_params': {
|
||||
'invalid_type': [{'actually_value': 1, 'expected_type': 'string', 'path': 'objects[3].id'}],
|
||||
'required': ['objects[0].type']}}}
|
||||
|
||||
def test_module_should_fail_if_validation_error_in_all_params(self, connection_mock):
|
||||
connection_mock.get_operation_spec.return_value = {'method': HTTPMethod.POST, 'url': '/test'}
|
||||
report = {
|
||||
'data': {
|
||||
'required': ['objects[0].type'],
|
||||
'invalid_type': [
|
||||
{
|
||||
'path': 'objects[3].id',
|
||||
'expected_type': 'string',
|
||||
'actually_value': 1
|
||||
}
|
||||
]
|
||||
},
|
||||
'path_params': {
|
||||
'required': ['some_param'],
|
||||
'invalid_type': [
|
||||
{
|
||||
'path': 'name',
|
||||
'expected_type': 'string',
|
||||
'actually_value': True
|
||||
}
|
||||
]
|
||||
},
|
||||
'query_params': {
|
||||
'required': ['other_param'],
|
||||
'invalid_type': [
|
||||
{
|
||||
'path': 'f_integer',
|
||||
'expected_type': 'integer',
|
||||
'actually_value': "test"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
connection_mock.validate_data.return_value = (False, json.dumps(report['data'], sort_keys=True, indent=4))
|
||||
connection_mock.validate_query_params.return_value = (False,
|
||||
json.dumps(report['query_params'], sort_keys=True,
|
||||
indent=4))
|
||||
connection_mock.validate_path_params.return_value = (False,
|
||||
json.dumps(report['path_params'], sort_keys=True,
|
||||
indent=4))
|
||||
|
||||
with pytest.raises(ValidationError) as e_info:
|
||||
resource = BaseConfigurationResource(connection_mock, False)
|
||||
resource.crud_operation('putTest', {'data': {}})
|
||||
|
||||
result = e_info.value.args[0]
|
||||
|
||||
key_data = 'Invalid data provided'
|
||||
assert result[key_data]
|
||||
result[key_data] = json.loads(result[key_data])
|
||||
|
||||
key_path_params = 'Invalid path_params provided'
|
||||
assert result[key_path_params]
|
||||
result[key_path_params] = json.loads(result[key_path_params])
|
||||
|
||||
key_query_params = 'Invalid query_params provided'
|
||||
assert result[key_query_params]
|
||||
result[key_query_params] = json.loads(result[key_query_params])
|
||||
|
||||
assert result == {
|
||||
key_data: {'invalid_type': [{'actually_value': 1, 'expected_type': 'string', 'path': 'objects[3].id'}],
|
||||
'required': ['objects[0].type']},
|
||||
key_path_params: {'invalid_type': [{'actually_value': True, 'expected_type': 'string', 'path': 'name'}],
|
||||
'required': ['some_param']},
|
||||
key_query_params: {
|
||||
'invalid_type': [{'actually_value': 'test', 'expected_type': 'integer', 'path': 'f_integer'}],
|
||||
'required': ['other_param']}}
|
||||
|
||||
@pytest.mark.parametrize("test_api_version, expected_result",
|
||||
[
|
||||
("6.2.3", "name:object_name"),
|
||||
("6.3.0", "name:object_name"),
|
||||
("6.4.0", "fts~object_name")
|
||||
]
|
||||
)
|
||||
def test_stringify_name_filter(self, test_api_version, expected_result, connection_mock):
|
||||
filters = {"name": "object_name"}
|
||||
|
||||
with patch.object(BaseConfigurationResource, '_fetch_system_info') as fetch_system_info_mock:
|
||||
fetch_system_info_mock.return_value = {
|
||||
'databaseInfo': {
|
||||
'buildVersion': test_api_version
|
||||
}
|
||||
}
|
||||
resource = BaseConfigurationResource(connection_mock, False)
|
||||
|
||||
assert resource._stringify_name_filter(filters) == expected_result, "Unexpected result for version %s" % (
|
||||
test_api_version)
|
||||
|
||||
|
||||
class TestIterateOverPageableResource(object):
|
||||
|
||||
def test_iterate_over_pageable_resource_with_no_items(self):
|
||||
resource_func = mock.Mock(return_value={'items': []})
|
||||
|
||||
items = iterate_over_pageable_resource(resource_func, {'query_params': {}})
|
||||
|
||||
assert [] == list(items)
|
||||
|
||||
def test_iterate_over_pageable_resource_with_one_page(self):
|
||||
resource_func = mock.Mock(side_effect=[
|
||||
{'items': ['foo', 'bar']},
|
||||
{'items': []},
|
||||
])
|
||||
|
||||
items = iterate_over_pageable_resource(resource_func, {'query_params': {}})
|
||||
|
||||
assert ['foo', 'bar'] == list(items)
|
||||
resource_func.assert_has_calls([
|
||||
call(params={'query_params': {'offset': 0, 'limit': 10}})
|
||||
])
|
||||
|
||||
def test_iterate_over_pageable_resource_with_multiple_pages(self):
|
||||
objects = [
|
||||
{'items': ['foo']},
|
||||
{'items': ['bar']},
|
||||
{'items': ['buzz']},
|
||||
{'items': []},
|
||||
]
|
||||
resource_func = mock.Mock(side_effect=objects)
|
||||
|
||||
items = iterate_over_pageable_resource(resource_func, {'query_params': {}})
|
||||
assert ['foo'] == list(items)
|
||||
|
||||
resource_func.reset_mock()
|
||||
resource_func = mock.Mock(side_effect=objects)
|
||||
items = iterate_over_pageable_resource(resource_func, {'query_params': {'limit': 1}})
|
||||
assert ['foo', 'bar', 'buzz'] == list(items)
|
||||
|
||||
def test_iterate_over_pageable_resource_should_preserve_query_params(self):
|
||||
resource_func = mock.Mock(return_value={'items': []})
|
||||
|
||||
items = iterate_over_pageable_resource(resource_func, {'query_params': {'filter': 'name:123'}})
|
||||
|
||||
assert [] == list(items)
|
||||
resource_func.assert_called_once_with(params={'query_params': {'filter': 'name:123', 'offset': 0, 'limit': 10}})
|
||||
|
||||
def test_iterate_over_pageable_resource_should_preserve_limit(self):
|
||||
resource_func = mock.Mock(side_effect=[
|
||||
{'items': ['foo']},
|
||||
{'items': []},
|
||||
])
|
||||
|
||||
items = iterate_over_pageable_resource(resource_func, {'query_params': {'limit': 1}})
|
||||
|
||||
assert ['foo'] == list(items)
|
||||
resource_func.assert_has_calls([
|
||||
call(params={'query_params': {'offset': 0, 'limit': 1}})
|
||||
])
|
||||
|
||||
def test_iterate_over_pageable_resource_should_preserve_offset(self):
|
||||
resource_func = mock.Mock(side_effect=[
|
||||
{'items': ['foo']},
|
||||
{'items': []},
|
||||
])
|
||||
|
||||
items = iterate_over_pageable_resource(resource_func, {'query_params': {'offset': 3}})
|
||||
|
||||
assert ['foo'] == list(items)
|
||||
resource_func.assert_has_calls([
|
||||
call(params={'query_params': {'offset': 3, 'limit': 10}}),
|
||||
])
|
||||
|
||||
def test_iterate_over_pageable_resource_should_pass_with_string_offset_and_limit(self):
|
||||
resource_func = mock.Mock(side_effect=[
|
||||
{'items': ['foo']},
|
||||
{'items': []},
|
||||
])
|
||||
|
||||
items = iterate_over_pageable_resource(resource_func, {'query_params': {'offset': '1', 'limit': '1'}})
|
||||
|
||||
assert ['foo'] == list(items)
|
||||
resource_func.assert_has_calls([
|
||||
call(params={'query_params': {'offset': '1', 'limit': '1'}}),
|
||||
call(params={'query_params': {'offset': 2, 'limit': '1'}})
|
||||
])
|
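# Editorial note (inferred from the expected calls above, not from separate documentation): when
# offset and limit arrive as strings, the follow-up request seems to compute the next offset as
# int(offset) + int(limit), which is why '1' and '1' become 2 here. Sketch of that arithmetic:
#     offset, limit = '1', '1'
#     assert int(offset) + int(limit) == 2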
||||
|
||||
def test_iterate_over_pageable_resource_raises_exception_when_server_returned_more_items_than_requested(self):
|
||||
resource_func = mock.Mock(side_effect=[
|
||||
{'items': ['foo', 'redundant_bar']},
|
||||
{'items': []},
|
||||
])
|
||||
|
||||
with pytest.raises(FtdUnexpectedResponse):
|
||||
list(iterate_over_pageable_resource(resource_func, {'query_params': {'offset': '1', 'limit': '1'}}))
|
||||
|
||||
resource_func.assert_has_calls([
|
||||
call(params={'query_params': {'offset': '1', 'limit': '1'}})
|
||||
])
|
||||
|
||||
|
||||
class TestOperationCheckerClass(unittest.TestCase):
    def setUp(self):
        self._checker = OperationChecker

    def test_is_add_operation_positive(self):
        operation_name = OperationNamePrefix.ADD + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.POST}
        assert self._checker.is_add_operation(operation_name, operation_spec)

    def test_is_add_operation_wrong_method_in_spec(self):
        operation_name = OperationNamePrefix.ADD + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.GET}
        assert not self._checker.is_add_operation(operation_name, operation_spec)

    def test_is_add_operation_negative_wrong_operation_name(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.POST}
        assert not self._checker.is_add_operation(operation_name, operation_spec)

    def test_is_edit_operation_positive(self):
        operation_name = OperationNamePrefix.EDIT + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.PUT}
        assert self._checker.is_edit_operation(operation_name, operation_spec)

    def test_is_edit_operation_wrong_method_in_spec(self):
        operation_name = OperationNamePrefix.EDIT + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.GET}
        assert not self._checker.is_edit_operation(operation_name, operation_spec)

    def test_is_edit_operation_negative_wrong_operation_name(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.PUT}
        assert not self._checker.is_edit_operation(operation_name, operation_spec)

    def test_is_delete_operation_positive(self):
        operation_name = OperationNamePrefix.DELETE + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.DELETE}
        self.assertTrue(
            self._checker.is_delete_operation(operation_name, operation_spec)
        )

    def test_is_delete_operation_wrong_method_in_spec(self):
        operation_name = OperationNamePrefix.DELETE + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.GET}
        assert not self._checker.is_delete_operation(operation_name, operation_spec)

    def test_is_delete_operation_negative_wrong_operation_name(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {OperationField.METHOD: HTTPMethod.DELETE}
        assert not self._checker.is_delete_operation(operation_name, operation_spec)

    def test_is_get_list_operation_positive(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.GET,
            OperationField.RETURN_MULTIPLE_ITEMS: True
        }
        assert self._checker.is_get_list_operation(operation_name, operation_spec)

    def test_is_get_list_operation_wrong_method_in_spec(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.POST,
            OperationField.RETURN_MULTIPLE_ITEMS: True
        }
        assert not self._checker.is_get_list_operation(operation_name, operation_spec)

    def test_is_get_list_operation_does_not_return_list(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.GET,
            OperationField.RETURN_MULTIPLE_ITEMS: False
        }
        assert not self._checker.is_get_list_operation(operation_name, operation_spec)

    def test_is_get_operation_positive(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.GET,
            OperationField.RETURN_MULTIPLE_ITEMS: False
        }
        self.assertTrue(
            self._checker.is_get_operation(operation_name, operation_spec)
        )

    def test_is_get_operation_wrong_method_in_spec(self):
        operation_name = OperationNamePrefix.ADD + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.POST,
            OperationField.RETURN_MULTIPLE_ITEMS: False
        }
        assert not self._checker.is_get_operation(operation_name, operation_spec)

    def test_is_get_operation_negative_when_returns_multiple(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.GET,
            OperationField.RETURN_MULTIPLE_ITEMS: True
        }
        assert not self._checker.is_get_operation(operation_name, operation_spec)

    def test_is_upsert_operation_positive(self):
        operation_name = OperationNamePrefix.UPSERT + "Object"
        assert self._checker.is_upsert_operation(operation_name)

    def test_is_upsert_operation_with_wrong_operation_name(self):
        for op_type in [OperationNamePrefix.ADD, OperationNamePrefix.GET, OperationNamePrefix.EDIT,
                        OperationNamePrefix.DELETE]:
            operation_name = op_type + "Object"
            assert not self._checker.is_upsert_operation(operation_name)

    def test_is_find_by_filter_operation(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.GET,
            OperationField.RETURN_MULTIPLE_ITEMS: True
        }
        params = {ParamName.FILTERS: 1}
        self.assertTrue(
            self._checker.is_find_by_filter_operation(
                operation_name, params, operation_spec
            )
        )

    def test_is_find_by_filter_operation_negative_when_filters_empty(self):
        operation_name = OperationNamePrefix.GET + "Object"
        operation_spec = {
            OperationField.METHOD: HTTPMethod.GET,
            OperationField.RETURN_MULTIPLE_ITEMS: True
        }
        params = {ParamName.FILTERS: None}
        assert not self._checker.is_find_by_filter_operation(
            operation_name, params, operation_spec
        )

        params = {}
        assert not self._checker.is_find_by_filter_operation(
            operation_name, params, operation_spec
        )

    def test_is_upsert_operation_supported_operation(self):
        get_list_op_spec = {OperationField.METHOD: HTTPMethod.GET, OperationField.RETURN_MULTIPLE_ITEMS: True}
        add_op_spec = {OperationField.METHOD: HTTPMethod.POST}
        edit_op_spec = {OperationField.METHOD: HTTPMethod.PUT}

        assert self._checker.is_upsert_operation_supported({'getList': get_list_op_spec, 'edit': edit_op_spec})
        assert self._checker.is_upsert_operation_supported(
            {'add': add_op_spec, 'getList': get_list_op_spec, 'edit': edit_op_spec})
        assert not self._checker.is_upsert_operation_supported({'getList': get_list_op_spec})
        assert not self._checker.is_upsert_operation_supported({'edit': edit_op_spec})
        assert not self._checker.is_upsert_operation_supported({'getList': get_list_op_spec, 'add': add_op_spec})

File diff suppressed because one or more lines are too long
@ -1,145 +0,0 @@
# Copyright (c) 2019 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#

import pytest

pytest.importorskip("kick")

from ansible_collections.community.general.plugins.module_utils.network.ftd.device import FtdPlatformFactory, FtdModel, FtdAsa5500xPlatform, \
    Ftd2100Platform, AbstractFtdPlatform
from ansible_collections.community.general.tests.unit.plugins.modules.network.ftd.test_ftd_install import DEFAULT_MODULE_PARAMS


class TestFtdModel(object):

    def test_has_value_should_return_true_for_existing_models(self):
        assert FtdModel.FTD_2120 in FtdModel.supported_models()
        assert FtdModel.FTD_ASA5516_X in FtdModel.supported_models()

    def test_has_value_should_return_false_for_non_existing_models(self):
        assert 'nonExistingModel' not in FtdModel.supported_models()
        assert None not in FtdModel.supported_models()


class TestFtdPlatformFactory(object):

    @pytest.fixture(autouse=True)
    def mock_devices(self, mocker):
        mocker.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.device.Kp')
        mocker.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.device.Ftd5500x')

    def test_factory_should_return_corresponding_platform(self):
        ftd_platform = FtdPlatformFactory.create(FtdModel.FTD_ASA5508_X, dict(DEFAULT_MODULE_PARAMS))
        assert type(ftd_platform) is FtdAsa5500xPlatform
        ftd_platform = FtdPlatformFactory.create(FtdModel.FTD_2130, dict(DEFAULT_MODULE_PARAMS))
        assert type(ftd_platform) is Ftd2100Platform

    def test_factory_should_raise_error_with_not_supported_model(self):
        with pytest.raises(ValueError) as ex:
            FtdPlatformFactory.create('nonExistingModel', dict(DEFAULT_MODULE_PARAMS))
        assert "FTD model 'nonExistingModel' is not supported by this module." == ex.value.args[0]


class TestAbstractFtdPlatform(object):

    def test_install_ftd_image_raise_error_on_abstract_class(self):
        with pytest.raises(NotImplementedError):
            AbstractFtdPlatform().install_ftd_image(dict(DEFAULT_MODULE_PARAMS))

    def test_supports_ftd_model_should_return_true_for_supported_models(self):
        assert Ftd2100Platform.supports_ftd_model(FtdModel.FTD_2120)
        assert FtdAsa5500xPlatform.supports_ftd_model(FtdModel.FTD_ASA5516_X)

    def test_supports_ftd_model_should_return_false_for_non_supported_models(self):
        assert not AbstractFtdPlatform.supports_ftd_model(FtdModel.FTD_2120)
        assert not Ftd2100Platform.supports_ftd_model(FtdModel.FTD_ASA5508_X)
        assert not FtdAsa5500xPlatform.supports_ftd_model(FtdModel.FTD_2120)

    def test_parse_rommon_file_location(self):
        server, path = AbstractFtdPlatform.parse_rommon_file_location('tftp://1.2.3.4/boot/rommon-boot.foo')
        assert '1.2.3.4' == server
        assert '/boot/rommon-boot.foo' == path

    def test_parse_rommon_file_location_should_fail_for_non_tftp_protocol(self):
        with pytest.raises(ValueError) as ex:
            AbstractFtdPlatform.parse_rommon_file_location('http://1.2.3.4/boot/rommon-boot.foo')
        assert 'The ROMMON image must be downloaded from TFTP server' in str(ex.value)


class TestFtd2100Platform(object):

    @pytest.fixture
    def kp_mock(self, mocker):
        return mocker.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.device.Kp')

    @pytest.fixture
    def module_params(self):
        return dict(DEFAULT_MODULE_PARAMS)

    def test_install_ftd_image_should_call_kp_module(self, kp_mock, module_params):
        ftd = FtdPlatformFactory.create(FtdModel.FTD_2110, module_params)
        ftd.install_ftd_image(module_params)

        assert kp_mock.called
        assert kp_mock.return_value.ssh_console.called
        ftd_line = kp_mock.return_value.ssh_console.return_value
        assert ftd_line.baseline_fp2k_ftd.called
        assert ftd_line.disconnect.called

    def test_install_ftd_image_should_call_disconnect_when_install_fails(self, kp_mock, module_params):
        ftd_line = kp_mock.return_value.ssh_console.return_value
        ftd_line.baseline_fp2k_ftd.side_effect = Exception('Something went wrong')

        ftd = FtdPlatformFactory.create(FtdModel.FTD_2120, module_params)
        with pytest.raises(Exception):
            ftd.install_ftd_image(module_params)

        assert ftd_line.baseline_fp2k_ftd.called
        assert ftd_line.disconnect.called


class TestFtdAsa5500xPlatform(object):

    @pytest.fixture
    def asa5500x_mock(self, mocker):
        return mocker.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.device.Ftd5500x')

    @pytest.fixture
    def module_params(self):
        return dict(DEFAULT_MODULE_PARAMS)

    def test_install_ftd_image_should_call_kp_module(self, asa5500x_mock, module_params):
        ftd = FtdPlatformFactory.create(FtdModel.FTD_ASA5508_X, module_params)
        ftd.install_ftd_image(module_params)

        assert asa5500x_mock.called
        assert asa5500x_mock.return_value.ssh_console.called
        ftd_line = asa5500x_mock.return_value.ssh_console.return_value
        assert ftd_line.rommon_to_new_image.called
        assert ftd_line.disconnect.called

    def test_install_ftd_image_should_call_disconnect_when_install_fails(self, asa5500x_mock, module_params):
        ftd_line = asa5500x_mock.return_value.ssh_console.return_value
        ftd_line.rommon_to_new_image.side_effect = Exception('Something went wrong')

        ftd = FtdPlatformFactory.create(FtdModel.FTD_ASA5516_X, module_params)
        with pytest.raises(Exception):
            ftd.install_ftd_image(module_params)

        assert ftd_line.rommon_to_new_image.called
        assert ftd_line.disconnect.called

@ -1,379 +0,0 @@
# Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#

import copy
import os
import unittest

from ansible_collections.community.general.plugins.module_utils.network.ftd.common import HTTPMethod
from ansible_collections.community.general.plugins.module_utils.network.ftd.fdm_swagger_client import FdmSwaggerParser

DIR_PATH = os.path.dirname(os.path.realpath(__file__))
TEST_DATA_FOLDER = os.path.join(DIR_PATH, 'test_data')

base = {
|
||||
'basePath': "/api/fdm/v2",
|
||||
'definitions': {"NetworkObject": {"type": "object",
|
||||
"properties": {"version": {"type": "string"}, "name": {"type": "string"},
|
||||
"description": {"type": "string"},
|
||||
"subType": {"type": "object",
|
||||
"$ref": "#/definitions/NetworkObjectType"},
|
||||
"value": {"type": "string"},
|
||||
"isSystemDefined": {"type": "boolean"},
|
||||
"dnsResolution": {"type": "object",
|
||||
"$ref": "#/definitions/FQDNDNSResolution"},
|
||||
"id": {"type": "string"},
|
||||
"type": {"type": "string", "default": "networkobject"}},
|
||||
"required": ["subType", "type", "value", "name"]},
|
||||
"NetworkObjectWrapper": {
|
||||
"allOf": [{"$ref": "#/definitions/NetworkObject"}, {"$ref": "#/definitions/LinksWrapper"}]}
|
||||
},
|
||||
'paths': {
|
||||
"/object/networks": {
|
||||
"get": {"tags": ["NetworkObject"],
|
||||
"operationId": "getNetworkObjectList",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "",
|
||||
"schema": {"type": "object",
|
||||
"title": "NetworkObjectList",
|
||||
"properties": {
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {"$ref": "#/definitions/NetworkObjectWrapper"}},
|
||||
"paging": {
|
||||
"$ref": "#/definitions/Paging"}},
|
||||
"required": ["items", "paging"]}}},
|
||||
"parameters": [
|
||||
{"name": "offset", "in": "query", "required": False, "type": "integer"},
|
||||
{"name": "limit", "in": "query", "required": False, "type": "integer"},
|
||||
{"name": "sort", "in": "query", "required": False, "type": "string"},
|
||||
{"name": "filter", "in": "query", "required": False, "type": "string"}]},
|
||||
"post": {"tags": ["NetworkObject"], "operationId": "addNetworkObject",
|
||||
"responses": {
|
||||
"200": {"description": "",
|
||||
"schema": {"type": "object",
|
||||
"$ref": "#/definitions/NetworkObjectWrapper"}},
|
||||
"422": {"description": "",
|
||||
"schema": {"type": "object", "$ref": "#/definitions/ErrorWrapper"}}},
|
||||
"parameters": [{"in": "body", "name": "body",
|
||||
"required": True,
|
||||
"schema": {"$ref": "#/definitions/NetworkObject"}}]}
|
||||
},
|
||||
"/object/networks/{objId}": {
|
||||
"get": {"tags": ["NetworkObject"], "operationId": "getNetworkObject",
|
||||
"responses": {"200": {"description": "",
|
||||
"schema": {"type": "object",
|
||||
"$ref": "#/definitions/NetworkObjectWrapper"}},
|
||||
"404": {"description": "",
|
||||
"schema": {"type": "object",
|
||||
"$ref": "#/definitions/ErrorWrapper"}}},
|
||||
"parameters": [{"name": "objId", "in": "path", "required": True,
|
||||
"type": "string"}]},
|
||||
|
||||
"put": {"tags": ["NetworkObject"], "operationId": "editNetworkObject",
|
||||
"responses": {"200": {"description": "",
|
||||
"schema": {"type": "object",
|
||||
"$ref": "#/definitions/NetworkObjectWrapper"}},
|
||||
"422": {"description": "",
|
||||
"schema": {"type": "object",
|
||||
"$ref": "#/definitions/ErrorWrapper"}}},
|
||||
"parameters": [{"name": "objId", "in": "path", "required": True,
|
||||
"type": "string"},
|
||||
{"in": "body", "name": "body", "required": True,
|
||||
"schema": {"$ref": "#/definitions/NetworkObject"}}]},
|
||||
"delete": {"tags": ["NetworkObject"], "operationId": "deleteNetworkObject",
|
||||
"responses": {"204": {"description": ""},
|
||||
"422": {"description": "",
|
||||
"schema": {"type": "object",
|
||||
"$ref": "#/definitions/ErrorWrapper"}}},
|
||||
"parameters": [{"name": "objId", "in": "path", "required": True,
|
||||
"type": "string"}]}}}
|
||||
}
|
||||
|
||||
|
||||
def _get_objects(base_object, key_names):
|
||||
return dict((_key, base_object[_key]) for _key in key_names)
|
||||
|
||||
|
||||
class TestFdmSwaggerParser(unittest.TestCase):
|
||||
|
||||
def test_simple_object(self):
|
||||
self._data = copy.deepcopy(base)
|
||||
|
||||
self.fdm_data = FdmSwaggerParser().parse_spec(self._data)
|
||||
|
||||
expected_operations = {
|
||||
'getNetworkObjectList': {
|
||||
'method': HTTPMethod.GET,
|
||||
'url': '/api/fdm/v2/object/networks',
|
||||
'modelName': 'NetworkObject',
|
||||
'parameters': {
|
||||
'path': {},
|
||||
'query': {
|
||||
'offset': {
|
||||
'required': False,
|
||||
'type': 'integer'
|
||||
},
|
||||
'limit': {
|
||||
'required': False,
|
||||
'type': 'integer'
|
||||
},
|
||||
'sort': {
|
||||
'required': False,
|
||||
'type': 'string'
|
||||
},
|
||||
'filter': {
|
||||
'required': False,
|
||||
'type': 'string'
|
||||
}
|
||||
}
|
||||
},
|
||||
'returnMultipleItems': True,
|
||||
"tags": ["NetworkObject"]
|
||||
},
|
||||
'addNetworkObject': {
|
||||
'method': HTTPMethod.POST,
|
||||
'url': '/api/fdm/v2/object/networks',
|
||||
'modelName': 'NetworkObject',
|
||||
'parameters': {'path': {},
|
||||
'query': {}},
|
||||
'returnMultipleItems': False,
|
||||
"tags": ["NetworkObject"]
|
||||
},
|
||||
'getNetworkObject': {
|
||||
'method': HTTPMethod.GET,
|
||||
'url': '/api/fdm/v2/object/networks/{objId}',
|
||||
'modelName': 'NetworkObject',
|
||||
'parameters': {
|
||||
'path': {
|
||||
'objId': {
|
||||
'required': True,
|
||||
'type': "string"
|
||||
}
|
||||
},
|
||||
'query': {}
|
||||
},
|
||||
'returnMultipleItems': False,
|
||||
"tags": ["NetworkObject"]
|
||||
},
|
||||
'editNetworkObject': {
|
||||
'method': HTTPMethod.PUT,
|
||||
'url': '/api/fdm/v2/object/networks/{objId}',
|
||||
'modelName': 'NetworkObject',
|
||||
'parameters': {
|
||||
'path': {
|
||||
'objId': {
|
||||
'required': True,
|
||||
'type': "string"
|
||||
}
|
||||
},
|
||||
'query': {}
|
||||
},
|
||||
'returnMultipleItems': False,
|
||||
"tags": ["NetworkObject"]
|
||||
},
|
||||
'deleteNetworkObject': {
|
||||
'method': HTTPMethod.DELETE,
|
||||
'url': '/api/fdm/v2/object/networks/{objId}',
|
||||
'modelName': 'NetworkObject',
|
||||
'parameters': {
|
||||
'path': {
|
||||
'objId': {
|
||||
'required': True,
|
||||
'type': "string"
|
||||
}
|
||||
},
|
||||
'query': {}
|
||||
},
|
||||
'returnMultipleItems': False,
|
||||
"tags": ["NetworkObject"]
|
||||
}
|
||||
}
|
||||
assert sorted(['NetworkObject', 'NetworkObjectWrapper']) == sorted(self.fdm_data['models'].keys())
|
||||
assert expected_operations == self.fdm_data['operations']
|
||||
assert {'NetworkObject': expected_operations} == self.fdm_data['model_operations']
|
||||
|
||||
def test_simple_object_with_documentation(self):
|
||||
api_spec = copy.deepcopy(base)
|
||||
docs = {
|
||||
'definitions': {
|
||||
'NetworkObject': {
|
||||
'description': 'Description for Network Object',
|
||||
'properties': {'name': 'Description for name field'}
|
||||
}
|
||||
},
|
||||
'paths': {
|
||||
'/object/networks': {
|
||||
'get': {
|
||||
'description': 'Description for getNetworkObjectList operation',
|
||||
'parameters': [{'name': 'offset', 'description': 'Description for offset field'}]
|
||||
},
|
||||
'post': {'description': 'Description for addNetworkObject operation'}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.fdm_data = FdmSwaggerParser().parse_spec(api_spec, docs)
|
||||
|
||||
assert 'Description for Network Object' == self.fdm_data['models']['NetworkObject']['description']
|
||||
assert '' == self.fdm_data['models']['NetworkObjectWrapper']['description']
|
||||
network_properties = self.fdm_data['models']['NetworkObject']['properties']
|
||||
assert '' == network_properties['id']['description']
|
||||
assert not network_properties['id']['required']
|
||||
assert 'Description for name field' == network_properties['name']['description']
|
||||
assert network_properties['name']['required']
|
||||
|
||||
ops = self.fdm_data['operations']
|
||||
assert 'Description for getNetworkObjectList operation' == ops['getNetworkObjectList']['description']
|
||||
assert 'Description for addNetworkObject operation' == ops['addNetworkObject']['description']
|
||||
assert '' == ops['deleteNetworkObject']['description']
|
||||
|
||||
get_op_params = ops['getNetworkObjectList']['parameters']
|
||||
assert 'Description for offset field' == get_op_params['query']['offset']['description']
|
||||
assert '' == get_op_params['query']['limit']['description']
|
||||
|
||||
def test_model_operations_should_contain_all_operations(self):
|
||||
data = {
|
||||
'basePath': '/v2/',
|
||||
'definitions': {
|
||||
'Model1': {"type": "object"},
|
||||
'Model2': {"type": "object"},
|
||||
'Model3': {"type": "object"}
|
||||
},
|
||||
'paths': {
|
||||
'path1': {
|
||||
'get': {
|
||||
'operationId': 'getSomeModelList',
|
||||
"responses": {
|
||||
"200": {"description": "",
|
||||
"schema": {"type": "object",
|
||||
"title": "NetworkObjectList",
|
||||
"properties": {
|
||||
"items": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/definitions/Model1"
|
||||
}
|
||||
}
|
||||
}}
|
||||
}
|
||||
}
|
||||
},
|
||||
"post": {
|
||||
"operationId": "addSomeModel",
|
||||
"parameters": [{"in": "body",
|
||||
"name": "body",
|
||||
"schema": {"$ref": "#/definitions/Model2"}
|
||||
}]}
|
||||
},
|
||||
'path2/{id}': {
|
||||
"get": {"operationId": "getSomeModel",
|
||||
"responses": {"200": {"description": "",
|
||||
"schema": {"type": "object",
|
||||
"$ref": "#/definitions/Model3"}},
|
||||
}
|
||||
},
|
||||
"put": {"operationId": "editSomeModel",
|
||||
"parameters": [{"in": "body",
|
||||
"name": "body",
|
||||
"schema": {"$ref": "#/definitions/Model1"}}
|
||||
]},
|
||||
"delete": {
|
||||
"operationId": "deleteModel3",
|
||||
}},
|
||||
'path3': {
|
||||
"delete": {
|
||||
"operationId": "deleteNoneModel",
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
expected_operations = {
|
||||
'getSomeModelList': {
|
||||
'method': HTTPMethod.GET,
|
||||
'url': '/v2/path1',
|
||||
'modelName': 'Model1',
|
||||
'returnMultipleItems': True,
|
||||
'tags': []
|
||||
},
|
||||
'addSomeModel': {
|
||||
'method': HTTPMethod.POST,
|
||||
'url': '/v2/path1',
|
||||
'modelName': 'Model2',
|
||||
'parameters': {
|
||||
'path': {},
|
||||
'query': {}
|
||||
},
|
||||
'returnMultipleItems': False,
|
||||
'tags': []
|
||||
},
|
||||
'getSomeModel': {
|
||||
'method': HTTPMethod.GET,
|
||||
'url': '/v2/path2/{id}',
|
||||
'modelName': 'Model3',
|
||||
'returnMultipleItems': False,
|
||||
'tags': []
|
||||
},
|
||||
'editSomeModel': {
|
||||
'method': HTTPMethod.PUT,
|
||||
'url': '/v2/path2/{id}',
|
||||
'modelName': 'Model1',
|
||||
'parameters': {
|
||||
'path': {},
|
||||
'query': {}
|
||||
},
|
||||
'returnMultipleItems': False,
|
||||
'tags': []
|
||||
},
|
||||
'deleteModel3': {
|
||||
'method': HTTPMethod.DELETE,
|
||||
'url': '/v2/path2/{id}',
|
||||
'modelName': 'Model3',
|
||||
'returnMultipleItems': False,
|
||||
'tags': []
|
||||
},
|
||||
'deleteNoneModel': {
|
||||
'method': HTTPMethod.DELETE,
|
||||
'url': '/v2/path3',
|
||||
'modelName': None,
|
||||
'returnMultipleItems': False,
|
||||
'tags': []
|
||||
}
|
||||
}
|
||||
|
||||
fdm_data = FdmSwaggerParser().parse_spec(data)
|
||||
assert sorted(['Model1', 'Model2', 'Model3']) == sorted(fdm_data['models'].keys())
|
||||
assert expected_operations == fdm_data['operations']
|
||||
assert {
|
||||
'Model1': {
|
||||
'getSomeModelList': expected_operations['getSomeModelList'],
|
||||
'editSomeModel': expected_operations['editSomeModel'],
|
||||
},
|
||||
'Model2': {
|
||||
'addSomeModel': expected_operations['addSomeModel']
|
||||
},
|
||||
'Model3': {
|
||||
'getSomeModel': expected_operations['getSomeModel'],
|
||||
'deleteModel3': expected_operations['deleteModel3']
|
||||
},
|
||||
None: {
|
||||
'deleteNoneModel': expected_operations['deleteNoneModel']
|
||||
}
|
||||
} == fdm_data['model_operations']
|
File diff suppressed because it is too large
@ -1,75 +0,0 @@
import json
import os
import unittest

from ansible_collections.community.general.plugins.module_utils.network.ftd.fdm_swagger_client import FdmSwaggerValidator, FdmSwaggerParser
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
TEST_DATA_FOLDER = os.path.join(DIR_PATH, 'test_data')


class TestFdmSwagger(unittest.TestCase):

    def setUp(self):
        self.init_mock_data()

    def init_mock_data(self):
        with open(os.path.join(TEST_DATA_FOLDER, 'ngfw_with_ex.json'), 'rb') as f:
            self.base_data = json.loads(f.read().decode('utf-8'))

    def test_with_all_data(self):
        fdm_data = FdmSwaggerParser().parse_spec(self.base_data)
        validator = FdmSwaggerValidator(fdm_data)
        models = fdm_data['models']
        operations = fdm_data['operations']

        invalid = set({})
        for operation in operations:
            model_name = operations[operation]['modelName']
            method = operations[operation]['method']
            if method != 'get' and model_name in models:
                if 'example' in models[model_name]:
                    example = models[model_name]['example']
                    try:
                        valid, rez = validator.validate_data(operation, example)
                        assert valid
                    except Exception:
                        invalid.add(model_name)
        assert invalid == set(['TCPPortObject',
                               'UDPPortObject',
                               'ICMPv4PortObject',
                               'ICMPv6PortObject',
                               'StandardAccessList',
                               'ExtendedAccessList',
                               'ASPathList',
                               'RouteMap',
                               'StandardCommunityList',
                               'ExpandedCommunityList',
                               'IPV4PrefixList',
                               'IPV6PrefixList',
                               'PolicyList',
                               'SyslogServer',
                               'HAConfiguration',
                               'TestIdentitySource'])

    def test_parse_all_data(self):
        self.fdm_data = FdmSwaggerParser().parse_spec(self.base_data)
        operations = self.fdm_data['operations']
        without_model_name = []
        expected_operations_counter = 0
        for key in self.base_data['paths']:
            operation = self.base_data['paths'][key]
            for dummy in operation:
                expected_operations_counter += 1

        for key in operations:
            operation = operations[key]
            if not operation['modelName']:
                without_model_name.append(operation['url'])

            if operation['modelName'] == '_File' and 'download' not in operation['url']:
                self.fail('File type can be defined for download operation only')

        assert sorted(['/api/fdm/v2/operational/deploy/{objId}', '/api/fdm/v2/action/upgrade']) == sorted(
            without_model_name)
        assert sorted(self.fdm_data['model_operations'][None].keys()) == sorted(['deleteDeployment', 'startUpgrade'])
        assert expected_operations_counter == len(operations)

@ -1,886 +0,0 @@
# Copyright (c) 2018 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#

from __future__ import absolute_import

import copy
import json
import unittest

import pytest
from ansible_collections.community.general.tests.unit.compat import mock

from ansible_collections.community.general.plugins.module_utils.network.ftd.common import FtdServerError, HTTPMethod, ResponseParams, FtdConfigurationError
from ansible_collections.community.general.plugins.module_utils.network.ftd.configuration import DUPLICATE_NAME_ERROR_MESSAGE, UNPROCESSABLE_ENTITY_STATUS, \
    MULTIPLE_DUPLICATES_FOUND_ERROR, BaseConfigurationResource, FtdInvalidOperationNameError, QueryParams, \
    ADD_OPERATION_NOT_SUPPORTED_ERROR, ParamName
from ansible_collections.community.general.plugins.module_utils.network.ftd.fdm_swagger_client import ValidationError

ADD_RESPONSE = {'status': 'Object added'}
EDIT_RESPONSE = {'status': 'Object edited'}
DELETE_RESPONSE = {'status': 'Object deleted'}
GET_BY_FILTER_RESPONSE = [{'name': 'foo', 'description': 'bar'}]
ARBITRARY_RESPONSE = {'status': 'Arbitrary request sent'}


class TestUpsertOperationUnitTests(unittest.TestCase):
|
||||
|
||||
@mock.patch.object(BaseConfigurationResource, '_fetch_system_info')
|
||||
def setUp(self, fetch_system_info_mock):
|
||||
self._conn = mock.MagicMock()
|
||||
self._resource = BaseConfigurationResource(self._conn)
|
||||
fetch_system_info_mock.return_value = {
|
||||
'databaseInfo': {
|
||||
'buildVersion': '6.3.0'
|
||||
}
|
||||
}
|
||||
|
||||
def test_get_operation_name(self):
|
||||
operation_a = mock.MagicMock()
|
||||
operation_b = mock.MagicMock()
|
||||
|
||||
def checker_wrapper(expected_object):
|
||||
def checker(obj, *args, **kwargs):
|
||||
return obj == expected_object
|
||||
|
||||
return checker
|
||||
|
||||
operations = {
|
||||
operation_a: "spec",
|
||||
operation_b: "spec"
|
||||
}
|
||||
|
||||
assert operation_a == self._resource._get_operation_name(checker_wrapper(operation_a), operations)
|
||||
assert operation_b == self._resource._get_operation_name(checker_wrapper(operation_b), operations)
|
||||
assert self._resource._get_operation_name(checker_wrapper(None), operations) is None
|
||||
|
||||
@mock.patch.object(BaseConfigurationResource, "_get_operation_name")
|
||||
@mock.patch.object(BaseConfigurationResource, "add_object")
|
||||
def test_add_upserted_object(self, add_object_mock, get_operation_mock):
|
||||
model_operations = mock.MagicMock()
|
||||
params = mock.MagicMock()
|
||||
add_op_name = get_operation_mock.return_value
|
||||
|
||||
assert add_object_mock.return_value == self._resource._add_upserted_object(model_operations, params)
|
||||
|
||||
get_operation_mock.assert_called_once_with(
|
||||
self._resource._operation_checker.is_add_operation,
|
||||
model_operations)
|
||||
add_object_mock.assert_called_once_with(add_op_name, params)
|
||||
|
||||
@mock.patch.object(BaseConfigurationResource, "_get_operation_name")
|
||||
@mock.patch.object(BaseConfigurationResource, "add_object")
|
||||
def test_add_upserted_object_with_no_add_operation(self, add_object_mock, get_operation_mock):
|
||||
model_operations = mock.MagicMock()
|
||||
get_operation_mock.return_value = None
|
||||
|
||||
with pytest.raises(FtdConfigurationError) as exc_info:
|
||||
self._resource._add_upserted_object(model_operations, mock.MagicMock())
|
||||
assert ADD_OPERATION_NOT_SUPPORTED_ERROR in str(exc_info.value)
|
||||
|
||||
get_operation_mock.assert_called_once_with(self._resource._operation_checker.is_add_operation, model_operations)
|
||||
add_object_mock.assert_not_called()
|
||||
|
||||
@mock.patch.object(BaseConfigurationResource, "_get_operation_name")
|
||||
@mock.patch.object(BaseConfigurationResource, "edit_object")
|
||||
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.copy_identity_properties')
|
||||
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration._set_default')
|
||||
def test_edit_upserted_object(self, _set_default_mock, copy_properties_mock, edit_object_mock, get_operation_mock):
|
||||
model_operations = mock.MagicMock()
|
||||
existing_object = mock.MagicMock()
|
||||
params = {
|
||||
'path_params': {},
|
||||
'data': {}
|
||||
}
|
||||
|
||||
result = self._resource._edit_upserted_object(model_operations, existing_object, params)
|
||||
|
||||
assert result == edit_object_mock.return_value
|
||||
|
||||
_set_default_mock.assert_has_calls([
|
||||
mock.call(params, 'path_params', {}),
|
||||
mock.call(params, 'data', {})
|
||||
])
|
||||
get_operation_mock.assert_called_once_with(
|
||||
self._resource._operation_checker.is_edit_operation,
|
||||
model_operations
|
||||
)
|
||||
copy_properties_mock.assert_called_once_with(
|
||||
existing_object,
|
||||
params['data']
|
||||
)
|
||||
edit_object_mock.assert_called_once_with(
|
||||
get_operation_mock.return_value,
|
||||
params
|
||||
)
|
||||
|
||||
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
|
||||
@mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
|
||||
@mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
|
||||
@mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
|
||||
@mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
|
||||
def test_upsert_object_successfully_added(self, edit_mock, add_mock, find_object, get_operation_mock,
|
||||
is_upsert_supported_mock):
|
||||
params = mock.MagicMock()
|
||||
|
||||
is_upsert_supported_mock.return_value = True
|
||||
find_object.return_value = None
|
||||
|
||||
result = self._resource.upsert_object('upsertFoo', params)
|
||||
|
||||
assert result == add_mock.return_value
|
||||
self._conn.get_model_spec.assert_called_once_with('Foo')
|
||||
is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
|
||||
get_operation_mock.assert_called_once_with('Foo')
|
||||
find_object.assert_called_once_with('Foo', params)
|
||||
add_mock.assert_called_once_with(get_operation_mock.return_value, params)
|
||||
edit_mock.assert_not_called()
|
||||
|
||||
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.equal_objects')
|
||||
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
|
||||
@mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
|
||||
@mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
|
||||
@mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
|
||||
@mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
|
||||
def test_upsert_object_successfully_edited(self, edit_mock, add_mock, find_object, get_operation_mock,
|
||||
is_upsert_supported_mock, equal_objects_mock):
|
||||
params = mock.MagicMock()
|
||||
existing_obj = mock.MagicMock()
|
||||
|
||||
is_upsert_supported_mock.return_value = True
|
||||
find_object.return_value = existing_obj
|
||||
equal_objects_mock.return_value = False
|
||||
|
||||
result = self._resource.upsert_object('upsertFoo', params)
|
||||
|
||||
assert result == edit_mock.return_value
|
||||
self._conn.get_model_spec.assert_called_once_with('Foo')
|
||||
get_operation_mock.assert_called_once_with('Foo')
|
||||
is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
|
||||
add_mock.assert_not_called()
|
||||
equal_objects_mock.assert_called_once_with(existing_obj, params[ParamName.DATA])
|
||||
edit_mock.assert_called_once_with(get_operation_mock.return_value, existing_obj, params)
|
||||
|
||||
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.equal_objects')
|
||||
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
|
||||
@mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
|
||||
@mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
|
||||
@mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
|
||||
@mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
|
||||
def test_upsert_object_returned_without_modifications(self, edit_mock, add_mock, find_object, get_operation_mock,
|
||||
is_upsert_supported_mock, equal_objects_mock):
|
||||
params = mock.MagicMock()
|
||||
existing_obj = mock.MagicMock()
|
||||
|
||||
is_upsert_supported_mock.return_value = True
|
||||
find_object.return_value = existing_obj
|
||||
equal_objects_mock.return_value = True
|
||||
|
||||
result = self._resource.upsert_object('upsertFoo', params)
|
||||
|
||||
assert result == existing_obj
|
||||
self._conn.get_model_spec.assert_called_once_with('Foo')
|
||||
get_operation_mock.assert_called_once_with('Foo')
|
||||
is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
|
||||
add_mock.assert_not_called()
|
||||
equal_objects_mock.assert_called_once_with(existing_obj, params[ParamName.DATA])
|
||||
edit_mock.assert_not_called()
|
||||
|
||||
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
|
||||
@mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
|
||||
@mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
|
||||
@mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
|
||||
@mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
|
||||
def test_upsert_object_not_supported(self, edit_mock, add_mock, find_object, get_operation_mock,
|
||||
is_upsert_supported_mock):
|
||||
params = mock.MagicMock()
|
||||
|
||||
is_upsert_supported_mock.return_value = False
|
||||
|
||||
self.assertRaises(
|
||||
FtdInvalidOperationNameError,
|
||||
self._resource.upsert_object, 'upsertFoo', params
|
||||
)
|
||||
|
||||
self._conn.get_model_spec.assert_called_once_with('Foo')
|
||||
get_operation_mock.assert_called_once_with('Foo')
|
||||
is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
|
||||
find_object.assert_not_called()
|
||||
add_mock.assert_not_called()
|
||||
edit_mock.assert_not_called()
|
||||
|
||||
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
|
||||
@mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
|
||||
@mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
|
||||
@mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
|
||||
@mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
|
||||
def test_upsert_object_when_model_not_supported(self, edit_mock, add_mock, find_object, get_operation_mock,
|
||||
is_upsert_supported_mock):
|
||||
params = mock.MagicMock()
|
||||
self._conn.get_model_spec.return_value = None
|
||||
|
||||
self.assertRaises(
|
||||
FtdInvalidOperationNameError,
|
||||
self._resource.upsert_object, 'upsertNonExisting', params
|
||||
)
|
||||
|
||||
self._conn.get_model_spec.assert_called_once_with('NonExisting')
|
||||
get_operation_mock.assert_not_called()
|
||||
is_upsert_supported_mock.assert_not_called()
|
||||
find_object.assert_not_called()
|
||||
add_mock.assert_not_called()
|
||||
edit_mock.assert_not_called()
|
||||
|
||||
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.equal_objects')
|
||||
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
|
||||
@mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
|
||||
@mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
|
||||
@mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
|
||||
@mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
|
||||
def test_upsert_object_with_fatal_error_during_edit(self, edit_mock, add_mock, find_object, get_operation_mock,
|
||||
is_upsert_supported_mock, equal_objects_mock):
|
||||
params = mock.MagicMock()
|
||||
existing_obj = mock.MagicMock()
|
||||
|
||||
is_upsert_supported_mock.return_value = True
|
||||
find_object.return_value = existing_obj
|
||||
equal_objects_mock.return_value = False
|
||||
edit_mock.side_effect = FtdConfigurationError("Some object edit error")
|
||||
|
||||
self.assertRaises(
|
||||
FtdConfigurationError,
|
||||
self._resource.upsert_object, 'upsertFoo', params
|
||||
)
|
||||
|
||||
is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
|
||||
self._conn.get_model_spec.assert_called_once_with('Foo')
|
||||
get_operation_mock.assert_called_once_with('Foo')
|
||||
find_object.assert_called_once_with('Foo', params)
|
||||
add_mock.assert_not_called()
|
||||
edit_mock.assert_called_once_with(get_operation_mock.return_value, existing_obj, params)
|
||||
|
||||
@mock.patch('ansible_collections.community.general.plugins.module_utils.network.ftd.configuration.OperationChecker.is_upsert_operation_supported')
|
||||
@mock.patch.object(BaseConfigurationResource, "get_operation_specs_by_model_name")
|
||||
@mock.patch.object(BaseConfigurationResource, "_find_object_matching_params")
|
||||
@mock.patch.object(BaseConfigurationResource, "_add_upserted_object")
|
||||
@mock.patch.object(BaseConfigurationResource, "_edit_upserted_object")
|
||||
def test_upsert_object_with_fatal_error_during_add(self, edit_mock, add_mock, find_object, get_operation_mock,
|
||||
is_upsert_supported_mock):
|
||||
params = mock.MagicMock()
|
||||
|
||||
is_upsert_supported_mock.return_value = True
|
||||
find_object.return_value = None
|
||||
|
||||
error = FtdConfigurationError("Obj duplication error")
|
||||
add_mock.side_effect = error
|
||||
|
||||
self.assertRaises(
|
||||
FtdConfigurationError,
|
||||
self._resource.upsert_object, 'upsertFoo', params
|
||||
)
|
||||
|
||||
is_upsert_supported_mock.assert_called_once_with(get_operation_mock.return_value)
|
||||
self._conn.get_model_spec.assert_called_once_with('Foo')
|
||||
get_operation_mock.assert_called_once_with('Foo')
|
||||
find_object.assert_called_once_with('Foo', params)
|
||||
add_mock.assert_called_once_with(get_operation_mock.return_value, params)
|
||||
edit_mock.assert_not_called()
|
||||
|
||||
|
||||
# functional tests below
|
||||
class TestUpsertOperationFunctionalTests(object):
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def connection_mock(self, mocker):
|
||||
connection_class_mock = mocker.patch('ansible_collections.community.general.plugins.modules.network.ftd.ftd_configuration.Connection')
|
||||
connection_instance = connection_class_mock.return_value
|
||||
connection_instance.validate_data.return_value = True, None
|
||||
connection_instance.validate_query_params.return_value = True, None
|
||||
connection_instance.validate_path_params.return_value = True, None
|
||||
return connection_instance
|
||||
|
||||
def test_module_should_create_object_when_upsert_operation_and_object_does_not_exist(self, connection_mock):
|
||||
url = '/test'
|
||||
|
||||
operations = {
|
||||
'getObjectList': {
|
||||
'method': HTTPMethod.GET,
|
||||
'url': url,
|
||||
'modelName': 'Object',
|
||||
'returnMultipleItems': True},
|
||||
'addObject': {
|
||||
'method': HTTPMethod.POST,
|
||||
'modelName': 'Object',
|
||||
'url': url},
|
||||
'editObject': {
|
||||
'method': HTTPMethod.PUT,
|
||||
'modelName': 'Object',
|
||||
'url': '/test/{objId}'},
|
||||
'otherObjectOperation': {
|
||||
'method': HTTPMethod.GET,
|
||||
'modelName': 'Object',
|
||||
'url': '/test/{objId}',
|
||||
'returnMultipleItems': False
|
||||
}
|
||||
}
|
||||
|
||||
def get_operation_spec(name):
|
||||
return operations[name]
|
||||
|
||||
def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
|
||||
if http_method == HTTPMethod.POST:
|
||||
assert url_path == url
|
||||
assert body_params == params['data']
|
||||
assert query_params == {}
|
||||
assert path_params == params['path_params']
|
||||
return {
|
||||
ResponseParams.SUCCESS: True,
|
||||
ResponseParams.RESPONSE: ADD_RESPONSE
|
||||
}
|
||||
elif http_method == HTTPMethod.GET:
|
||||
return {
|
||||
ResponseParams.SUCCESS: True,
|
||||
ResponseParams.RESPONSE: {'items': []}
|
||||
}
|
||||
else:
|
||||
assert False
|
||||
|
||||
connection_mock.get_operation_spec = get_operation_spec
|
||||
|
||||
connection_mock.get_operation_specs_by_model_name.return_value = operations
|
||||
connection_mock.send_request = request_handler
|
||||
params = {
|
||||
'operation': 'upsertObject',
|
||||
'data': {'id': '123', 'name': 'testObject', 'type': 'object'},
|
||||
'path_params': {'objId': '123'},
|
||||
'register_as': 'test_var'
|
||||
}
|
||||
|
||||
result = self._resource_execute_operation(params, connection=connection_mock)
|
||||
|
||||
assert ADD_RESPONSE == result
|
||||
|
||||
def test_module_should_fail_when_no_model(self, connection_mock):
|
||||
connection_mock.get_model_spec.return_value = None
|
||||
params = {
|
||||
'operation': 'upsertObject',
|
||||
'data': {'id': '123', 'name': 'testObject', 'type': 'object'},
|
||||
'path_params': {'objId': '123'},
|
||||
'register_as': 'test_var'
|
||||
}
|
||||
|
||||
with pytest.raises(FtdInvalidOperationNameError) as exc_info:
|
||||
self._resource_execute_operation(params, connection=connection_mock)
|
||||
assert 'upsertObject' == exc_info.value.operation_name
|
||||
|
||||
def test_module_should_fail_when_no_add_operation_and_no_object(self, connection_mock):
|
||||
url = '/test'
|
||||
|
||||
operations = {
|
||||
'getObjectList': {
|
||||
'method': HTTPMethod.GET,
|
||||
'url': url,
|
||||
'modelName': 'Object',
|
||||
'returnMultipleItems': True},
|
||||
'editObject': {
|
||||
'method': HTTPMethod.PUT,
|
||||
'modelName': 'Object',
|
||||
'url': '/test/{objId}'},
|
||||
'otherObjectOperation': {
|
||||
'method': HTTPMethod.GET,
|
||||
'modelName': 'Object',
|
||||
'url': '/test/{objId}',
|
||||
'returnMultipleItems': False
|
||||
}}
|
||||
|
||||
def get_operation_spec(name):
|
||||
return operations[name]
|
||||
|
||||
connection_mock.get_operation_spec = get_operation_spec
|
||||
|
||||
connection_mock.get_operation_specs_by_model_name.return_value = operations
|
||||
connection_mock.send_request.return_value = {
|
||||
ResponseParams.SUCCESS: True,
|
||||
ResponseParams.RESPONSE: {'items': []}
|
||||
}
|
||||
params = {
|
||||
'operation': 'upsertObject',
|
||||
'data': {'id': '123', 'name': 'testObject', 'type': 'object'},
|
||||
'path_params': {'objId': '123'},
|
||||
'register_as': 'test_var'
|
||||
}
|
||||
|
||||
with pytest.raises(FtdConfigurationError) as exc_info:
|
||||
self._resource_execute_operation(params, connection=connection_mock)
|
||||
assert ADD_OPERATION_NOT_SUPPORTED_ERROR in str(exc_info.value)
|
||||
|
||||
# test when object exists but with different fields(except id)
|
||||
def test_module_should_update_object_when_upsert_operation_and_object_exists(self, connection_mock):
|
||||
url = '/test'
|
||||
obj_id = '456'
|
||||
version = 'test_version'
|
||||
url_with_id_templ = '{0}/{1}'.format(url, '{objId}')
|
||||
|
||||
new_value = '0000'
|
||||
old_value = '1111'
|
||||
params = {
|
||||
'operation': 'upsertObject',
|
||||
'data': {'name': 'testObject', 'value': new_value, 'type': 'object'},
|
||||
'register_as': 'test_var'
|
||||
}
|
||||
|
||||
def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
|
||||
if http_method == HTTPMethod.POST:
|
||||
assert url_path == url
|
||||
assert body_params == params['data']
|
||||
assert query_params == {}
|
||||
assert path_params == {}
|
||||
return {
|
||||
ResponseParams.SUCCESS: False,
|
||||
ResponseParams.RESPONSE: DUPLICATE_NAME_ERROR_MESSAGE,
|
||||
ResponseParams.STATUS_CODE: UNPROCESSABLE_ENTITY_STATUS
|
||||
}
|
||||
elif http_method == HTTPMethod.GET:
|
||||
is_get_list_req = url_path == url
|
||||
is_get_req = url_path == url_with_id_templ
|
||||
assert is_get_req or is_get_list_req
|
||||
|
||||
if is_get_list_req:
|
||||
assert body_params == {}
|
||||
assert query_params == {QueryParams.FILTER: 'name:testObject', 'limit': 10, 'offset': 0}
|
||||
assert path_params == {}
|
||||
elif is_get_req:
|
||||
assert body_params == {}
|
||||
assert query_params == {}
|
||||
assert path_params == {'objId': obj_id}
|
||||
|
||||
return {
|
||||
ResponseParams.SUCCESS: True,
|
||||
ResponseParams.RESPONSE: {
|
||||
'items': [
|
||||
{'name': 'testObject', 'value': old_value, 'type': 'object', 'id': obj_id,
|
||||
'version': version}
|
||||
]
|
||||
}
|
||||
}
|
||||
elif http_method == HTTPMethod.PUT:
|
||||
assert url_path == url_with_id_templ
|
||||
return {
|
||||
ResponseParams.SUCCESS: True,
|
||||
ResponseParams.RESPONSE: body_params
|
||||
}
|
||||
else:
|
||||
assert False
|
||||
|
||||
operations = {
|
||||
'getObjectList': {'method': HTTPMethod.GET, 'url': url, 'modelName': 'Object', 'returnMultipleItems': True},
|
||||
'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': url},
|
||||
'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': url_with_id_templ},
|
||||
'otherObjectOperation': {
|
||||
'method': HTTPMethod.GET,
|
||||
'modelName': 'Object',
|
||||
'url': url_with_id_templ,
|
||||
'returnMultipleItems': False}
|
||||
}
|
||||
|
||||
def get_operation_spec(name):
|
||||
return operations[name]
|
||||
|
||||
connection_mock.get_operation_spec = get_operation_spec
|
||||
connection_mock.get_operation_specs_by_model_name.return_value = operations
|
||||
|
||||
connection_mock.send_request = request_handler
|
||||
expected_val = {'name': 'testObject', 'value': new_value, 'type': 'object', 'id': obj_id, 'version': version}
|
||||
|
||||
result = self._resource_execute_operation(params, connection=connection_mock)
|
||||
|
||||
assert expected_val == result
|
||||
|
||||
# test when object exists and all fields have the same value
|
||||
def test_module_should_not_update_object_when_upsert_operation_and_object_exists_with_the_same_fields(
|
||||
self, connection_mock):
|
||||
url = '/test'
|
||||
url_with_id_templ = '{0}/{1}'.format(url, '{objId}')
|
||||
|
||||
params = {
|
||||
'operation': 'upsertObject',
|
||||
'data': {'name': 'testObject', 'value': '3333', 'type': 'object'},
|
||||
'register_as': 'test_var'
|
||||
}
|
||||
|
||||
expected_val = copy.deepcopy(params['data'])
|
||||
expected_val['version'] = 'test_version'
|
||||
expected_val['id'] = 'test_id'
|
||||
|
||||
def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
|
||||
if http_method == HTTPMethod.POST:
|
||||
assert url_path == url
|
||||
assert body_params == params['data']
|
||||
assert query_params == {}
|
||||
assert path_params == {}
|
||||
return {
|
||||
ResponseParams.SUCCESS: False,
|
||||
ResponseParams.RESPONSE: DUPLICATE_NAME_ERROR_MESSAGE,
|
||||
ResponseParams.STATUS_CODE: UNPROCESSABLE_ENTITY_STATUS
|
||||
}
|
||||
elif http_method == HTTPMethod.GET:
|
||||
assert url_path == url
|
||||
assert body_params == {}
|
||||
assert query_params == {QueryParams.FILTER: 'name:testObject', 'limit': 10, 'offset': 0}
|
||||
assert path_params == {}
|
||||
|
||||
return {
|
||||
ResponseParams.SUCCESS: True,
|
||||
ResponseParams.RESPONSE: {
|
||||
'items': [expected_val]
|
||||
}
|
||||
}
|
||||
else:
|
||||
assert False
|
||||
|
||||
operations = {
|
||||
'getObjectList': {'method': HTTPMethod.GET, 'modelName': 'Object', 'url': url, 'returnMultipleItems': True},
|
||||
'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': url},
|
||||
'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': url_with_id_templ},
|
||||
'otherObjectOperation': {
|
||||
'method': HTTPMethod.GET,
|
||||
'modelName': 'Object',
|
||||
'url': url_with_id_templ,
|
||||
'returnMultipleItems': False}
|
||||
}
|
||||
|
||||
def get_operation_spec(name):
|
||||
return operations[name]
|
||||
|
||||
connection_mock.get_operation_spec = get_operation_spec
|
||||
connection_mock.get_operation_specs_by_model_name.return_value = operations
|
||||
connection_mock.send_request = request_handler
|
||||
|
||||
result = self._resource_execute_operation(params, connection=connection_mock)
|
||||
|
||||
assert expected_val == result
|
||||
|
||||
def test_module_should_fail_when_upsert_operation_is_not_supported(self, connection_mock):
|
||||
connection_mock.get_operation_specs_by_model_name.return_value = {
|
||||
'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': '/test'},
|
||||
'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': '/test/{objId}'},
|
||||
'otherObjectOperation': {
|
||||
'method': HTTPMethod.GET,
|
||||
'modelName': 'Object',
|
||||
'url': '/test/{objId}',
|
||||
'returnMultipleItems': False}
|
||||
}
|
||||
operation_name = 'upsertObject'
|
||||
params = {
|
||||
'operation': operation_name,
|
||||
'data': {'id': '123', 'name': 'testObject', 'type': 'object'},
|
||||
'path_params': {'objId': '123'},
|
||||
'register_as': 'test_var'
|
||||
}
|
||||
|
||||
result = self._resource_execute_operation_with_expected_failure(
|
||||
expected_exception_class=FtdInvalidOperationNameError,
|
||||
params=params, connection=connection_mock)
|
||||
|
||||
connection_mock.send_request.assert_not_called()
|
||||
assert operation_name == result.operation_name
|
||||
|
||||
# when create operation raised FtdConfigurationError exception without id and version
|
||||
def test_module_should_fail_when_upsert_operation_and_failed_create_without_id_and_version(self, connection_mock):
|
||||
url = '/test'
|
||||
url_with_id_templ = '{0}/{1}'.format(url, '{objId}')
|
||||
|
||||
params = {
|
||||
'operation': 'upsertObject',
|
||||
'data': {'name': 'testObject', 'value': '3333', 'type': 'object'},
|
||||
'register_as': 'test_var'
|
||||
}
|
||||
|
||||
def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
|
||||
if http_method == HTTPMethod.POST:
|
||||
assert url_path == url
|
||||
assert body_params == params['data']
|
||||
assert query_params == {}
|
||||
assert path_params == {}
|
||||
return {
|
||||
ResponseParams.SUCCESS: False,
|
||||
ResponseParams.RESPONSE: DUPLICATE_NAME_ERROR_MESSAGE,
|
||||
ResponseParams.STATUS_CODE: UNPROCESSABLE_ENTITY_STATUS
|
||||
}
|
||||
elif http_method == HTTPMethod.GET:
|
||||
assert url_path == url
|
||||
assert body_params == {}
|
||||
assert query_params == {QueryParams.FILTER: 'name:testObject', 'limit': 10, 'offset': 0}
|
||||
assert path_params == {}
|
||||
|
||||
return {
|
||||
ResponseParams.SUCCESS: True,
|
||||
ResponseParams.RESPONSE: {
|
||||
'items': []
|
||||
}
|
||||
}
|
||||
else:
|
||||
assert False
|
||||
|
||||
operations = {
|
||||
'getObjectList': {'method': HTTPMethod.GET, 'modelName': 'Object', 'url': url, 'returnMultipleItems': True},
|
||||
'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': url},
|
||||
'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': url_with_id_templ},
|
||||
'otherObjectOperation': {
|
||||
'method': HTTPMethod.GET,
|
||||
'modelName': 'Object',
|
||||
'url': url_with_id_templ,
|
||||
'returnMultipleItems': False}
|
||||
}
|
||||
|
||||
def get_operation_spec(name):
|
||||
return operations[name]
|
||||
|
||||
connection_mock.get_operation_spec = get_operation_spec
|
||||
connection_mock.get_operation_specs_by_model_name.return_value = operations
|
||||
connection_mock.send_request = request_handler
|
||||
|
||||
result = self._resource_execute_operation_with_expected_failure(
|
||||
expected_exception_class=FtdServerError,
|
||||
params=params, connection=connection_mock)
|
||||
|
||||
assert result.code == 422
|
||||
assert result.response == 'Validation failed due to a duplicate name'
|
||||
|
||||
def test_module_should_fail_when_upsert_operation_and_failed_update_operation(self, connection_mock):
|
||||
url = '/test'
|
||||
obj_id = '456'
|
||||
version = 'test_version'
|
||||
url_with_id_templ = '{0}/{1}'.format(url, '{objId}')
|
||||
|
||||
error_code = 404
|
||||
|
||||
new_value = '0000'
|
||||
old_value = '1111'
|
||||
params = {
|
||||
'operation': 'upsertObject',
|
||||
'data': {'name': 'testObject', 'value': new_value, 'type': 'object'},
|
||||
'register_as': 'test_var'
|
||||
}
|
||||
|
||||
error_msg = 'test error'
|
||||
|
||||
def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
|
||||
if http_method == HTTPMethod.POST:
|
||||
assert url_path == url
|
||||
assert body_params == params['data']
|
||||
assert query_params == {}
|
||||
assert path_params == {}
|
||||
return {
|
||||
ResponseParams.SUCCESS: False,
|
||||
ResponseParams.RESPONSE: DUPLICATE_NAME_ERROR_MESSAGE,
|
||||
ResponseParams.STATUS_CODE: UNPROCESSABLE_ENTITY_STATUS
|
||||
}
|
||||
elif http_method == HTTPMethod.GET:
|
||||
is_get_list_req = url_path == url
|
||||
is_get_req = url_path == url_with_id_templ
|
||||
assert is_get_req or is_get_list_req
|
||||
|
||||
if is_get_list_req:
|
||||
assert body_params == {}
|
||||
assert query_params == {QueryParams.FILTER: 'name:testObject', 'limit': 10, 'offset': 0}
|
||||
elif is_get_req:
|
||||
assert body_params == {}
|
||||
assert query_params == {}
|
||||
assert path_params == {'objId': obj_id}
|
||||
|
||||
return {
|
||||
ResponseParams.SUCCESS: True,
|
||||
ResponseParams.RESPONSE: {
|
||||
'items': [
|
||||
{'name': 'testObject', 'value': old_value, 'type': 'object', 'id': obj_id,
|
||||
'version': version}
|
||||
]
|
||||
}
|
||||
}
|
||||
elif http_method == HTTPMethod.PUT:
|
||||
assert url_path == url_with_id_templ
|
||||
raise FtdServerError(error_msg, error_code)
|
||||
else:
|
||||
assert False
|
||||
|
||||
operations = {
|
||||
'getObjectList': {'method': HTTPMethod.GET, 'modelName': 'Object', 'url': url, 'returnMultipleItems': True},
|
||||
'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': url},
|
||||
'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': url_with_id_templ},
|
||||
'otherObjectOperation': {
|
||||
'method': HTTPMethod.GET,
|
||||
'modelName': 'Object',
|
||||
'url': url_with_id_templ,
|
||||
'returnMultipleItems': False}
|
||||
}
|
||||
|
||||
def get_operation_spec(name):
|
||||
return operations[name]
|
||||
|
||||
connection_mock.get_operation_spec = get_operation_spec
|
||||
connection_mock.get_operation_specs_by_model_name.return_value = operations
|
||||
connection_mock.send_request = request_handler
|
||||
|
||||
result = self._resource_execute_operation_with_expected_failure(
|
||||
expected_exception_class=FtdServerError,
|
||||
params=params, connection=connection_mock)
|
||||
|
||||
assert result.code == error_code
|
||||
assert result.response == error_msg
|
||||
|
||||
def test_module_should_fail_when_upsert_operation_and_invalid_data_for_create_operation(self, connection_mock):
|
||||
new_value = '0000'
|
||||
params = {
|
||||
'operation': 'upsertObject',
|
||||
'data': {'name': 'testObject', 'value': new_value, 'type': 'object'},
|
||||
'register_as': 'test_var'
|
||||
}
|
||||
|
||||
connection_mock.send_request.assert_not_called()
|
||||
|
||||
operations = {
|
||||
'getObjectList': {
|
||||
'method': HTTPMethod.GET,
|
||||
'modelName': 'Object',
|
||||
'url': 'sd',
|
||||
'returnMultipleItems': True},
|
||||
'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': 'sdf'},
|
||||
'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': 'sadf'},
|
||||
'otherObjectOperation': {
|
||||
'method': HTTPMethod.GET,
|
||||
'modelName': 'Object',
|
||||
'url': 'sdfs',
|
||||
'returnMultipleItems': False}
|
||||
}
|
||||
|
||||
def get_operation_spec(name):
|
||||
return operations[name]
|
||||
|
||||
connection_mock.get_operation_spec = get_operation_spec
|
||||
connection_mock.get_operation_specs_by_model_name.return_value = operations
|
||||
|
||||
report = {
|
||||
'required': ['objects[0].type'],
|
||||
'invalid_type': [
|
||||
{
|
||||
'path': 'objects[3].id',
|
||||
'expected_type': 'string',
|
||||
'actually_value': 1
|
||||
}
|
||||
]
|
||||
}
|
||||
connection_mock.validate_data.return_value = (False, json.dumps(report, sort_keys=True, indent=4))
|
||||
key = 'Invalid data provided'
|
||||
|
||||
result = self._resource_execute_operation_with_expected_failure(
|
||||
expected_exception_class=ValidationError,
|
||||
params=params, connection=connection_mock)
|
||||
|
||||
assert len(result.args) == 1
|
||||
assert key in result.args[0]
|
||||
assert json.loads(result.args[0][key]) == {
|
||||
'invalid_type': [{'actually_value': 1, 'expected_type': 'string', 'path': 'objects[3].id'}],
|
||||
'required': ['objects[0].type']
|
||||
}
|
||||
|
||||
def test_module_should_fail_when_upsert_operation_and_few_objects_found_by_filter(self, connection_mock):
|
||||
url = '/test'
|
||||
url_with_id_templ = '{0}/{1}'.format(url, '{objId}')
|
||||
|
||||
sample_obj = {'name': 'testObject', 'value': '3333', 'type': 'object'}
|
||||
params = {
|
||||
'operation': 'upsertObject',
|
||||
'data': sample_obj,
|
||||
'register_as': 'test_var'
|
||||
}
|
||||
|
||||
def request_handler(url_path=None, http_method=None, body_params=None, path_params=None, query_params=None):
|
||||
if http_method == HTTPMethod.POST:
|
||||
assert url_path == url
|
||||
assert body_params == params['data']
|
||||
assert query_params == {}
|
||||
assert path_params == {}
|
||||
return {
|
||||
ResponseParams.SUCCESS: False,
|
||||
ResponseParams.RESPONSE: DUPLICATE_NAME_ERROR_MESSAGE,
|
||||
ResponseParams.STATUS_CODE: UNPROCESSABLE_ENTITY_STATUS
|
||||
}
|
||||
elif http_method == HTTPMethod.GET:
|
||||
assert url_path == url
|
||||
assert body_params == {}
|
||||
assert query_params == {QueryParams.FILTER: 'name:testObject', 'limit': 10, 'offset': 0}
|
||||
assert path_params == {}
|
||||
|
||||
return {
|
||||
ResponseParams.SUCCESS: True,
|
||||
ResponseParams.RESPONSE: {
|
||||
'items': [sample_obj, sample_obj]
|
||||
}
|
||||
}
|
||||
else:
|
||||
assert False
|
||||
|
||||
operations = {
|
||||
'getObjectList': {'method': HTTPMethod.GET, 'modelName': 'Object', 'url': url, 'returnMultipleItems': True},
|
||||
'addObject': {'method': HTTPMethod.POST, 'modelName': 'Object', 'url': url},
|
||||
'editObject': {'method': HTTPMethod.PUT, 'modelName': 'Object', 'url': url_with_id_templ},
|
||||
'otherObjectOperation': {
|
||||
'method': HTTPMethod.GET,
|
||||
'modelName': 'Object',
|
||||
'url': url_with_id_templ,
|
||||
'returnMultipleItems': False}
|
||||
}
|
||||
|
||||
def get_operation_spec(name):
|
||||
return operations[name]
|
||||
|
||||
connection_mock.get_operation_spec = get_operation_spec
|
||||
connection_mock.get_operation_specs_by_model_name.return_value = operations
|
||||
connection_mock.send_request = request_handler
|
||||
|
||||
result = self._resource_execute_operation_with_expected_failure(
|
||||
expected_exception_class=FtdConfigurationError,
|
||||
params=params, connection=connection_mock)
|
||||
|
||||
assert result.msg is MULTIPLE_DUPLICATES_FOUND_ERROR
|
||||
assert result.obj is None
|
||||
|
||||
@staticmethod
|
||||
def _resource_execute_operation(params, connection):
|
||||
|
||||
with mock.patch.object(BaseConfigurationResource, '_fetch_system_info') as fetch_system_info_mock:
|
||||
fetch_system_info_mock.return_value = {
|
||||
'databaseInfo': {
|
||||
'buildVersion': '6.3.0'
|
||||
}
|
||||
}
|
||||
resource = BaseConfigurationResource(connection)
|
||||
op_name = params['operation']
|
||||
|
||||
resp = resource.execute_operation(op_name, params)
|
||||
|
||||
return resp
|
||||
|
||||
def _resource_execute_operation_with_expected_failure(self, expected_exception_class, params, connection):
|
||||
with pytest.raises(expected_exception_class) as ex:
|
||||
self._resource_execute_operation(params, connection)
|
||||
# 'ex' here is an instance of '_pytest._code.code.ExceptionInfo', not of <expected_exception_class>.
|
||||
# The actual <expected_exception_class> instance is stored in the 'value' attribute of 'ex', which is why we return
|
||||
# 'ex.value' here, so it can be inspected by the test later.
|
||||
return ex.value
|
|
@ -1,176 +0,0 @@
|
|||
|
||||
# Copyright (c) 2017 Citrix Systems
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import Mock
|
||||
|
||||
|
||||
from ansible_collections.community.general.plugins.module_utils.network.netscaler.netscaler import (ConfigProxy, get_immutables_intersection,
|
||||
ensure_feature_is_enabled, log, loglines)
|
||||
|
||||
|
||||
class TestNetscalerConfigProxy(unittest.TestCase):
|
||||
|
||||
def test_values_copied_to_actual(self):
|
||||
actual = Mock()
|
||||
client = Mock()
|
||||
values = {
|
||||
'some_key': 'some_value',
|
||||
}
|
||||
ConfigProxy(
|
||||
actual=actual,
|
||||
client=client,
|
||||
attribute_values_dict=values,
|
||||
readwrite_attrs=['some_key']
|
||||
)
|
||||
self.assertEqual(actual.some_key, values['some_key'], msg='Failed to pass correct value from values dict')
|
||||
|
||||
def test_none_values_not_copied_to_actual(self):
|
||||
actual = Mock()
|
||||
client = Mock()
|
||||
actual.key_for_none = 'initial'
|
||||
print('actual %s' % actual.key_for_none)
|
||||
values = {
|
||||
'key_for_none': None,
|
||||
}
|
||||
print('value %s' % actual.key_for_none)
|
||||
ConfigProxy(
|
||||
actual=actual,
|
||||
client=client,
|
||||
attribute_values_dict=values,
|
||||
readwrite_attrs=['key_for_none']
|
||||
)
|
||||
self.assertEqual(actual.key_for_none, 'initial')
|
||||
|
||||
def test_missing_from_values_dict_not_copied_to_actual(self):
|
||||
actual = Mock()
|
||||
client = Mock()
|
||||
values = {
|
||||
'irrelevant_key': 'irrelevant_value',
|
||||
}
|
||||
print('value %s' % actual.key_for_none)
|
||||
ConfigProxy(
|
||||
actual=actual,
|
||||
client=client,
|
||||
attribute_values_dict=values,
|
||||
readwrite_attrs=['key_for_none']
|
||||
)
|
||||
print('none %s' % getattr(actual, 'key_for_none'))
|
||||
self.assertIsInstance(actual.key_for_none, Mock)
|
||||
|
||||
def test_bool_yes_no_transform(self):
|
||||
actual = Mock()
|
||||
client = Mock()
|
||||
values = {
|
||||
'yes_key': True,
|
||||
'no_key': False,
|
||||
}
|
||||
transforms = {
|
||||
'yes_key': ['bool_yes_no'],
|
||||
'no_key': ['bool_yes_no']
|
||||
}
|
||||
ConfigProxy(
|
||||
actual=actual,
|
||||
client=client,
|
||||
attribute_values_dict=values,
|
||||
readwrite_attrs=['yes_key', 'no_key'],
|
||||
transforms=transforms,
|
||||
)
|
||||
actual_values = [actual.yes_key, actual.no_key]
|
||||
self.assertListEqual(actual_values, ['YES', 'NO'])
|
||||
|
||||
def test_bool_on_off_transform(self):
|
||||
actual = Mock()
|
||||
client = Mock()
|
||||
values = {
|
||||
'on_key': True,
|
||||
'off_key': False,
|
||||
}
|
||||
transforms = {
|
||||
'on_key': ['bool_on_off'],
|
||||
'off_key': ['bool_on_off']
|
||||
}
|
||||
ConfigProxy(
|
||||
actual=actual,
|
||||
client=client,
|
||||
attribute_values_dict=values,
|
||||
readwrite_attrs=['on_key', 'off_key'],
|
||||
transforms=transforms,
|
||||
)
|
||||
actual_values = [actual.on_key, actual.off_key]
|
||||
self.assertListEqual(actual_values, ['ON', 'OFF'])
|
||||
|
||||
def test_callable_transform(self):
|
||||
actual = Mock()
|
||||
client = Mock()
|
||||
values = {
|
||||
'transform_key': 'hello',
|
||||
'transform_chain': 'hello',
|
||||
}
|
||||
transforms = {
|
||||
'transform_key': [lambda v: v.upper()],
|
||||
'transform_chain': [lambda v: v.upper(), lambda v: v[:4]]
|
||||
}
|
||||
ConfigProxy(
|
||||
actual=actual,
|
||||
client=client,
|
||||
attribute_values_dict=values,
|
||||
readwrite_attrs=['transform_key', 'transform_chain'],
|
||||
transforms=transforms,
|
||||
)
|
||||
actual_values = [actual.transform_key, actual.transform_chain]
|
||||
self.assertListEqual(actual_values, ['HELLO', 'HELL'])
|
||||
|
||||
|
||||
class TestNetscalerModuleUtils(unittest.TestCase):
|
||||
|
||||
def test_immutables_intersection(self):
|
||||
actual = Mock()
|
||||
client = Mock()
|
||||
values = {
|
||||
'mutable_key': 'some value',
|
||||
'immutable_key': 'some other value',
|
||||
}
|
||||
proxy = ConfigProxy(
|
||||
actual=actual,
|
||||
client=client,
|
||||
attribute_values_dict=values,
|
||||
readwrite_attrs=['mutable_key', 'immutable_key'],
|
||||
immutable_attrs=['immutable_key'],
|
||||
)
|
||||
keys_to_check = ['mutable_key', 'immutable_key', 'non_existant_key']
|
||||
result = get_immutables_intersection(proxy, keys_to_check)
|
||||
self.assertListEqual(result, ['immutable_key'])
|
||||
|
||||
def test_ensure_feature_is_enabled(self):
|
||||
client = Mock()
|
||||
attrs = {'get_enabled_features.return_value': ['GSLB']}
|
||||
client.configure_mock(**attrs)
|
||||
ensure_feature_is_enabled(client, 'GSLB')
|
||||
ensure_feature_is_enabled(client, 'LB')
|
||||
client.enable_features.assert_called_once_with('LB')
|
||||
|
||||
def test_log_function(self):
|
||||
messages = [
|
||||
'First message',
|
||||
'Second message',
|
||||
]
|
||||
log(messages[0])
|
||||
log(messages[1])
|
||||
self.assertListEqual(messages, loglines, msg='Log messages not recorded correctly')
|
|
@ -1,148 +0,0 @@
|
|||
#
|
||||
# (c) 2018 Extreme Networks Inc.
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import json
|
||||
|
||||
from mock import MagicMock, patch, call
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.community.general.plugins.module_utils.network.nos import nos
|
||||
|
||||
|
||||
class TestPluginCLIConfNOS(unittest.TestCase):
|
||||
""" Test class for NOS CLI Conf Methods
|
||||
"""
|
||||
|
||||
def test_get_connection_established(self):
|
||||
""" Test get_connection with established connection
|
||||
"""
|
||||
module = MagicMock()
|
||||
connection = nos.get_connection(module)
|
||||
self.assertEqual(connection, module.nos_connection)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.nos.nos.Connection')
|
||||
def test_get_connection_new(self, connection):
|
||||
""" Test get_connection with new connection
|
||||
"""
|
||||
socket_path = "little red riding hood"
|
||||
module = MagicMock(spec=[
|
||||
'fail_json',
|
||||
])
|
||||
module._socket_path = socket_path
|
||||
|
||||
connection().get_capabilities.return_value = '{"network_api": "cliconf"}'
|
||||
returned_connection = nos.get_connection(module)
|
||||
connection.assert_called_with(socket_path)
|
||||
self.assertEqual(returned_connection, module.nos_connection)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.nos.nos.Connection')
|
||||
def test_get_connection_incorrect_network_api(self, connection):
|
||||
""" Test get_connection with incorrect network_api response
|
||||
"""
|
||||
socket_path = "little red riding hood"
|
||||
module = MagicMock(spec=[
|
||||
'fail_json',
|
||||
])
|
||||
module._socket_path = socket_path
|
||||
module.fail_json.side_effect = TypeError
|
||||
|
||||
connection().get_capabilities.return_value = '{"network_api": "nope"}'
|
||||
|
||||
with self.assertRaises(TypeError):
|
||||
nos.get_connection(module)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.nos.nos.Connection')
|
||||
def test_get_capabilities(self, connection):
|
||||
""" Test get_capabilities
|
||||
"""
|
||||
socket_path = "little red riding hood"
|
||||
module = MagicMock(spec=[
|
||||
'fail_json',
|
||||
])
|
||||
module._socket_path = socket_path
|
||||
module.fail_json.side_effect = TypeError
|
||||
|
||||
capabilities = {'network_api': 'cliconf'}
|
||||
|
||||
connection().get_capabilities.return_value = json.dumps(capabilities)
|
||||
|
||||
capabilities_returned = nos.get_capabilities(module)
|
||||
|
||||
self.assertEqual(capabilities, capabilities_returned)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.nos.nos.Connection')
|
||||
def test_run_commands(self, connection):
|
||||
""" Test get_capabilities
|
||||
"""
|
||||
module = MagicMock()
|
||||
|
||||
commands = [
|
||||
'hello',
|
||||
'dolly',
|
||||
'well hello',
|
||||
'dolly',
|
||||
'its so nice to have you back',
|
||||
'where you belong',
|
||||
]
|
||||
|
||||
responses = [
|
||||
'Dolly, never go away again1',
|
||||
'Dolly, never go away again2',
|
||||
'Dolly, never go away again3',
|
||||
'Dolly, never go away again4',
|
||||
'Dolly, never go away again5',
|
||||
'Dolly, never go away again6',
|
||||
]
|
||||
|
||||
module.nos_connection.get.side_effect = responses
|
||||
|
||||
run_command_responses = nos.run_commands(module, commands)
|
||||
|
||||
calls = []
|
||||
|
||||
for command in commands:
|
||||
calls.append(call(
|
||||
command,
|
||||
None,
|
||||
None
|
||||
))
|
||||
|
||||
module.nos_connection.get.assert_has_calls(calls)
|
||||
|
||||
self.assertEqual(responses, run_command_responses)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.nos.nos.Connection')
|
||||
def test_load_config(self, connection):
|
||||
""" Test load_config
|
||||
"""
|
||||
module = MagicMock()
|
||||
|
||||
commands = [
|
||||
'what does it take',
|
||||
'to be',
|
||||
'number one?',
|
||||
'two is not a winner',
|
||||
'and three nobody remember',
|
||||
]
|
||||
|
||||
nos.load_config(module, commands)
|
||||
|
||||
module.nos_connection.edit_config.assert_called_once_with(commands)
|
|
@ -1,659 +0,0 @@
|
|||
# Copyright (c) 2017 Cisco and/or its affiliates.
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
|
||||
import json
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import patch
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.community.general.plugins.module_utils.network.nso import nso
|
||||
|
||||
|
||||
MODULE_PREFIX_MAP = '''
|
||||
{
|
||||
"ansible-nso": "an",
|
||||
"test": "test",
|
||||
"tailf-ncs": "ncs"
|
||||
}
|
||||
'''
|
||||
|
||||
|
||||
SCHEMA_DATA = {
|
||||
'/an:id-name-leaf': '''
|
||||
{
|
||||
"meta": {
|
||||
"prefix": "an",
|
||||
"namespace": "http://github.com/ansible/nso",
|
||||
"types": {
|
||||
"http://github.com/ansible/nso:id-name-t": [
|
||||
{
|
||||
"name": "http://github.com/ansible/nso:id-name-t",
|
||||
"enumeration": [
|
||||
{
|
||||
"label": "id-one"
|
||||
},
|
||||
{
|
||||
"label": "id-two"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "identityref"
|
||||
}
|
||||
]
|
||||
},
|
||||
"keypath": "/an:id-name-leaf"
|
||||
},
|
||||
"data": {
|
||||
"kind": "leaf",
|
||||
"type": {
|
||||
"namespace": "http://github.com/ansible/nso",
|
||||
"name": "id-name-t"
|
||||
},
|
||||
"name": "id-name-leaf",
|
||||
"qname": "an:id-name-leaf"
|
||||
}
|
||||
}''',
|
||||
'/an:id-name-values': '''
|
||||
{
|
||||
"meta": {
|
||||
"prefix": "an",
|
||||
"namespace": "http://github.com/ansible/nso",
|
||||
"types": {},
|
||||
"keypath": "/an:id-name-values"
|
||||
},
|
||||
"data": {
|
||||
"kind": "container",
|
||||
"name": "id-name-values",
|
||||
"qname": "an:id-name-values",
|
||||
"children": [
|
||||
{
|
||||
"kind": "list",
|
||||
"name": "id-name-value",
|
||||
"qname": "an:id-name-value",
|
||||
"key": [
|
||||
"name"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
''',
|
||||
'/an:id-name-values/id-name-value': '''
|
||||
{
|
||||
"meta": {
|
||||
"prefix": "an",
|
||||
"namespace": "http://github.com/ansible/nso",
|
||||
"types": {
|
||||
"http://github.com/ansible/nso:id-name-t": [
|
||||
{
|
||||
"name": "http://github.com/ansible/nso:id-name-t",
|
||||
"enumeration": [
|
||||
{
|
||||
"label": "id-one"
|
||||
},
|
||||
{
|
||||
"label": "id-two"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "identityref"
|
||||
}
|
||||
]
|
||||
},
|
||||
"keypath": "/an:id-name-values/id-name-value"
|
||||
},
|
||||
"data": {
|
||||
"kind": "list",
|
||||
"name": "id-name-value",
|
||||
"qname": "an:id-name-value",
|
||||
"key": [
|
||||
"name"
|
||||
],
|
||||
"children": [
|
||||
{
|
||||
"kind": "key",
|
||||
"name": "name",
|
||||
"qname": "an:name",
|
||||
"type": {
|
||||
"namespace": "http://github.com/ansible/nso",
|
||||
"name": "id-name-t"
|
||||
}
|
||||
},
|
||||
{
|
||||
"kind": "leaf",
|
||||
"type": {
|
||||
"primitive": true,
|
||||
"name": "string"
|
||||
},
|
||||
"name": "value",
|
||||
"qname": "an:value"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
''',
|
||||
'/test:test': '''
|
||||
{
|
||||
"meta": {
|
||||
"types": {
|
||||
"http://example.com/test:t15": [
|
||||
{
|
||||
"leaf_type":[
|
||||
{
|
||||
"name":"string"
|
||||
}
|
||||
],
|
||||
"list_type":[
|
||||
{
|
||||
"name":"http://example.com/test:t15",
|
||||
"leaf-list":true
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"data": {
|
||||
"kind": "list",
|
||||
"name":"test",
|
||||
"qname":"test:test",
|
||||
"key":["name"],
|
||||
"children": [
|
||||
{
|
||||
"kind": "key",
|
||||
"name": "name",
|
||||
"qname": "test:name",
|
||||
"type": {"name":"string","primitive":true}
|
||||
},
|
||||
{
|
||||
"kind": "choice",
|
||||
"name": "test-choice",
|
||||
"qname": "test:test-choice",
|
||||
"cases": [
|
||||
{
|
||||
"kind": "case",
|
||||
"name": "direct-child-case",
|
||||
"qname":"test:direct-child-case",
|
||||
"children":[
|
||||
{
|
||||
"kind": "leaf",
|
||||
"name": "direct-child",
|
||||
"qname": "test:direct-child",
|
||||
"type": {"name":"string","primitive":true}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"kind":"case","name":"nested-child-case","qname":"test:nested-child-case",
|
||||
"children": [
|
||||
{
|
||||
"kind": "choice",
|
||||
"name": "nested-choice",
|
||||
"qname": "test:nested-choice",
|
||||
"cases": [
|
||||
{
|
||||
"kind":"case","name":"nested-child","qname":"test:nested-child",
|
||||
"children": [
|
||||
{
|
||||
"kind": "leaf",
|
||||
"name":"nested-child",
|
||||
"qname":"test:nested-child",
|
||||
"type":{"name":"string","primitive":true}}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"kind":"leaf-list",
|
||||
"name":"device-list",
|
||||
"qname":"test:device-list",
|
||||
"type": {
|
||||
"namespace":"http://example.com/test",
|
||||
"name":"t15"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
''',
|
||||
'/test:test/device-list': '''
|
||||
{
|
||||
"meta": {
|
||||
"types": {
|
||||
"http://example.com/test:t15": [
|
||||
{
|
||||
"leaf_type":[
|
||||
{
|
||||
"name":"string"
|
||||
}
|
||||
],
|
||||
"list_type":[
|
||||
{
|
||||
"name":"http://example.com/test:t15",
|
||||
"leaf-list":true
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"data": {
|
||||
"kind":"leaf-list",
|
||||
"name":"device-list",
|
||||
"qname":"test:device-list",
|
||||
"type": {
|
||||
"namespace":"http://example.com/test",
|
||||
"name":"t15"
|
||||
}
|
||||
}
|
||||
}
|
||||
''',
|
||||
'/test:deps': '''
|
||||
{
|
||||
"meta": {
|
||||
},
|
||||
"data": {
|
||||
"kind":"container",
|
||||
"name":"deps",
|
||||
"qname":"test:deps",
|
||||
"children": [
|
||||
{
|
||||
"kind": "leaf",
|
||||
"type": {
|
||||
"primitive": true,
|
||||
"name": "string"
|
||||
},
|
||||
"name": "a",
|
||||
"qname": "test:a",
|
||||
"deps": ["/test:deps/c"]
|
||||
},
|
||||
{
|
||||
"kind": "leaf",
|
||||
"type": {
|
||||
"primitive": true,
|
||||
"name": "string"
|
||||
},
|
||||
"name": "b",
|
||||
"qname": "test:b",
|
||||
"deps": ["/test:deps/a"]
|
||||
},
|
||||
{
|
||||
"kind": "leaf",
|
||||
"type": {
|
||||
"primitive": true,
|
||||
"name": "string"
|
||||
},
|
||||
"name": "c",
|
||||
"qname": "test:c"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
'''
|
||||
}
|
||||
|
||||
|
||||
class MockResponse(object):
|
||||
def __init__(self, method, params, code, body, headers=None):
|
||||
if headers is None:
|
||||
headers = {}
|
||||
|
||||
self.method = method
|
||||
self.params = params
|
||||
|
||||
self.code = code
|
||||
self.body = body
|
||||
self.headers = dict(headers)
|
||||
|
||||
def read(self):
|
||||
return self.body
|
||||
|
||||
|
||||
def mock_call(calls, url, timeout, validate_certs, data=None, headers=None, method=None):
|
||||
result = calls[0]
|
||||
del calls[0]
|
||||
|
||||
request = json.loads(data)
|
||||
if result.method != request['method']:
|
||||
raise ValueError('expected method {0}({1}), got {2}({3})'.format(
|
||||
result.method, result.params,
|
||||
request['method'], request['params']))
|
||||
|
||||
for key, value in result.params.items():
|
||||
if key not in request['params']:
|
||||
raise ValueError('{0} not in parameters'.format(key))
|
||||
if value != request['params'][key]:
|
||||
raise ValueError('expected {0} to be {1}, got {2}'.format(
|
||||
key, value, request['params'][key]))
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def get_schema_response(path):
|
||||
return MockResponse(
|
||||
'get_schema', {'path': path}, 200, '{{"result": {0}}}'.format(
|
||||
SCHEMA_DATA[path]))
|
||||
|
||||
|
||||
class TestJsonRpc(unittest.TestCase):
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
|
||||
def test_exists(self, open_url_mock):
|
||||
calls = [
|
||||
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
|
||||
MockResponse('exists', {'path': '/exists'}, 200, '{"result": {"exists": true}}'),
|
||||
MockResponse('exists', {'path': '/not-exists'}, 200, '{"result": {"exists": false}}')
|
||||
]
|
||||
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
|
||||
client = nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False)
|
||||
self.assertEqual(True, client.exists('/exists'))
|
||||
self.assertEqual(False, client.exists('/not-exists'))
|
||||
|
||||
self.assertEqual(0, len(calls))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
|
||||
def test_exists_data_not_found(self, open_url_mock):
|
||||
calls = [
|
||||
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
|
||||
MockResponse('exists', {'path': '/list{missing-parent}/list{child}'}, 200, '{"error":{"type":"data.not_found"}}')
|
||||
]
|
||||
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
|
||||
client = nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False)
|
||||
self.assertEqual(False, client.exists('/list{missing-parent}/list{child}'))
|
||||
|
||||
self.assertEqual(0, len(calls))
|
||||
|
||||
|
||||
class TestValueBuilder(unittest.TestCase):
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
|
||||
def test_identityref_leaf(self, open_url_mock):
|
||||
calls = [
|
||||
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
|
||||
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
|
||||
get_schema_response('/an:id-name-leaf'),
|
||||
MockResponse('get_module_prefix_map', {}, 200, '{{"result": {0}}}'.format(MODULE_PREFIX_MAP))
|
||||
]
|
||||
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
|
||||
|
||||
parent = "/an:id-name-leaf"
|
||||
schema_data = json.loads(
|
||||
SCHEMA_DATA['/an:id-name-leaf'])
|
||||
schema = schema_data['data']
|
||||
|
||||
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
|
||||
vb.build(parent, None, 'ansible-nso:id-two', schema)
|
||||
values = list(vb.values)
|
||||
self.assertEqual(1, len(values))
|
||||
value = values[0]
|
||||
self.assertEqual(parent, value.path)
|
||||
self.assertEqual('set', value.state)
|
||||
self.assertEqual('an:id-two', value.value)
|
||||
|
||||
self.assertEqual(0, len(calls))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
|
||||
def test_identityref_key(self, open_url_mock):
|
||||
calls = [
|
||||
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
|
||||
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
|
||||
get_schema_response('/an:id-name-values/id-name-value'),
|
||||
MockResponse('get_module_prefix_map', {}, 200, '{{"result": {0}}}'.format(MODULE_PREFIX_MAP)),
|
||||
MockResponse('exists', {'path': '/an:id-name-values/id-name-value{an:id-one}'}, 200, '{"result": {"exists": true}}')
|
||||
]
|
||||
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
|
||||
|
||||
parent = "/an:id-name-values"
|
||||
schema_data = json.loads(
|
||||
SCHEMA_DATA['/an:id-name-values/id-name-value'])
|
||||
schema = schema_data['data']
|
||||
|
||||
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
|
||||
vb.build(parent, 'id-name-value', [{'name': 'ansible-nso:id-one', 'value': '1'}], schema)
|
||||
values = list(vb.values)
|
||||
self.assertEqual(1, len(values))
|
||||
value = values[0]
|
||||
self.assertEqual('{0}/id-name-value{{an:id-one}}/value'.format(parent), value.path)
|
||||
self.assertEqual('set', value.state)
|
||||
self.assertEqual('1', value.value)
|
||||
|
||||
self.assertEqual(0, len(calls))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
|
||||
def test_nested_choice(self, open_url_mock):
|
||||
calls = [
|
||||
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
|
||||
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
|
||||
get_schema_response('/test:test'),
|
||||
MockResponse('exists', {'path': '/test:test{direct}'}, 200, '{"result": {"exists": true}}'),
|
||||
MockResponse('exists', {'path': '/test:test{nested}'}, 200, '{"result": {"exists": true}}')
|
||||
]
|
||||
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
|
||||
|
||||
parent = "/test:test"
|
||||
schema_data = json.loads(
|
||||
SCHEMA_DATA['/test:test'])
|
||||
schema = schema_data['data']
|
||||
|
||||
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
|
||||
vb.build(parent, None, [{'name': 'direct', 'direct-child': 'direct-value'},
|
||||
{'name': 'nested', 'nested-child': 'nested-value'}], schema)
|
||||
values = list(vb.values)
|
||||
self.assertEqual(2, len(values))
|
||||
value = values[0]
|
||||
self.assertEqual('{0}{{direct}}/direct-child'.format(parent), value.path)
|
||||
self.assertEqual('set', value.state)
|
||||
self.assertEqual('direct-value', value.value)
|
||||
|
||||
value = values[1]
|
||||
self.assertEqual('{0}{{nested}}/nested-child'.format(parent), value.path)
|
||||
self.assertEqual('set', value.state)
|
||||
self.assertEqual('nested-value', value.value)
|
||||
|
||||
self.assertEqual(0, len(calls))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
|
||||
def test_leaf_list_type(self, open_url_mock):
|
||||
calls = [
|
||||
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.4"}'),
|
||||
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
|
||||
get_schema_response('/test:test')
|
||||
]
|
||||
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
|
||||
|
||||
parent = "/test:test"
|
||||
schema_data = json.loads(
|
||||
SCHEMA_DATA['/test:test'])
|
||||
schema = schema_data['data']
|
||||
|
||||
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
|
||||
vb.build(parent, None, {'device-list': ['one', 'two']}, schema)
|
||||
values = list(vb.values)
|
||||
self.assertEqual(1, len(values))
|
||||
value = values[0]
|
||||
self.assertEqual('{0}/device-list'.format(parent), value.path)
|
||||
self.assertEqual(['one', 'two'], value.value)
|
||||
|
||||
self.assertEqual(0, len(calls))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
|
||||
def test_leaf_list_type_45(self, open_url_mock):
|
||||
calls = [
|
||||
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
|
||||
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
|
||||
get_schema_response('/test:test/device-list')
|
||||
]
|
||||
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
|
||||
|
||||
parent = "/test:test"
|
||||
schema_data = json.loads(
|
||||
SCHEMA_DATA['/test:test'])
|
||||
schema = schema_data['data']
|
||||
|
||||
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
|
||||
vb.build(parent, None, {'device-list': ['one', 'two']}, schema)
|
||||
values = list(vb.values)
|
||||
self.assertEqual(3, len(values))
|
||||
value = values[0]
|
||||
self.assertEqual('{0}/device-list'.format(parent), value.path)
|
||||
self.assertEqual(nso.State.ABSENT, value.state)
|
||||
value = values[1]
|
||||
self.assertEqual('{0}/device-list{{one}}'.format(parent), value.path)
|
||||
self.assertEqual(nso.State.PRESENT, value.state)
|
||||
value = values[2]
|
||||
self.assertEqual('{0}/device-list{{two}}'.format(parent), value.path)
|
||||
self.assertEqual(nso.State.PRESENT, value.state)
|
||||
|
||||
self.assertEqual(0, len(calls))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
|
||||
def test_sort_by_deps(self, open_url_mock):
|
||||
calls = [
|
||||
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
|
||||
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
|
||||
get_schema_response('/test:deps')
|
||||
]
|
||||
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
|
||||
|
||||
parent = "/test:deps"
|
||||
schema_data = json.loads(
|
||||
SCHEMA_DATA['/test:deps'])
|
||||
schema = schema_data['data']
|
||||
|
||||
values = {
|
||||
'a': '1',
|
||||
'b': '2',
|
||||
'c': '3',
|
||||
}
|
||||
|
||||
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
|
||||
vb.build(parent, None, values, schema)
|
||||
values = list(vb.values)
|
||||
self.assertEqual(3, len(values))
|
||||
value = values[0]
|
||||
self.assertEqual('{0}/c'.format(parent), value.path)
|
||||
self.assertEqual('3', value.value)
|
||||
value = values[1]
|
||||
self.assertEqual('{0}/a'.format(parent), value.path)
|
||||
self.assertEqual('1', value.value)
|
||||
value = values[2]
|
||||
self.assertEqual('{0}/b'.format(parent), value.path)
|
||||
self.assertEqual('2', value.value)
|
||||
|
||||
self.assertEqual(0, len(calls))
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.nso.nso.open_url')
|
||||
def test_sort_by_deps_not_included(self, open_url_mock):
|
||||
calls = [
|
||||
MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5"}'),
|
||||
MockResponse('new_trans', {}, 200, '{"result": {"th": 1}}'),
|
||||
get_schema_response('/test:deps')
|
||||
]
|
||||
open_url_mock.side_effect = lambda *args, **kwargs: mock_call(calls, *args, **kwargs)
|
||||
|
||||
parent = "/test:deps"
|
||||
schema_data = json.loads(
|
||||
SCHEMA_DATA['/test:deps'])
|
||||
schema = schema_data['data']
|
||||
|
||||
values = {
|
||||
'a': '1',
|
||||
'b': '2'
|
||||
}
|
||||
|
||||
vb = nso.ValueBuilder(nso.JsonRpc('http://localhost:8080/jsonrpc', 10, False))
|
||||
vb.build(parent, None, values, schema)
|
||||
values = list(vb.values)
|
||||
self.assertEqual(2, len(values))
|
||||
value = values[0]
|
||||
self.assertEqual('{0}/a'.format(parent), value.path)
|
||||
self.assertEqual('1', value.value)
|
||||
value = values[1]
|
||||
self.assertEqual('{0}/b'.format(parent), value.path)
|
||||
self.assertEqual('2', value.value)
|
||||
|
||||
self.assertEqual(0, len(calls))
|
||||
|
||||
|
||||
class TestVerifyVersion(unittest.TestCase):
|
||||
def test_valid_versions(self):
|
||||
self.assertTrue(nso.verify_version_str('5.0', [(4, 6), (4, 5, 1)]))
|
||||
self.assertTrue(nso.verify_version_str('5.1.1', [(4, 6), (4, 5, 1)]))
|
||||
self.assertTrue(nso.verify_version_str('5.1.1.2', [(4, 6), (4, 5, 1)]))
|
||||
self.assertTrue(nso.verify_version_str('4.6', [(4, 6), (4, 5, 1)]))
|
||||
self.assertTrue(nso.verify_version_str('4.6.2', [(4, 6), (4, 5, 1)]))
|
||||
self.assertTrue(nso.verify_version_str('4.6.2.1', [(4, 6), (4, 5, 1)]))
|
||||
self.assertTrue(nso.verify_version_str('4.5.1', [(4, 6), (4, 5, 1)]))
|
||||
self.assertTrue(nso.verify_version_str('4.5.2', [(4, 6), (4, 5, 1)]))
|
||||
self.assertTrue(nso.verify_version_str('4.5.1.2', [(4, 6), (4, 5, 1)]))
|
||||
|
||||
def test_invalid_versions(self):
|
||||
self.assertFalse(nso.verify_version_str('4.4', [(4, 6), (4, 5, 1)]))
|
||||
self.assertFalse(nso.verify_version_str('4.4.1', [(4, 6), (4, 5, 1)]))
|
||||
self.assertFalse(nso.verify_version_str('4.4.1.2', [(4, 6), (4, 5, 1)]))
|
||||
self.assertFalse(nso.verify_version_str('4.5.0', [(4, 6), (4, 5, 1)]))
|
||||
|
||||
|
||||
class TestValueSort(unittest.TestCase):
|
||||
def test_sort_parent_depend(self):
|
||||
values = [
|
||||
nso.ValueBuilder.Value('/test/list{entry}', '/test/list', 'CREATE', ['']),
|
||||
nso.ValueBuilder.Value('/test/list{entry}/description', '/test/list/description', 'TEST', ['']),
|
||||
nso.ValueBuilder.Value('/test/entry', '/test/entry', 'VALUE', ['/test/list', '/test/list/name'])
|
||||
]
|
||||
|
||||
result = [v.path for v in nso.ValueBuilder.sort_values(values)]
|
||||
|
||||
self.assertEqual(['/test/list{entry}', '/test/entry', '/test/list{entry}/description'], result)
|
||||
|
||||
def test_sort_break_direct_cycle(self):
|
||||
values = [
|
||||
nso.ValueBuilder.Value('/test/a', '/test/a', 'VALUE', ['/test/c']),
|
||||
nso.ValueBuilder.Value('/test/b', '/test/b', 'VALUE', ['/test/a']),
|
||||
nso.ValueBuilder.Value('/test/c', '/test/c', 'VALUE', ['/test/a'])
|
||||
]
|
||||
|
||||
result = [v.path for v in nso.ValueBuilder.sort_values(values)]
|
||||
|
||||
self.assertEqual(['/test/a', '/test/b', '/test/c'], result)
|
||||
|
||||
def test_sort_break_indirect_cycle(self):
|
||||
values = [
|
||||
nso.ValueBuilder.Value('/test/c', '/test/c', 'VALUE', ['/test/a']),
|
||||
nso.ValueBuilder.Value('/test/a', '/test/a', 'VALUE', ['/test/b']),
|
||||
nso.ValueBuilder.Value('/test/b', '/test/b', 'VALUE', ['/test/c'])
|
||||
]
|
||||
|
||||
result = [v.path for v in nso.ValueBuilder.sort_values(values)]
|
||||
|
||||
self.assertEqual(['/test/a', '/test/c', '/test/b'], result)
|
||||
|
||||
def test_sort_depend_on_self(self):
|
||||
values = [
|
||||
nso.ValueBuilder.Value('/test/a', '/test/a', 'VALUE', ['/test/a']),
|
||||
nso.ValueBuilder.Value('/test/b', '/test/b', 'VALUE', [])
|
||||
]
|
||||
|
||||
result = [v.path for v in nso.ValueBuilder.sort_values(values)]
|
||||
|
||||
self.assertEqual(['/test/a', '/test/b'], result)
|
|
@ -1,148 +0,0 @@
|
|||
#
|
||||
# (c) 2018 Extreme Networks Inc.
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import json
|
||||
|
||||
from mock import MagicMock, patch, call
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.community.general.plugins.module_utils.network.slxos import slxos
|
||||
|
||||
|
||||
class TestPluginCLIConfSLXOS(unittest.TestCase):
|
||||
""" Test class for SLX-OS CLI Conf Methods
|
||||
"""
|
||||
|
||||
def test_get_connection_established(self):
|
||||
""" Test get_connection with established connection
|
||||
"""
|
||||
module = MagicMock()
|
||||
connection = slxos.get_connection(module)
|
||||
self.assertEqual(connection, module.slxos_connection)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.slxos.slxos.Connection')
|
||||
def test_get_connection_new(self, connection):
|
||||
""" Test get_connection with new connection
|
||||
"""
|
||||
socket_path = "little red riding hood"
|
||||
module = MagicMock(spec=[
|
||||
'fail_json',
|
||||
])
|
||||
module._socket_path = socket_path
|
||||
|
||||
connection().get_capabilities.return_value = '{"network_api": "cliconf"}'
|
||||
returned_connection = slxos.get_connection(module)
|
||||
connection.assert_called_with(socket_path)
|
||||
self.assertEqual(returned_connection, module.slxos_connection)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.slxos.slxos.Connection')
|
||||
def test_get_connection_incorrect_network_api(self, connection):
|
||||
""" Test get_connection with incorrect network_api response
|
||||
"""
|
||||
socket_path = "little red riding hood"
|
||||
module = MagicMock(spec=[
|
||||
'fail_json',
|
||||
])
|
||||
module._socket_path = socket_path
|
||||
module.fail_json.side_effect = TypeError
|
||||
|
||||
connection().get_capabilities.return_value = '{"network_api": "nope"}'
|
||||
|
||||
with self.assertRaises(TypeError):
|
||||
slxos.get_connection(module)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.slxos.slxos.Connection')
|
||||
def test_get_capabilities(self, connection):
|
||||
""" Test get_capabilities
|
||||
"""
|
||||
socket_path = "little red riding hood"
|
||||
module = MagicMock(spec=[
|
||||
'fail_json',
|
||||
])
|
||||
module._socket_path = socket_path
|
||||
module.fail_json.side_effect = TypeError
|
||||
|
||||
capabilities = {'network_api': 'cliconf'}
|
||||
|
||||
connection().get_capabilities.return_value = json.dumps(capabilities)
|
||||
|
||||
capabilities_returned = slxos.get_capabilities(module)
|
||||
|
||||
self.assertEqual(capabilities, capabilities_returned)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.slxos.slxos.Connection')
|
||||
def test_run_commands(self, connection):
|
||||
""" Test get_capabilities
|
||||
"""
|
||||
module = MagicMock()
|
||||
|
||||
commands = [
|
||||
'hello',
|
||||
'dolly',
|
||||
'well hello',
|
||||
'dolly',
|
||||
'its so nice to have you back',
|
||||
'where you belong',
|
||||
]
|
||||
|
||||
responses = [
|
||||
'Dolly, never go away again1',
|
||||
'Dolly, never go away again2',
|
||||
'Dolly, never go away again3',
|
||||
'Dolly, never go away again4',
|
||||
'Dolly, never go away again5',
|
||||
'Dolly, never go away again6',
|
||||
]
|
||||
|
||||
module.slxos_connection.get.side_effect = responses
|
||||
|
||||
run_command_responses = slxos.run_commands(module, commands)
|
||||
|
||||
calls = []
|
||||
|
||||
for command in commands:
|
||||
calls.append(call(
|
||||
command,
|
||||
None,
|
||||
None
|
||||
))
|
||||
|
||||
module.slxos_connection.get.assert_has_calls(calls)
|
||||
|
||||
self.assertEqual(responses, run_command_responses)
|
||||
|
||||
@patch('ansible_collections.community.general.plugins.module_utils.network.slxos.slxos.Connection')
|
||||
def test_load_config(self, connection):
|
||||
""" Test load_config
|
||||
"""
|
||||
module = MagicMock()
|
||||
|
||||
commands = [
|
||||
'what does it take',
|
||||
'to be',
|
||||
'number one?',
|
||||
'two is not a winner',
|
||||
'and three nobody remember',
|
||||
]
|
||||
|
||||
slxos.load_config(module, commands)
|
||||
|
||||
module.slxos_connection.edit_config.assert_called_once_with(commands)
|
|
@ -1,87 +0,0 @@
|
|||
# (c) 2016 Red Hat Inc.
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
import json
|
||||
|
||||
from ansible_collections.community.general.tests.unit.plugins.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase
|
||||
|
||||
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
|
||||
fixture_data = {}
|
||||
|
||||
|
||||
def load_fixture(name):
|
||||
path = os.path.join(fixture_path, name)
|
||||
|
||||
if path in fixture_data:
|
||||
return fixture_data[path]
|
||||
|
||||
with open(path) as f:
|
||||
data = f.read()
|
||||
|
||||
try:
|
||||
data = json.loads(data)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
fixture_data[path] = data
|
||||
return data
|
||||
|
||||
|
||||
class TestCiscoWlcModule(ModuleTestCase):
|
||||
|
||||
def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):
|
||||
|
||||
self.load_fixtures(commands)
|
||||
|
||||
if failed:
|
||||
result = self.failed()
|
||||
self.assertTrue(result['failed'], result)
|
||||
else:
|
||||
result = self.changed(changed)
|
||||
self.assertEqual(result['changed'], changed, result)
|
||||
|
||||
if commands is not None:
|
||||
if sort:
|
||||
self.assertEqual(sorted(commands), sorted(result['commands']), result['commands'])
|
||||
else:
|
||||
self.assertEqual(commands, result['commands'], result['commands'])
|
||||
|
||||
return result
|
||||
|
||||
def failed(self):
|
||||
with self.assertRaises(AnsibleFailJson) as exc:
|
||||
self.module.main()
|
||||
|
||||
result = exc.exception.args[0]
|
||||
self.assertTrue(result['failed'], result)
|
||||
return result
|
||||
|
||||
def changed(self, changed=False):
|
||||
with self.assertRaises(AnsibleExitJson) as exc:
|
||||
self.module.main()
|
||||
|
||||
result = exc.exception.args[0]
|
||||
self.assertEqual(result['changed'], changed, result)
|
||||
return result
|
||||
|
||||
def load_fixtures(self, commands=None):
|
||||
pass
|
|
@ -1,9 +0,0 @@
|
|||
sysname router
|
||||
|
||||
interface create mtc-1 1
|
||||
interface address dynamic-interface mtc-1 10.33.20.4 255.255.255.0 10.33.20.1
|
||||
interface vlan mtc-1 1
|
||||
|
||||
interface create mtc-2 2
|
||||
interface address dynamic-interface mtc-2 10.33.26.4 255.255.255.0 10.33.26.1
|
||||
interface vlan mtc-2 2
|
|
@ -1,9 +0,0 @@
|
|||
sysname foo
|
||||
|
||||
interface create mtc-1 1
|
||||
interface address dynamic-interface mtc-1 10.33.20.4 255.255.255.0 10.33.20.2
|
||||
interface vlan mtc-1 1
|
||||
|
||||
interface create mtc-2 2
|
||||
interface address dynamic-interface mtc-2 10.33.26.4 255.255.255.0 10.33.26.1
|
||||
interface vlan mtc-2 2
|
|
@ -1,43 +0,0 @@
|
|||
Manufacturer's Name.............................. Cisco Systems Inc.
|
||||
Product Name..................................... Cisco Controller
|
||||
Product Version.................................. 8.2.110.0
|
||||
RTOS Version..................................... 8.2.110.0
|
||||
Bootloader Version............................... 8.0.100.0
|
||||
Emergency Image Version.......................... 8.0.100.0
|
||||
|
||||
Build Type....................................... DATA + WPS
|
||||
|
||||
System Name...................................... SOMEHOST
|
||||
System Location.................................. USA
|
||||
System Contact................................... SN:E228240;ASSET:LSMTCc1
|
||||
System ObjectID.................................. 1.3.6.1.4.1.9.1.1615
|
||||
Redundancy Mode.................................. Disabled
|
||||
IP Address....................................... 10.10.10.10
|
||||
IPv6 Address..................................... ::
|
||||
System Up Time................................... 328 days 7 hrs 54 mins 49 secs
|
||||
System Timezone Location......................... (GMT) London, Lisbon, Dublin, Edinburgh
|
||||
System Stats Realtime Interval................... 5
|
||||
System Stats Normal Interval..................... 180
|
||||
|
||||
Configured Country............................... US - United States
|
||||
Operating Environment............................ Commercial (10 to 35 C)
|
||||
Internal Temp Alarm Limits....................... 10 to 38 C
|
||||
Internal Temperature............................. +18 C
|
||||
Fan Status....................................... OK
|
||||
|
||||
RAID Volume Status
|
||||
Drive 0.......................................... Good
|
||||
Drive 1.......................................... Good
|
||||
|
||||
State of 802.11b Network......................... Enabled
|
||||
State of 802.11a Network......................... Enabled
|
||||
Number of WLANs.................................. 1
|
||||
Number of Active Clients......................... 0
|
||||
|
||||
Burned-in MAC Address............................ AA:AA:AA:AA:AA:AA
|
||||
Power Supply 1................................... Present, OK
|
||||
Power Supply 2................................... Present, OK
|
||||
Maximum number of APs supported.................. 6000
|
||||
System Nas-Id....................................
|
||||
WLC MIC Certificate Types........................ SHA1/SHA2
|
||||
Licensing Type................................... RTU
|
|
@ -1,122 +0,0 @@
|
|||
# (c) 2016 Red Hat Inc.
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import json
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import patch
|
||||
from ansible_collections.community.general.plugins.modules.network.aireos import aireos_command
|
||||
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args
|
||||
from .aireos_module import TestCiscoWlcModule, load_fixture
|
||||
from ansible.module_utils import six
|
||||
|
||||
|
||||
class TestCiscoWlcCommandModule(TestCiscoWlcModule):
|
||||
|
||||
module = aireos_command
|
||||
|
||||
def setUp(self):
|
||||
super(TestCiscoWlcCommandModule, self).setUp()
|
||||
self.mock_run_commands = patch('ansible_collections.community.general.plugins.modules.network.aireos.aireos_command.run_commands')
|
||||
self.run_commands = self.mock_run_commands.start()
|
||||
|
||||
def tearDown(self):
|
||||
super(TestCiscoWlcCommandModule, self).tearDown()
|
||||
self.mock_run_commands.stop()
|
||||
|
||||
def load_fixtures(self, commands=None):
|
||||
|
||||
def load_from_file(*args, **kwargs):
|
||||
module, commands = args
|
||||
output = list()
|
||||
|
||||
for item in commands:
|
||||
try:
|
||||
obj = json.loads(item['command'])
|
||||
command = obj['command']
|
||||
except ValueError:
|
||||
command = item['command']
|
||||
filename = str(command).replace(' ', '_')
|
||||
output.append(load_fixture(filename))
|
||||
return output
|
||||
|
||||
self.run_commands.side_effect = load_from_file
|
||||
|
||||
def test_aireos_command_simple(self):
|
||||
set_module_args(dict(commands=['show sysinfo']))
|
||||
result = self.execute_module()
|
||||
self.assertEqual(len(result['stdout']), 1)
|
||||
self.assertTrue(result['stdout'][0].startswith('Manufacturer\'s Name'))
|
||||
|
||||
def test_aireos_command_multiple(self):
|
||||
set_module_args(dict(commands=['show sysinfo', 'show sysinfo']))
|
||||
result = self.execute_module()
|
||||
self.assertEqual(len(result['stdout']), 2)
|
||||
self.assertTrue(result['stdout'][0].startswith('Manufacturer\'s Name'))
|
||||
|
||||
def test_aireos_command_wait_for(self):
|
||||
wait_for = 'result[0] contains "Cisco Systems Inc"'
|
||||
set_module_args(dict(commands=['show sysinfo'], wait_for=wait_for))
|
||||
self.execute_module()
|
||||
|
||||
def test_aireos_command_wait_for_fails(self):
|
||||
wait_for = 'result[0] contains "test string"'
|
||||
set_module_args(dict(commands=['show sysinfo'], wait_for=wait_for))
|
||||
self.execute_module(failed=True)
|
||||
self.assertEqual(self.run_commands.call_count, 10)
|
||||
|
||||
def test_aireos_command_retries(self):
|
||||
wait_for = 'result[0] contains "test string"'
|
||||
set_module_args(dict(commands=['show sysinfo'], wait_for=wait_for, retries=2))
|
||||
self.execute_module(failed=True)
|
||||
self.assertEqual(self.run_commands.call_count, 2)
|
||||
|
||||
def test_aireos_command_match_any(self):
|
||||
wait_for = ['result[0] contains "Cisco Systems Inc"',
|
||||
'result[0] contains "test string"']
|
||||
set_module_args(dict(commands=['show sysinfo'], wait_for=wait_for, match='any'))
|
||||
self.execute_module()
|
||||
|
||||
def test_aireos_command_match_all(self):
|
||||
wait_for = ['result[0] contains "Cisco Systems Inc"',
|
||||
'result[0] contains "Cisco Controller"']
|
||||
set_module_args(dict(commands=['show sysinfo'], wait_for=wait_for, match='all'))
|
||||
self.execute_module()
|
||||
|
||||
def test_aireos_command_match_all_failure(self):
|
||||
wait_for = ['result[0] contains "Cisco Systems Inc"',
|
||||
'result[0] contains "test string"']
|
||||
commands = ['show sysinfo', 'show sysinfo']
|
||||
set_module_args(dict(commands=commands, wait_for=wait_for, match='all'))
|
||||
self.execute_module(failed=True)
|
||||
|
||||
def test_aireos_command_to_lines_non_ascii(self):
|
||||
''' Test data is one variation of the result of a `show run-config commands`
|
||||
command on Cisco WLC version 8.8.120.0 '''
|
||||
test_data = '''
|
||||
wlan flexconnect learn-ipaddr 101 enable
|
||||
`\xc8\x92\xef\xbf\xbdR\x7f`\xc8\x92\xef\xbf\xbdR\x7f`
|
||||
wlan wgb broadcast-tagging disable 1
|
||||
'''.strip()
|
||||
test_string = six.u(test_data)
|
||||
test_stdout = [test_string, ]
|
||||
result = list(aireos_command.to_lines(test_stdout))
|
||||
print(result[0])
|
||||
self.assertEqual(len(result[0]), 3)
|
|
@ -1,131 +0,0 @@
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from ansible_collections.community.general.tests.unit.compat.mock import patch
from ansible_collections.community.general.plugins.modules.network.aireos import aireos_config
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args
from .aireos_module import TestCiscoWlcModule, load_fixture


class TestCiscoWlcConfigModule(TestCiscoWlcModule):

    module = aireos_config

    def setUp(self):
        super(TestCiscoWlcConfigModule, self).setUp()

        self.mock_get_config = patch('ansible_collections.community.general.plugins.modules.network.aireos.aireos_config.get_config')
        self.get_config = self.mock_get_config.start()

        self.mock_load_config = patch('ansible_collections.community.general.plugins.modules.network.aireos.aireos_config.load_config')
        self.load_config = self.mock_load_config.start()

        self.mock_run_commands = patch('ansible_collections.community.general.plugins.modules.network.aireos.aireos_config.run_commands')
        self.run_commands = self.mock_run_commands.start()

        self.mock_save_config = patch('ansible_collections.community.general.plugins.modules.network.aireos.aireos_config.save_config')
        self.save_config = self.mock_save_config.start()

    def tearDown(self):
        super(TestCiscoWlcConfigModule, self).tearDown()
        self.mock_get_config.stop()
        self.mock_load_config.stop()
        self.mock_run_commands.stop()

    def load_fixtures(self, commands=None):
        config_file = 'aireos_config_config.cfg'
        self.get_config.return_value = load_fixture(config_file)
        self.load_config.return_value = None

    def test_aireos_config_unchanged(self):
        src = load_fixture('aireos_config_config.cfg')
        set_module_args(dict(src=src))
        self.execute_module()

    def test_aireos_config_src(self):
        src = load_fixture('aireos_config_src.cfg')
        set_module_args(dict(src=src))
        commands = ['sysname foo', 'interface address dynamic-interface mtc-1 10.33.20.4 255.255.255.0 10.33.20.2']
        self.execute_module(changed=True, commands=commands)

    def test_aireos_config_backup(self):
        set_module_args(dict(backup=True))
        result = self.execute_module()
        self.assertIn('__backup__', result)

    def test_aireos_config_save(self):
        set_module_args(dict(save=True))
        self.execute_module()
        self.assertEqual(self.save_config.call_count, 1)
        self.assertEqual(self.get_config.call_count, 0)
        self.assertEqual(self.load_config.call_count, 0)

    def test_aireos_config_before(self):
        set_module_args(dict(lines=['sysname foo'], before=['test1', 'test2']))
        commands = ['test1', 'test2', 'sysname foo']
        self.execute_module(changed=True, commands=commands, sort=False)

    def test_aireos_config_after(self):
        set_module_args(dict(lines=['sysname foo'], after=['test1', 'test2']))
        commands = ['sysname foo', 'test1', 'test2']
        self.execute_module(changed=True, commands=commands, sort=False)

    def test_aireos_config_before_after_no_change(self):
        set_module_args(dict(lines=['sysname router'],
                             before=['test1', 'test2'],
                             after=['test3', 'test4']))
        self.execute_module()

    def test_aireos_config_config(self):
        config = 'sysname localhost'
        set_module_args(dict(lines=['sysname router'], config=config))
        commands = ['sysname router']
        self.execute_module(changed=True, commands=commands)

    def test_aireos_config_match_none(self):
        lines = ['sysname router', 'interface create mtc-1 1']
        set_module_args(dict(lines=lines, match='none'))
        self.execute_module(changed=True, commands=lines, sort=False)

    def test_nxos_config_save_always(self):
        args = dict(save_when='always')
        set_module_args(args)
        self.execute_module()
        self.assertEqual(self.save_config.call_count, 1)
        self.assertEqual(self.get_config.call_count, 0)
        self.assertEqual(self.load_config.call_count, 0)

    def test_nxos_config_save_changed_true(self):
        args = dict(save_when='changed', lines=['sysname foo', 'interface create mtc-3 3'])
        set_module_args(args)
        self.execute_module(changed=True)
        self.assertEqual(self.save_config.call_count, 1)
        self.assertEqual(self.get_config.call_count, 1)
        self.assertEqual(self.load_config.call_count, 1)

    def test_nxos_config_save_changed_false(self):
        args = dict(save_when='changed')
        set_module_args(args)
        self.execute_module()
        self.assertEqual(self.save_config.call_count, 0)
        self.assertEqual(self.get_config.call_count, 0)
        self.assertEqual(self.load_config.call_count, 0)

@ -1,88 +0,0 @@
# (c) 2019 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import json

from ansible_collections.community.general.tests.unit.plugins.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase


fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}


def load_fixture(name):
    path = os.path.join(fixture_path, name)

    if path in fixture_data:
        return fixture_data[path]

    with open(path) as f:
        data = f.read()

    try:
        data = json.loads(data)
    except Exception:
        pass

    fixture_data[path] = data
    return data


class TestApconosModule(ModuleTestCase):

    def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):

        self.load_fixtures(commands)

        if failed:
            result = self.failed()
            self.assertTrue(result['failed'], result)
        else:
            result = self.changed(changed)
            self.assertEqual(result['changed'], changed, result)

        if commands is not None:
            if sort:
                self.assertEqual(sorted(commands), sorted(result['commands']), result['commands'])
            else:
                self.assertEqual(commands, result['commands'], result['commands'])

        return result

    def failed(self):
        with self.assertRaises(AnsibleFailJson) as exc:
            self.module.main()

        result = exc.exception.args[0]
        self.assertTrue(result['failed'], result)
        return result

    def changed(self, changed=False):
        with self.assertRaises(AnsibleExitJson) as exc:
            self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['changed'], changed, result)
        return result

    def load_fixtures(self, commands=None):
        pass

@ -1,2 +0,0 @@
APCON
COMPONENT MODEL VERSION

@ -1,110 +0,0 @@
# (c) 2019 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json

from ansible_collections.community.general.tests.unit.compat.mock import patch
from ansible_collections.community.general.plugins.modules.network.apconos import apconos_command
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args
from .apconos_module import TestApconosModule, load_fixture


class TestApconosCommandModule(TestApconosModule):

    module = apconos_command

    def setUp(self):
        super(TestApconosCommandModule, self).setUp()

        self.mock_run_commands = patch('ansible_collections.community.general.plugins.modules.network.apconos.apconos_command.run_commands')
        self.run_commands = self.mock_run_commands.start()

    def tearDown(self):
        super(TestApconosCommandModule, self).tearDown()
        self.mock_run_commands.stop()

    def load_fixtures(self, commands=None):

        def load_from_file(*args, **kwargs):
            module, commands = args
            output = list()
            for item in commands:
                filename = str(item).replace(' ', '_')
                output.append(load_fixture(filename))
            return output

        self.run_commands.side_effect = load_from_file

    def test_apcon_command_simple(self):
        set_module_args(dict(commands=['show version']))
        result = self.execute_module()
        self.assertEqual(len(result['stdout_lines']), 1)
        self.assertEqual(result['stdout_lines'][0][0], 'APCON')

    def test_apcon_command_multiple(self):
        set_module_args(dict(commands=['show version', 'show version']))
        result = self.execute_module()
        self.assertEqual(len(result['stdout_lines']), 2)
        self.assertEqual(result['stdout_lines'][0][0], 'APCON')
        self.assertEqual(result['stdout_lines'][1][0], 'APCON')

    def test_apcon_command_wait_for(self):
        wait_for = 'result[0] contains "APCON"'
        set_module_args(dict(commands=['show version'], wait_for=wait_for))
        self.execute_module()

    def test_apcon_command_wait_for_fails(self):
        wait_for = 'result[0] contains "test string"'
        set_module_args(dict(commands=['show version'], wait_for=wait_for))
        self.execute_module(failed=True)
        self.assertEqual(self.run_commands.call_count, 10)

    def test_apcon_command_retries(self):
        wait_for = 'result[0] contains "test string"'
        set_module_args(dict(commands=['show version'], wait_for=wait_for, retries=2))
        self.execute_module(failed=True)
        self.assertEqual(self.run_commands.call_count, 2)

    def test_apcon_command_match_any(self):
        wait_for = ['result[0] contains "test string"',
                    'result[0] contains "VERSION"']
        set_module_args(dict(commands=['show version'], wait_for=wait_for, match='any'))
        self.execute_module()

    def test_apcon_command_match_all(self):
        wait_for = ['result[0] contains "COMPONENT"',
                    'result[0] contains "MODEL"',
                    'result[0] contains "VERSION"']
        set_module_args(dict(commands=['show version'], wait_for=wait_for, match='all'))
        self.execute_module()

    def test_apcon_command_match_all_failure(self):
        wait_for = ['result[0] contains "APCON OS"',
                    'result[0] contains "test string"']
        commands = ['show version', 'show version']
        set_module_args(dict(commands=commands, wait_for=wait_for, match='all'))
        self.execute_module(failed=True)

    def test_apcon_command_checkmode_not_warning(self):
        commands = ['enable ssh']
        set_module_args(dict(commands=commands, _ansible_check_mode=False))
        result = self.execute_module(changed=True)
        self.assertEqual(result['warnings'], [])

@ -1,88 +0,0 @@
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import os
import json

from ansible_collections.community.general.tests.unit.plugins.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase


fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}


def load_fixture(name):
    path = os.path.join(fixture_path, name)

    if path in fixture_data:
        return fixture_data[path]

    with open(path) as f:
        data = f.read()

    try:
        data = json.loads(data)
    except Exception:
        pass

    fixture_data[path] = data
    return data


class TestArubaModule(ModuleTestCase):

    def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):

        self.load_fixtures(commands)

        if failed:
            result = self.failed()
            self.assertTrue(result['failed'], result)
        else:
            result = self.changed(changed)
            self.assertEqual(result['changed'], changed, result)

        if commands is not None:
            if sort:
                self.assertEqual(sorted(commands), sorted(result['commands']), result['commands'])
            else:
                self.assertEqual(commands, result['commands'], result['commands'])

        return result

    def failed(self):
        with self.assertRaises(AnsibleFailJson) as exc:
            self.module.main()

        result = exc.exception.args[0]
        self.assertTrue(result['failed'], result)
        return result

    def changed(self, changed=False):
        with self.assertRaises(AnsibleExitJson) as exc:
            self.module.main()

        result = exc.exception.args[0]
        self.assertEqual(result['changed'], changed, result)
        return result

    def load_fixtures(self, commands=None):
        pass

@ -1,17 +0,0 @@
!
hostname router
!
interface GigabitEthernet0/0
ip address 1.2.3.4 255.255.255.0
description test string
!
interface GigabitEthernet0/1
ip address 6.7.8.9 255.255.255.0
description test string
shutdown
!
wlan ssid-profile "blah"
essid "blah"
!
ip access-list session blah
any any any permit

@ -1,13 +0,0 @@
!
hostname router
!
interface GigabitEthernet0/0
ip address 1.2.3.4 255.255.255.0
description test string
no shutdown
!
interface GigabitEthernet0/1
ip address 6.7.8.9 255.255.255.0
description test string
shutdown
!

@ -1,11 +0,0 @@
!
hostname foo
!
interface GigabitEthernet0/0
no ip address
!
interface GigabitEthernet0/1
ip address 6.7.8.9 255.255.255.0
description test string
shutdown
!

@ -1,17 +0,0 @@
Aruba Operating System Software.
ArubaOS (MODEL: Aruba7220-US), Version 6.4.3.10
Website: http://www.arubanetworks.com
Copyright (c) 2002-2016, Aruba Networks, Inc.
Compiled on 2016-08-31 at 18:31:30 PDT (build 56305) by p4build

ROM: System Bootstrap, Version CPBoot 1.2.1.0 (build 39183)
Built: 2013-07-26 04:57:47
Built by: p4build@re_client_39183


Switch uptime is 15 days 20 hours 51 minutes 51 seconds
Reboot Cause: User reboot (Intent:cause:register 78:86:50:2)
Supervisor Card
Processor (XLP432 Rev B1 (Secure Boot) , 1000 MHz) with 7370M bytes of memory.
32K bytes of non-volatile configuration memory.
7920M bytes of Supervisor Card system flash.

@ -1,109 +0,0 @@
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

import json

from ansible_collections.community.general.tests.unit.compat.mock import patch
from ansible_collections.community.general.plugins.modules.network.aruba import aruba_command
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args
from .aruba_module import TestArubaModule, load_fixture


class TestArubaCommandModule(TestArubaModule):

    module = aruba_command

    def setUp(self):
        super(TestArubaCommandModule, self).setUp()

        self.mock_run_commands = patch('ansible_collections.community.general.plugins.modules.network.aruba.aruba_command.run_commands')
        self.run_commands = self.mock_run_commands.start()

    def tearDown(self):
        super(TestArubaCommandModule, self).tearDown()

        self.mock_run_commands.stop()

    def load_fixtures(self, commands=None):

        def load_from_file(*args, **kwargs):
            module, commands = args
            output = list()

            for item in commands:
                try:
                    obj = json.loads(item['command'])
                    command = obj['command']
                except ValueError:
                    command = item['command']
                filename = str(command).replace(' ', '_')
                output.append(load_fixture(filename))
            return output

        self.run_commands.side_effect = load_from_file

    def test_aruba_command_simple(self):
        set_module_args(dict(commands=['show version']))
        result = self.execute_module()
        self.assertEqual(len(result['stdout']), 1)
        self.assertTrue(result['stdout'][0].startswith('Aruba Operating System Software'))

    def test_aruba_command_multiple(self):
        set_module_args(dict(commands=['show version', 'show version']))
        result = self.execute_module()
        self.assertEqual(len(result['stdout']), 2)
        self.assertTrue(result['stdout'][0].startswith('Aruba Operating System Software'))

    def test_aruba_command_wait_for(self):
        wait_for = 'result[0] contains "Aruba Operating System Software"'
        set_module_args(dict(commands=['show version'], wait_for=wait_for))
        self.execute_module()

    def test_aruba_command_wait_for_fails(self):
        wait_for = 'result[0] contains "test string"'
        set_module_args(dict(commands=['show version'], wait_for=wait_for))
        self.execute_module(failed=True)
        self.assertEqual(self.run_commands.call_count, 10)

    def test_aruba_command_retries(self):
        wait_for = 'result[0] contains "test string"'
        set_module_args(dict(commands=['show version'], wait_for=wait_for, retries=2))
        self.execute_module(failed=True)
        self.assertEqual(self.run_commands.call_count, 2)

    def test_aruba_command_match_any(self):
        wait_for = ['result[0] contains "Aruba Operating System Software"',
                    'result[0] contains "test string"']
        set_module_args(dict(commands=['show version'], wait_for=wait_for, match='any'))
        self.execute_module()

    def test_aruba_command_match_all(self):
        wait_for = ['result[0] contains "Aruba Operating System Software"',
                    'result[0] contains "Aruba Networks"']
        set_module_args(dict(commands=['show version'], wait_for=wait_for, match='all'))
        self.execute_module()

    def test_aruba_command_match_all_failure(self):
        wait_for = ['result[0] contains "Aruba Operating System Software"',
                    'result[0] contains "test string"']
        commands = ['show version', 'show version']
        set_module_args(dict(commands=commands, wait_for=wait_for, match='all'))
        self.execute_module(failed=True)

@ -1,189 +0,0 @@
|
|||
#
|
||||
# (c) 2016 Red Hat Inc.
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import patch
|
||||
from ansible_collections.community.general.plugins.modules.network.aruba import aruba_config
|
||||
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args
|
||||
from .aruba_module import TestArubaModule, load_fixture
|
||||
|
||||
|
||||
class TestArubaConfigModule(TestArubaModule):
|
||||
|
||||
module = aruba_config
|
||||
|
||||
def setUp(self):
|
||||
super(TestArubaConfigModule, self).setUp()
|
||||
|
||||
self.mock_get_config = patch('ansible_collections.community.general.plugins.modules.network.aruba.aruba_config.get_config')
|
||||
self.get_config = self.mock_get_config.start()
|
||||
|
||||
self.mock_load_config = patch('ansible_collections.community.general.plugins.modules.network.aruba.aruba_config.load_config')
|
||||
self.load_config = self.mock_load_config.start()
|
||||
|
||||
self.mock_run_commands = patch('ansible_collections.community.general.plugins.modules.network.aruba.aruba_config.run_commands')
|
||||
self.run_commands = self.mock_run_commands.start()
|
||||
|
||||
def tearDown(self):
|
||||
super(TestArubaConfigModule, self).tearDown()
|
||||
|
||||
self.mock_get_config.stop()
|
||||
self.mock_load_config.stop()
|
||||
self.mock_run_commands.stop()
|
||||
|
||||
def load_fixtures(self, commands=None):
|
||||
config_file = 'aruba_config_config.cfg'
|
||||
self.get_config.return_value = load_fixture(config_file)
|
||||
self.load_config.return_value = None
|
||||
|
||||
def test_aruba_config_unchanged(self):
|
||||
src = load_fixture('aruba_config_config.cfg')
|
||||
set_module_args(dict(src=src))
|
||||
self.execute_module()
|
||||
|
||||
def test_aruba_config_unchanged_different_spacing(self):
|
||||
# Tab indented
|
||||
set_module_args(dict(lines=['description test string'], parents=['interface GigabitEthernet0/0']))
|
||||
self.execute_module(changed=False)
|
||||
# 3 spaces indented
|
||||
set_module_args(dict(lines=['essid "blah"'], parents=['wlan ssid-profile "blah"']))
|
||||
self.execute_module(changed=False)
|
||||
|
||||
def test_aruba_config_src(self):
|
||||
src = load_fixture('aruba_config_src.cfg')
|
||||
set_module_args(dict(src=src))
|
||||
commands = ['hostname foo', 'interface GigabitEthernet0/0',
|
||||
'no ip address']
|
||||
self.execute_module(changed=True, commands=commands)
|
||||
|
||||
def test_aruba_config_backup(self):
|
||||
set_module_args(dict(backup=True))
|
||||
result = self.execute_module()
|
||||
self.assertIn('__backup__', result)
|
||||
|
||||
def test_aruba_config_save_always(self):
|
||||
self.run_commands.return_value = "Hostname foo"
|
||||
set_module_args(dict(save_when='always'))
|
||||
self.execute_module(changed=True)
|
||||
self.assertEqual(self.run_commands.call_count, 1)
|
||||
self.assertEqual(self.get_config.call_count, 0)
|
||||
self.assertEqual(self.load_config.call_count, 0)
|
||||
args = self.run_commands.call_args[0][1]
|
||||
self.assertIn('write memory', args)
|
||||
|
||||
def test_aruba_config_save_changed_true(self):
|
||||
src = load_fixture('aruba_config_src.cfg')
|
||||
set_module_args(dict(src=src, save_when='changed'))
|
||||
commands = ['hostname foo', 'interface GigabitEthernet0/0',
|
||||
'no ip address']
|
||||
self.execute_module(changed=True, commands=commands)
|
||||
# src = load_fixture('aruba_config_src.cfg')
|
||||
|
||||
# set_module_args(dict(save_when='changed'))
|
||||
# commands = ['hostname changed']
|
||||
# self.execute_module(changed=False, commands=commands)
|
||||
self.assertEqual(self.run_commands.call_count, 1)
|
||||
self.assertEqual(self.get_config.call_count, 1)
|
||||
self.assertEqual(self.load_config.call_count, 1)
|
||||
args = self.run_commands.call_args[0][1]
|
||||
self.assertIn('write memory', args)
|
||||
|
||||
def test_aruba_config_save_changed_false(self):
|
||||
set_module_args(dict(save_when='changed'))
|
||||
self.execute_module(changed=False)
|
||||
self.assertEqual(self.run_commands.call_count, 0)
|
||||
self.assertEqual(self.get_config.call_count, 0)
|
||||
self.assertEqual(self.load_config.call_count, 0)
|
||||
|
||||
def test_aruba_config_lines_wo_parents(self):
|
||||
set_module_args(dict(lines=['hostname foo']))
|
||||
commands = ['hostname foo']
|
||||
self.execute_module(changed=True, commands=commands)
|
||||
|
||||
def test_aruba_config_lines_w_parents(self):
|
||||
set_module_args(dict(lines=['shutdown'], parents=['interface GigabitEthernet0/0']))
|
||||
commands = ['interface GigabitEthernet0/0', 'shutdown']
|
||||
self.execute_module(changed=True, commands=commands)
|
||||
|
||||
def test_aruba_config_before(self):
|
||||
set_module_args(dict(lines=['hostname foo'], before=['test1', 'test2']))
|
||||
commands = ['test1', 'test2', 'hostname foo']
|
||||
self.execute_module(changed=True, commands=commands, sort=False)
|
||||
|
||||
def test_aruba_config_after(self):
|
||||
set_module_args(dict(lines=['hostname foo'], after=['test1', 'test2']))
|
||||
commands = ['hostname foo', 'test1', 'test2']
|
||||
self.execute_module(changed=True, commands=commands, sort=False)
|
||||
|
||||
def test_aruba_config_before_after_no_change(self):
|
||||
set_module_args(dict(lines=['hostname router'],
|
||||
before=['test1', 'test2'],
|
||||
after=['test3', 'test4']))
|
||||
self.execute_module()
|
||||
|
||||
def test_aruba_config_config(self):
|
||||
config = 'hostname localhost'
|
||||
set_module_args(dict(lines=['hostname router'], config=config))
|
||||
commands = ['hostname router']
|
||||
self.execute_module(changed=True, commands=commands)
|
||||
|
||||
def test_aruba_config_replace_block(self):
|
||||
lines = ['description test string', 'test string']
|
||||
parents = ['interface GigabitEthernet0/0']
|
||||
set_module_args(dict(lines=lines, replace='block', parents=parents))
|
||||
commands = parents + lines
|
||||
self.execute_module(changed=True, commands=commands)
|
||||
|
||||
def test_aruba_config_force(self):
|
||||
lines = ['hostname router']
|
||||
set_module_args(dict(lines=lines, match='none'))
|
||||
self.execute_module(changed=True, commands=lines)
|
||||
|
||||
def test_aruba_config_match_none(self):
|
||||
lines = ['ip address 1.2.3.4 255.255.255.0', 'description test string']
|
||||
parents = ['interface GigabitEthernet0/0']
|
||||
set_module_args(dict(lines=lines, parents=parents, match='none'))
|
||||
commands = parents + lines
|
||||
self.execute_module(changed=True, commands=commands, sort=False)
|
||||
|
||||
def test_aruba_config_match_strict(self):
|
||||
lines = ['ip address 1.2.3.4 255.255.255.0', 'description test string',
|
||||
'shutdown']
|
||||
parents = ['interface GigabitEthernet0/0']
|
||||
set_module_args(dict(lines=lines, parents=parents, match='strict'))
|
||||
commands = parents + ['shutdown']
|
||||
self.execute_module(changed=True, commands=commands, sort=False)
|
||||
|
||||
def test_aruba_config_match_exact(self):
|
||||
lines = ['ip address 1.2.3.4 255.255.255.0', 'description test string',
|
||||
'shutdown']
|
||||
parents = ['interface GigabitEthernet0/0']
|
||||
set_module_args(dict(lines=lines, parents=parents, match='exact'))
|
||||
commands = parents + lines
|
||||
self.execute_module(changed=True, commands=commands, sort=False)
|
||||
|
||||
def test_aruba_encrypt_false(self):
|
||||
set_module_args(dict(encrypt=False))
|
||||
self.execute_module()
|
||||
self.assertEqual(self.run_commands.call_count, 2)
|
||||
args = self.run_commands.call_args_list
|
||||
self.assertIn('encrypt disable', args[0][0])
|
||||
self.assertIn('encrypt enable', args[1][0])
|
|
@ -1,215 +0,0 @@
|
|||
{
|
||||
"mock_create_res": {
|
||||
"ansible_facts": {
|
||||
"avi_api_context": {
|
||||
"192.0.2.97:admin:None": {
|
||||
"csrftoken": "qG23CCARDL3rh1KZ66XXPIeUYCUCOZ4q",
|
||||
"session_id": "h5nynf9u9nompp5byai7vii2v8bbn9kd"
|
||||
}
|
||||
}
|
||||
},
|
||||
"api_context": null,
|
||||
"changed": true,
|
||||
"invocation": {
|
||||
"module_args": {
|
||||
"access": [{
|
||||
"role_ref": "/api/role?name=Tenant-Admin",
|
||||
"tenant_ref": "/api/tenant/********#********",
|
||||
"all_tenants": false
|
||||
}],
|
||||
"api_context": null,
|
||||
"api_version": "18.2.5",
|
||||
"avi_api_update_method": "put",
|
||||
"avi_credentials": null,
|
||||
"avi_disable_session_cache_as_fact": false,
|
||||
"avi_login_info": null,
|
||||
"controller": "192.0.2.97",
|
||||
"default_tenant_ref": "/api/tenant?name=********",
|
||||
"email": "test@abc.com",
|
||||
"is_active": true,
|
||||
"is_superuser": true,
|
||||
"name": "testuser",
|
||||
"obj_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
|
||||
"obj_username": "testuser",
|
||||
"password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
|
||||
"state": "present",
|
||||
"tenant": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
|
||||
"tenant_uuid": "",
|
||||
"user_profile_ref": "/api/useraccountprofile?name=Default-User-Account-Profile",
|
||||
"username": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
|
||||
}
|
||||
},
|
||||
"obj": {
|
||||
"_last_modified": "1559736767460818",
|
||||
"access": [{
|
||||
"all_tenants": false,
|
||||
"role_ref": "https://192.0.2.97/api/tenant/********/role/role-ff851004-bd75-485b-87ec-2fe1d6a03fb9#Tenant-Admin",
|
||||
"tenant_ref": "https://192.0.2.97/api/tenant/********#********"
|
||||
}],
|
||||
"default_tenant_ref": "https://192.0.2.97/api/tenant/********#********",
|
||||
"email": "test@abc.com",
|
||||
"full_name": "testuser",
|
||||
"is_active": true,
|
||||
"is_superuser": true,
|
||||
"local": true,
|
||||
"name": "testuser",
|
||||
"obj_password": "<sensitive>",
|
||||
"obj_username": "testuser",
|
||||
"password": "<sensitive>",
|
||||
"uid": 2004,
|
||||
"url": "https://192.0.2.97/api/user/user-7087578f-4dfe-4e06-a153-495a91824a1d#testuser",
|
||||
"user_profile_ref": "https://192.0.2.97/api/useraccountprofile/useraccountprofile-78063e7c-b443-48d6-b34c-5253ae1fcd2a#Default-User-Account-Profile",
|
||||
"username": "testuser",
|
||||
"uuid": "user-7087578f-4dfe-4e06-a153-495a91824a1d"
|
||||
},
|
||||
"old_obj": null
|
||||
},
|
||||
"mock_put_res": {
|
||||
"obj": {
|
||||
"username": "testuser",
|
||||
"user_profile_ref": "https://192.0.2.97/api/useraccountprofile/useraccountprofile-546c5e88-6270-4ba1-9cfd-d0c755e68f47#Default-User-Account-Profile",
|
||||
"name": "testuser",
|
||||
"url": "https://192.0.2.97/api/user/user-ed10f328-bd92-4db2-bacd-0cf795fcbf8a#testuser",
|
||||
"is_active": true,
|
||||
"uuid": "user-ed10f328-bd92-4db2-bacd-0cf795fcbf8a",
|
||||
"email": "newemail@abc.com",
|
||||
"access": [{
|
||||
"tenant_ref": "https://192.0.2.97/api/tenant/tenant-57af0f3f-6f14-4657-8f32-9b289407752b#Test-Admin",
|
||||
"all_tenants": false,
|
||||
"role_ref": "https://192.0.2.97/api/tenant/********/role/role-b073ab0d-e1d0-4800-95ef-6ecf2c5ed7d1#Tenant-Admin"
|
||||
}],
|
||||
"is_superuser": true,
|
||||
"obj_username": "testuser",
|
||||
"full_name": "testuser",
|
||||
"_last_modified": "1559802772203285",
|
||||
"password": "<sensitive>",
|
||||
"local": true,
|
||||
"obj_password": "<sensitive>",
|
||||
"default_tenant_ref": "https://192.0.2.97/api/tenant/********#********",
|
||||
"uid": 2002
|
||||
},
|
||||
"changed": true,
|
||||
"api_context": null,
|
||||
"invocation": {
|
||||
"module_args": {
|
||||
"username": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
|
||||
"user_profile_ref": "/api/useraccountprofile?name=Default-User-Account-Profile",
|
||||
"api_version": "18.2.5",
|
||||
"name": "testuser",
|
||||
"state": "present",
|
||||
"is_active": true,
|
||||
"api_context": null,
|
||||
"avi_disable_session_cache_as_fact": false,
|
||||
"controller": "192.0.2.97",
|
||||
"avi_api_patch_op": null,
|
||||
"access": [{
|
||||
"tenant_ref": "/api/tenant?name=Test-Admin",
|
||||
"all_tenants": false,
|
||||
"role_ref": "/api/role?name=Tenant-Admin"
|
||||
}],
|
||||
"is_superuser": true,
|
||||
"avi_credentials": null,
|
||||
"email": "newemail@abc.com",
|
||||
"default_tenant_ref": "/api/tenant?name=********",
|
||||
"obj_username": "testuser",
|
||||
"password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
|
||||
"tenant_uuid": "",
|
||||
"obj_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
|
||||
"avi_api_update_method": "put",
|
||||
"tenant": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
|
||||
}
|
||||
},
|
||||
"ansible_facts": {
|
||||
"avi_api_context": {
|
||||
"192.0.2.97:admin:None": {
|
||||
"csrftoken": "Y7CET6zaIC9VZAzBqEW4cWo1N26jPg55",
|
||||
"session_id": "364n7o0p3o5so63b9rzd47v6ehya6xg7"
|
||||
}
|
||||
}
|
||||
},
|
||||
"old_obj": {
|
||||
"username": "testuser",
|
||||
"user_profile_ref": "https://192.0.2.97/api/useraccountprofile/useraccountprofile-546c5e88-6270-4ba1-9cfd-d0c755e68f47#Default-User-Account-Profile",
|
||||
"name": "testuser",
|
||||
"url": "https://192.0.2.97/api/user/user-ed10f328-bd92-4db2-bacd-0cf795fcbf8a#testuser",
|
||||
"is_active": true,
|
||||
"uuid": "user-ed10f328-bd92-4db2-bacd-0cf795fcbf8a",
|
||||
"access": [{
|
||||
"tenant_ref": "https://192.0.2.97/api/tenant/tenant-57af0f3f-6f14-4657-8f32-9b289407752b#Test-Admin",
|
||||
"all_tenants": false,
|
||||
"role_ref": "https://192.0.2.97/api/tenant/********/role/role-b073ab0d-e1d0-4800-95ef-6ecf2c5ed7d1#Tenant-Admin"
|
||||
}],
|
||||
"is_superuser": true,
|
||||
"full_name": "testuser",
|
||||
"ui_property": "",
|
||||
"password": "<sensitive>",
|
||||
"local": true,
|
||||
"email": "test@abc.com",
|
||||
"default_tenant_ref": "https://192.0.2.97/api/tenant/********#********",
|
||||
"uid": 2002
|
||||
}
|
||||
},
|
||||
"mock_del_res": {
|
||||
"ansible_facts": {
|
||||
"avi_api_context": {
|
||||
"192.0.2.97:admin:None": {
|
||||
"csrftoken": "Vtkx9GeS2lsrld5yX83cmJqbZO3MAimb",
|
||||
"session_id": "ix3t1dja8yzwb155de59viyn96hibn6b"
|
||||
}
|
||||
}
|
||||
},
|
||||
"api_context": null,
|
||||
"changed": true,
|
||||
"invocation": {
|
||||
"module_args": {
|
||||
"access": [{
|
||||
"role_ref": "/api/role?name=Tenant-Admin",
|
||||
"tenant_ref": "/api/tenant/********#********"
|
||||
}],
|
||||
"api_context": null,
|
||||
"api_version": "18.2.5",
|
||||
"avi_api_update_method": "put",
|
||||
"avi_credentials": null,
|
||||
"avi_disable_session_cache_as_fact": false,
|
||||
"avi_login_info": null,
|
||||
"controller": "192.0.2.97",
|
||||
"default_tenant_ref": "/api/tenant?name=********",
|
||||
"email": "test@abc.com",
|
||||
"is_active": true,
|
||||
"is_superuser": true,
|
||||
"name": "testuser",
|
||||
"obj_password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
|
||||
"obj_username": "testuser",
|
||||
"password": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
|
||||
"state": "absent",
|
||||
"tenant": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER",
|
||||
"tenant_uuid": "",
|
||||
"user_profile_ref": "/api/useraccountprofile?name=Default-User-Account-Profile",
|
||||
"username": "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER"
|
||||
}
|
||||
},
|
||||
"obj": null,
|
||||
"old_obj": {
|
||||
"_last_modified": "1559803346264869",
|
||||
"access": [{
|
||||
"all_tenants": false,
|
||||
"role_ref": "https://192.0.2.97/api/tenant/********/role/role-b073ab0d-e1d0-4800-95ef-6ecf2c5ed7d1#Tenant-Admin",
|
||||
"tenant_ref": "https://192.0.2.97/api/tenant/tenant-57af0f3f-6f14-4657-8f32-9b289407752b#Test-Admin"
|
||||
}],
|
||||
"default_tenant_ref": "https://192.0.2.97/api/tenant/********#********",
|
||||
"email": "newemail@abc.com",
|
||||
"full_name": "testuser",
|
||||
"is_active": true,
|
||||
"is_superuser": true,
|
||||
"local": true,
|
||||
"name": "testuser",
|
||||
"password": "<sensitive>",
|
||||
"ui_property": "",
|
||||
"uid": 2002,
|
||||
"url": "https://192.0.2.97/api/user/user-ed10f328-bd92-4db2-bacd-0cf795fcbf8a#testuser",
|
||||
"user_profile_ref": "https://192.0.2.97/api/useraccountprofile/useraccountprofile-546c5e88-6270-4ba1-9cfd-d0c755e68f47#Default-User-Account-Profile",
|
||||
"username": "testuser",
|
||||
"uuid": "user-ed10f328-bd92-4db2-bacd-0cf795fcbf8a"
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,101 +0,0 @@
|
|||
import os
|
||||
import json
|
||||
from ansible_collections.community.general.tests.unit.compat import unittest
|
||||
from ansible_collections.community.general.tests.unit.compat.mock import Mock
|
||||
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args
|
||||
from ansible_collections.community.general.plugins.modules.network.avi import avi_user
|
||||
|
||||
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
|
||||
with open(fixture_path + '/avi_user.json') as json_file:
|
||||
data = json.load(json_file)
|
||||
|
||||
|
||||
class TestAviUser(unittest.TestCase):
|
||||
|
||||
def test_create_user(self):
|
||||
set_module_args({
|
||||
"avi_credentials": {
|
||||
"controller": "192.0.2.13",
|
||||
"username": "username",
|
||||
"password": "fakepassword",
|
||||
"api_version": "18.2.5"
|
||||
},
|
||||
"state": "present",
|
||||
"name": "testuser",
|
||||
"obj_username": "testuser",
|
||||
"obj_password": "test123",
|
||||
"email": "test@abc.com",
|
||||
"access": [
|
||||
{
|
||||
"role_ref": "/api/role?name=Tenant-Admin",
|
||||
"tenant_ref": "/api/tenant?name=Test-Admin",
|
||||
"all_tenants": False
|
||||
}
|
||||
],
|
||||
"user_profile_ref": "/api/useraccountprofile?name=Default-User-Account-Profile",
|
||||
"is_active": True,
|
||||
"is_superuser": True,
|
||||
"default_tenant_ref": "/api/tenant?name=admin"
|
||||
})
|
||||
avi_user.avi_ansible_api = Mock(return_value=data['mock_create_res'])
|
||||
response = avi_user.main()
|
||||
assert response['changed']
|
||||
|
||||
def test_put_on_user(self):
|
||||
set_module_args({
|
||||
"avi_credentials": {
|
||||
"controller": "192.0.2.13",
|
||||
"username": "username",
|
||||
"password": "fakepassword",
|
||||
"api_version": "18.2.5"
|
||||
},
|
||||
"state": "present",
|
||||
"avi_api_update_method": "put",
|
||||
"name": "testuser",
|
||||
"obj_username": "testuser",
|
||||
"obj_password": "test123",
|
||||
"email": "newemail@abc.com",
|
||||
"access": [{
|
||||
"role_ref": "/api/role?name=Tenant-Admin",
|
||||
"tenant_ref": "/api/tenant?name=Test-Admin",
|
||||
"all_tenants": False
|
||||
}],
|
||||
"user_profile_ref": "/api/useraccountprofile?name=Default-User-Account-Profile",
|
||||
"is_active": True,
|
||||
"is_superuser": True,
|
||||
"default_tenant_ref": "/api/tenant?name=admin"
|
||||
})
|
||||
avi_user.avi_ansible_api = Mock(return_value=data['mock_put_res'])
|
||||
response = avi_user.main()
|
||||
assert response['changed']
|
||||
assert response['obj']
|
||||
assert response['old_obj']
|
||||
|
||||
def test_delete_user(self):
|
||||
set_module_args({
|
||||
"avi_credentials": {
|
||||
"controller": "192.0.2.13",
|
||||
"username": "username",
|
||||
"password": "fakepassword",
|
||||
"api_version": "18.2.5"
|
||||
|
||||
},
|
||||
"name": "testuser",
|
||||
"obj_username": "testuser",
|
||||
"obj_password": "test123",
|
||||
"email": "test@abc.com",
|
||||
"access": [{
|
||||
"role_ref": "/api/role?name=Tenant-Admin",
|
||||
"tenant_ref": "/api/tenant?name=Test-Admin",
|
||||
"all_tenants": False
|
||||
}],
|
||||
"user_profile_ref": "/api/useraccountprofile?name=Default-User-Account-Profile",
|
||||
"is_active": True,
|
||||
"is_superuser": True,
|
||||
"default_tenant_ref": "/api/tenant?name=admin"
|
||||
})
|
||||
avi_user.avi_ansible_api = Mock(return_value=data['mock_del_res'])
|
||||
response = avi_user.main()
|
||||
assert response['changed']
|
||||
assert not response['obj']
|
||||
assert response['old_obj']
|
|
@ -1,105 +0,0 @@
|
|||
# Copyright (c) 2018 Red Hat
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import pytest
|
||||
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args, exit_json, fail_json, AnsibleFailJson, AnsibleExitJson
|
||||
|
||||
from ansible.module_utils import basic
|
||||
from ansible_collections.community.general.plugins.modules.network.check_point import checkpoint_access_rule
|
||||
|
||||
OBJECT = {'layer': 'foo', 'position': 'bar', 'name': 'baz',
|
||||
'source': [{'name': 'lol'}], 'destination': [{'name': 'Any'}],
|
||||
'action': {'name': 'drop'}, 'enabled': True}
|
||||
PAYLOAD = {'layer': 'foo', 'position': 'bar', 'name': 'baz'}
|
||||
|
||||
|
||||
class TestCheckpointAccessRule(object):
|
||||
module = checkpoint_access_rule
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def module_mock(self, mocker):
|
||||
return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
|
||||
|
||||
@pytest.fixture
|
||||
def connection_mock(self, mocker):
|
||||
connection_class_mock = mocker.patch('ansible_collections.community.general.plugins.modules.network.check_point.checkpoint_access_rule.Connection')
|
||||
return connection_class_mock.return_value
|
||||
|
||||
@pytest.fixture
|
||||
def get_access_rule_200(self, mocker):
|
||||
mock_function = mocker.patch('ansible_collections.community.general.plugins.modules.network.check_point.checkpoint_access_rule.get_access_rule')
|
||||
mock_function.return_value = (200, OBJECT)
|
||||
return mock_function.return_value
|
||||
|
||||
@pytest.fixture
|
||||
def get_access_rule_404(self, mocker):
|
||||
mock_function = mocker.patch('ansible_collections.community.general.plugins.modules.network.check_point.checkpoint_access_rule.get_access_rule')
|
||||
mock_function.return_value = (404, 'Object not found')
|
||||
return mock_function.return_value
|
||||
|
||||
def test_create(self, get_access_rule_404, connection_mock):
|
||||
connection_mock.send_request.return_value = (200, OBJECT)
|
||||
result = self._run_module(PAYLOAD)
|
||||
|
||||
assert result['changed']
|
||||
assert 'checkpoint_access_rules' in result
|
||||
|
||||
def test_create_idempotent(self, get_access_rule_200, connection_mock):
|
||||
connection_mock.send_request.return_value = (200, PAYLOAD)
|
||||
result = self._run_module(PAYLOAD)
|
||||
|
||||
assert not result['changed']
|
||||
|
||||
def test_update(self, get_access_rule_200, connection_mock):
|
||||
payload_for_update = {'enabled': False}
|
||||
payload_for_update.update(PAYLOAD)
|
||||
connection_mock.send_request.return_value = (200, payload_for_update)
|
||||
result = self._run_module(payload_for_update)
|
||||
|
||||
assert result['changed']
|
||||
assert not result['checkpoint_access_rules']['enabled']
|
||||
|
||||
def test_delete(self, get_access_rule_200, connection_mock):
|
||||
connection_mock.send_request.return_value = (200, OBJECT)
|
||||
payload_for_delete = {'state': 'absent'}
|
||||
payload_for_delete.update(PAYLOAD)
|
||||
result = self._run_module(payload_for_delete)
|
||||
|
||||
assert result['changed']
|
||||
|
||||
def test_delete_idempotent(self, get_access_rule_404, connection_mock):
|
||||
payload = {'name': 'baz', 'state': 'absent'}
|
||||
connection_mock.send_request.return_value = (200, OBJECT)
|
||||
result = self._run_module(payload)
|
||||
|
||||
assert not result['changed']
|
||||
|
||||
def _run_module(self, module_args):
|
||||
set_module_args(module_args)
|
||||
with pytest.raises(AnsibleExitJson) as ex:
|
||||
self.module.main()
|
||||
return ex.value.args[0]
|
||||
|
||||
def _run_module_with_fail_json(self, module_args):
|
||||
set_module_args(module_args)
|
||||
with pytest.raises(AnsibleFailJson) as exc:
|
||||
self.module.main()
|
||||
result = exc.value.args[0]
|
||||
return result
|
|
@ -1,99 +0,0 @@
|
|||
# Copyright (c) 2018 Red Hat
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import pytest
|
||||
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args, exit_json, fail_json, AnsibleFailJson, AnsibleExitJson
|
||||
|
||||
from ansible.module_utils import basic
|
||||
from ansible_collections.community.general.plugins.modules.network.check_point import checkpoint_host
|
||||
|
||||
OBJECT = {'name': 'foo', 'ipv4-address': '192.168.0.15'}
|
||||
CREATE_PAYLOAD = {'name': 'foo', 'ip_address': '192.168.0.15'}
|
||||
UPDATE_PAYLOAD = {'name': 'foo', 'ip_address': '192.168.0.16'}
|
||||
DELETE_PAYLOAD = {'name': 'foo', 'state': 'absent'}
|
||||
|
||||
|
||||
class TestCheckpointHost(object):
|
||||
module = checkpoint_host
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def module_mock(self, mocker):
|
||||
return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
|
||||
|
||||
@pytest.fixture
|
||||
def connection_mock(self, mocker):
|
||||
connection_class_mock = mocker.patch('ansible_collections.community.general.plugins.modules.network.check_point.checkpoint_host.Connection')
|
||||
return connection_class_mock.return_value
|
||||
|
||||
@pytest.fixture
|
||||
def get_host_200(self, mocker):
|
||||
mock_function = mocker.patch('ansible_collections.community.general.plugins.modules.network.check_point.checkpoint_host.get_host')
|
||||
mock_function.return_value = (200, OBJECT)
|
||||
return mock_function.return_value
|
||||
|
||||
@pytest.fixture
|
||||
def get_host_404(self, mocker):
|
||||
mock_function = mocker.patch('ansible_collections.community.general.plugins.modules.network.check_point.checkpoint_host.get_host')
|
||||
mock_function.return_value = (404, 'Object not found')
|
||||
return mock_function.return_value
|
||||
|
||||
def test_create(self, get_host_404, connection_mock):
|
||||
connection_mock.send_request.return_value = (200, OBJECT)
|
||||
result = self._run_module(CREATE_PAYLOAD)
|
||||
|
||||
assert result['changed']
|
||||
assert 'checkpoint_hosts' in result
|
||||
|
||||
def test_create_idempotent(self, get_host_200, connection_mock):
|
||||
connection_mock.send_request.return_value = (200, OBJECT)
|
||||
result = self._run_module(CREATE_PAYLOAD)
|
||||
|
||||
assert not result['changed']
|
||||
|
||||
def test_update(self, get_host_200, connection_mock):
|
||||
connection_mock.send_request.return_value = (200, OBJECT)
|
||||
result = self._run_module(UPDATE_PAYLOAD)
|
||||
|
||||
assert result['changed']
|
||||
|
||||
def test_delete(self, get_host_200, connection_mock):
|
||||
connection_mock.send_request.return_value = (200, OBJECT)
|
||||
result = self._run_module(DELETE_PAYLOAD)
|
||||
|
||||
assert result['changed']
|
||||
|
||||
def test_delete_idempotent(self, get_host_404, connection_mock):
|
||||
connection_mock.send_request.return_value = (200, OBJECT)
|
||||
result = self._run_module(DELETE_PAYLOAD)
|
||||
|
||||
assert not result['changed']
|
||||
|
||||
def _run_module(self, module_args):
|
||||
set_module_args(module_args)
|
||||
with pytest.raises(AnsibleExitJson) as ex:
|
||||
self.module.main()
|
||||
return ex.value.args[0]
|
||||
|
||||
def _run_module_with_fail_json(self, module_args):
|
||||
set_module_args(module_args)
|
||||
with pytest.raises(AnsibleFailJson) as exc:
|
||||
self.module.main()
|
||||
result = exc.value.args[0]
|
||||
return result
|
|
@ -1,67 +0,0 @@
|
|||
# Copyright (c) 2018 Red Hat
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import pytest
|
||||
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args, exit_json, fail_json, AnsibleFailJson, AnsibleExitJson
|
||||
|
||||
from ansible.module_utils import basic
|
||||
from ansible_collections.community.general.plugins.modules.network.check_point import checkpoint_session
|
||||
|
||||
OBJECT = {'uid': '1234'}
|
||||
PAYLOAD = {}
|
||||
|
||||
|
||||
class TestCheckpointAccessRule(object):
|
||||
module = checkpoint_session
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def module_mock(self, mocker):
|
||||
return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
|
||||
|
||||
@pytest.fixture
|
||||
def connection_mock(self, mocker):
|
||||
connection_class_mock = mocker.patch('ansible_collections.community.general.plugins.modules.network.check_point.checkpoint_session.Connection')
|
||||
return connection_class_mock.return_value
|
||||
|
||||
@pytest.fixture
|
||||
def get_session_200(self, mocker):
|
||||
mock_function = mocker.patch('ansible_collections.community.general.plugins.modules.network.check_point.checkpoint_session.get_session')
|
||||
mock_function.return_value = (200, OBJECT)
|
||||
return mock_function.return_value
|
||||
|
||||
def test_publish(self, get_session_200, connection_mock):
|
||||
connection_mock.send_request.return_value = (200, OBJECT)
|
||||
result = self._run_module(PAYLOAD)
|
||||
|
||||
assert result['changed']
|
||||
assert 'checkpoint_session' in result
|
||||
|
||||
def _run_module(self, module_args):
|
||||
set_module_args(module_args)
|
||||
with pytest.raises(AnsibleExitJson) as ex:
|
||||
self.module.main()
|
||||
return ex.value.args[0]
|
||||
|
||||
def _run_module_with_fail_json(self, module_args):
|
||||
set_module_args(module_args)
|
||||
with pytest.raises(AnsibleFailJson) as exc:
|
||||
self.module.main()
|
||||
result = exc.value.args[0]
|
||||
return result
|
|
@ -1,99 +0,0 @@
|
|||
# Copyright (c) 2018 Red Hat
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import pytest
|
||||
from ansible_collections.community.general.tests.unit.plugins.modules.utils import set_module_args, exit_json, fail_json, AnsibleFailJson, AnsibleExitJson
|
||||
|
||||
from ansible.module_utils import basic
|
||||
from ansible_collections.community.general.plugins.modules.network.check_point import checkpoint_host
|
||||
|
||||
OBJECT = {'name': 'foo', 'ipv4-address': '192.168.0.15'}
|
||||
CREATE_PAYLOAD = {'name': 'foo', 'ip_address': '192.168.0.15'}
|
||||
UPDATE_PAYLOAD = {'name': 'foo', 'ip_address': '192.168.0.16'}
|
||||
DELETE_PAYLOAD = {'name': 'foo', 'state': 'absent'}
|
||||
|
||||
|
||||
class TestCheckpointHost(object):
|
||||
module = checkpoint_host
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def module_mock(self, mocker):
|
||||
return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)
|
||||
|
||||
@pytest.fixture
|
||||
def connection_mock(self, mocker):
|
||||
connection_class_mock = mocker.patch('ansible_collections.community.general.plugins.modules.network.check_point.checkpoint_host.Connection')
|
||||
return connection_class_mock.return_value
|
||||
|
||||
@pytest.fixture
|
||||
def get_host_200(self, mocker):
|
||||
mock_function = mocker.patch('ansible_collections.community.general.plugins.modules.network.check_point.checkpoint_host.get_host')
|
||||
mock_function.return_value = (200, OBJECT)
|
||||
return mock_function.return_value
|
||||
|
||||
@pytest.fixture
|
||||
def get_host_404(self, mocker):
|
||||
mock_function = mocker.patch('ansible_collections.community.general.plugins.modules.network.check_point.checkpoint_host.get_host')
|
||||
mock_function.return_value = (404, 'Object not found')
|
||||
return mock_function.return_value
|
||||
|
||||
def test_create(self, get_host_404, connection_mock):
|
||||
connection_mock.send_request.return_value = (200, OBJECT)
|
||||
result = self._run_module(CREATE_PAYLOAD)
|
||||
|
||||
assert result['changed']
|
||||
assert 'checkpoint_hosts' in result
|
||||
|
||||
def test_create_idempotent(self, get_host_200, connection_mock):
|
||||
connection_mock.send_request.return_value = (200, OBJECT)
|
||||
result = self._run_module(CREATE_PAYLOAD)
|
||||
|
||||
assert not result['changed']
|
||||
|
||||
def test_update(self, get_host_200, connection_mock):
|
||||
connection_mock.send_request.return_value = (200, OBJECT)
|
||||
result = self._run_module(UPDATE_PAYLOAD)
|
||||
|
||||
assert result['changed']
|
||||
|
||||
def test_delete(self, get_host_200, connection_mock):
|
||||
connection_mock.send_request.return_value = (200, OBJECT)
|
||||
result = self._run_module(DELETE_PAYLOAD)
|
||||
|
||||
assert result['changed']
|
||||
|
||||
def test_delete_idempotent(self, get_host_404, connection_mock):
|
||||
connection_mock.send_request.return_value = (200, OBJECT)
|
||||
result = self._run_module(DELETE_PAYLOAD)
|
||||
|
||||
assert not result['changed']
|
||||
|
||||
def _run_module(self, module_args):
|
||||
set_module_args(module_args)
|
||||
with pytest.raises(AnsibleExitJson) as ex:
|
||||
self.module.main()
|
||||
return ex.value.args[0]
|
||||
|
||||
def _run_module_with_fail_json(self, module_args):
|
||||
set_module_args(module_args)
|
||||
with pytest.raises(AnsibleFailJson) as exc:
|
||||
self.module.main()
|
||||
result = exc.value.args[0]
|
||||
return result
|
|
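The Check Point tests removed above all rely on the same mechanism: set_module_args() injects the module parameters, and the patched exit_json/fail_json raise AnsibleExitJson/AnsibleFailJson so the result dict can be inspected instead of the process exiting. A minimal, self-contained sketch of that mechanism follows; it deliberately uses a bare AnsibleModule rather than one of the removed Check Point modules, so nothing module-specific is assumed.

import pytest

from ansible.module_utils import basic
from ansible_collections.community.general.tests.unit.plugins.modules.utils import (
    set_module_args, exit_json, fail_json, AnsibleExitJson,
)


def test_exit_json_pattern(mocker):
    # The patched exit_json raises AnsibleExitJson instead of calling sys.exit(),
    # which is what lets the tests above read the module result directly.
    mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)

    # Inject the "task" arguments the module would normally read from stdin.
    set_module_args({'name': 'foo'})
    module = basic.AnsibleModule(argument_spec=dict(name=dict(type='str')))

    with pytest.raises(AnsibleExitJson) as ex:
        module.exit_json(changed=True, name=module.params['name'])

    result = ex.value.args[0]
    assert result['changed'] and result['name'] == 'foo'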
@ -1,90 +0,0 @@
|
|||
# Copyright (c) 2019 Red Hat
|
||||
#
|
||||
# This file is part of Ansible
|
||||
#
|
||||
# Ansible is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# Ansible is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
|
||||
#
|
||||
|
||||
# Make coding more python3-ish
|
||||
from __future__ import (absolute_import, division, print_function)
|
||||
__metaclass__ = type
|
||||
|
||||
import os
|
||||
import json
|
||||
from ansible_collections.community.general.tests.unit.plugins.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase
|
||||
|
||||
|
||||
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
|
||||
fixture_data = {}
|
||||
|
||||
|
||||
def load_fixture(module_name, name, device=''):
|
||||
path = os.path.join(fixture_path, module_name, device, name)
|
||||
if not os.path.exists(path):
|
||||
path = os.path.join(fixture_path, module_name, name)
|
||||
|
||||
if path in fixture_data:
|
||||
return fixture_data[path]
|
||||
|
||||
with open(path) as f:
|
||||
data = f.read()
|
||||
|
||||
try:
|
||||
data = json.loads(data)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
fixture_data[path] = data
|
||||
return data
|
||||
|
||||
|
||||
class TestCloudEngineModule(ModuleTestCase):
|
||||
|
||||
def execute_module(self, failed=False, changed=False, commands=None, sort=True, defaults=False):
|
||||
|
||||
self.load_fixtures(commands)
|
||||
|
||||
if failed:
|
||||
result = self.failed()
|
||||
self.assertTrue(result['failed'], result)
|
||||
else:
|
||||
result = self.changed(changed)
|
||||
self.assertEqual(result['changed'], changed, result)
|
||||
|
||||
if commands is not None:
|
||||
if sort:
|
||||
self.assertEqual(sorted(commands), sorted(result['commands']), result['commands'])
|
||||
else:
|
||||
self.assertEqual(commands, result['commands'], result['commands'])
|
||||
|
||||
return result
|
||||
|
||||
def failed(self):
|
||||
with self.assertRaises(AnsibleFailJson) as exc:
|
||||
self.module.main()
|
||||
|
||||
result = exc.exception.args[0]
|
||||
self.assertTrue(result['failed'], result)
|
||||
return result
|
||||
|
||||
def changed(self, changed=False):
|
||||
with self.assertRaises(AnsibleExitJson) as exc:
|
||||
self.module.main()
|
||||
|
||||
result = exc.exception.args[0]
|
||||
self.assertEqual(result['changed'], changed, result)
|
||||
return result
|
||||
|
||||
def load_fixtures(self, commands=None):
|
||||
pass
|
|
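TestCloudEngineModule above is only a harness: concrete tests assign a module and usually override load_fixtures(). The contract it depends on is that self.module.main() raises AnsibleExitJson/AnsibleFailJson carrying the result dict, which the patched exit_json/fail_json from ModuleTestCase guarantee. A hedged sketch of a subclass is shown below; FakeCeModule and its result are illustrative stand-ins for a real CloudEngine module, and the snippet assumes it sits in the same file as the base class.

from ansible_collections.community.general.tests.unit.plugins.modules.utils import AnsibleExitJson


class FakeCeModule(object):
    # Stand-in for a real ce_* module: a real module would parse params and talk
    # to the device; here main() just raises the result the way the patched
    # exit_json would.
    @staticmethod
    def main():
        raise AnsibleExitJson({'changed': True, 'commands': ['lldp enable']})


class TestFakeCeModule(TestCloudEngineModule):
    module = FakeCeModule

    def test_changed(self):
        # execute_module() drives main(), checks 'changed', and compares the
        # (sorted) command list against the expectation.
        result = self.execute_module(changed=True, commands=['lldp enable'])
        assert result['changed']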
@ -1,11 +0,0 @@
|
|||
<data>
|
||||
<isiscomm xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
|
||||
<isSites>
|
||||
<isSite>
|
||||
<instanceId>100</instanceId>
|
||||
<vpnName>_public_</vpnName>
|
||||
<description>ISIS</description>
|
||||
</isSite>
|
||||
</isSites>
|
||||
</isiscomm>
|
||||
</data>
|
|
@ -1,11 +0,0 @@
|
|||
<data>
|
||||
<isiscomm xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
|
||||
<isSites>
|
||||
<isSite>
|
||||
<instanceId></instanceId>
|
||||
<vpnName></vpnName>
|
||||
<description></description>
|
||||
</isSite>
|
||||
</isSites>
|
||||
</isiscomm>
|
||||
</data>
|
|
@ -1,26 +0,0 @@
|
|||
<data>
|
||||
<isiscomm>
|
||||
<isSites>
|
||||
<isSite>
|
||||
<instanceId>100</instanceId>
|
||||
<isCircuits>
|
||||
<isCircuit>
|
||||
<ifName></ifName>
|
||||
<circuitLevelType>level_1</circuitLevelType>
|
||||
<level1DisPriority>10</level1DisPriority>
|
||||
<level2DisPriority>10</level2DisPriority>
|
||||
<silentEnable>true</silentEnable>
|
||||
<silentCost>true</silentCost>
|
||||
<typeP2pEnable>true</typeP2pEnable>
|
||||
<snpaCheck>true</snpaCheck>
|
||||
<p2pNegotiationMode>2_way</p2pNegotiationMode>
|
||||
<p2pPeerIPIgnore>true</p2pPeerIPIgnore>
|
||||
<pPPOsicpCheckEnable>true</pPPOsicpCheckEnable>
|
||||
<level1Cost>10</level1Cost>
|
||||
<level2Cost>10</level2Cost>
|
||||
</isCircuit>
|
||||
</isCircuits>
|
||||
</isSite>
|
||||
</isSites>
|
||||
</isiscomm>
|
||||
</data>
|
|
@ -1,26 +0,0 @@
|
|||
<data>
|
||||
<isiscomm>
|
||||
<isSites>
|
||||
<isSite>
|
||||
<instanceId>100</instanceId>
|
||||
<isCircuits>
|
||||
<isCircuit>
|
||||
<ifName></ifName>
|
||||
<circuitLevelType></circuitLevelType>
|
||||
<level1DisPriority></level1DisPriority>
|
||||
<level2DisPriority></level2DisPriority>
|
||||
<silentEnable></silentEnable>
|
||||
<silentCost></silentCost>
|
||||
<typeP2pEnable></typeP2pEnable>
|
||||
<snpaCheck></snpaCheck>
|
||||
<p2pNegotiationMode></p2pNegotiationMode>
|
||||
<p2pPeerIPIgnore></p2pPeerIPIgnore>
|
||||
<pPPOsicpCheckEnable></pPPOsicpCheckEnable>
|
||||
<level1Cost></level1Cost>
|
||||
<level2Cost></level2Cost>
|
||||
</isCircuit>
|
||||
</isCircuits>
|
||||
</isSite>
|
||||
</isSites>
|
||||
</isiscomm>
|
||||
</data>
|
|
@ -1,104 +0,0 @@
|
|||
<data>
|
||||
<isiscomm>
|
||||
<isSites>
|
||||
<isSite>
|
||||
<instanceId>100</instanceId>
|
||||
<vpnName>_public_</vpnName>
|
||||
<description>ISIS</description>
|
||||
<isLevel>level_1</isLevel>
|
||||
<costStyle>narrow</costStyle>
|
||||
<relaxSpfLimit>true</relaxSpfLimit>
|
||||
<stdLevel1Cost>60</stdLevel1Cost>
|
||||
<stdLevel2Cost>60</stdLevel2Cost>
|
||||
<stdbandwidth>100</stdbandwidth>
|
||||
<stdAutoCostEnable>true</stdAutoCostEnable>
|
||||
<stdAutoCostEnableCompatible>true</stdAutoCostEnableCompatible>
|
||||
<isNetEntitys>
|
||||
<isNetEntity>
|
||||
<netEntity>netentity</netEntity>
|
||||
</isNetEntity>
|
||||
</isNetEntitys>
|
||||
<isSiteMTs>
|
||||
<isSiteMT>
|
||||
<addressFamily>afIpv4</addressFamily>
|
||||
<mtId>0</mtId>
|
||||
<bfdMinRx>100</bfdMinRx>
|
||||
<bfdMinTx>100</bfdMinTx>
|
||||
<bfdMultNum>10</bfdMultNum>
|
||||
<maxLoadBalancing>32</maxLoadBalancing>
|
||||
<isPreferences>
|
||||
<isPreference>
|
||||
<preferenceValue>100</preferenceValue>
|
||||
<routePolicyName>route</routePolicyName>
|
||||
</isPreference>
|
||||
</isPreferences>
|
||||
<isNextHopWeights>
|
||||
<isNextHopWeight>
|
||||
<ipAddress>1.1.1.1</ipAddress>
|
||||
<weight>100</weight>
|
||||
</isNextHopWeight>
|
||||
</isNextHopWeights>
|
||||
<isFilterImports>
|
||||
<isFilterImport>
|
||||
<aclNumOrName>3001</aclNumOrName>
|
||||
<ipPrefix>ip</ipPrefix>
|
||||
<routePolicyName>route</routePolicyName>
|
||||
<policyType>level_1</policyType>
|
||||
</isFilterImport>
|
||||
</isFilterImports>
|
||||
<isFilterExports>
|
||||
<isFilterExport>
|
||||
<protocol>ospf</protocol>
|
||||
<processId>100</processId>
|
||||
<policyType>level_1</policyType>
|
||||
</isFilterExport>
|
||||
</isFilterExports>
|
||||
<isDefaultRoutes>
|
||||
<isDefaultRoute>
|
||||
<defaultMode>always</defaultMode>
|
||||
<routePolicyName>mode</routePolicyName>
|
||||
<cost>100</cost>
|
||||
<tag>100</tag>
|
||||
<levelType>level_1</levelType>
|
||||
<avoidLearning>true</avoidLearning>
|
||||
</isDefaultRoute>
|
||||
</isDefaultRoutes>
|
||||
<isImportRoutes>
|
||||
<isImportRoute>
|
||||
<protocol>import</protocol>
|
||||
<processId>100</processId>
|
||||
<costType>level_1</costType>
|
||||
<cost>100</cost>
|
||||
<tag>100</tag>
|
||||
<policyType>level_1</policyType>
|
||||
<routePolicyName>import</routePolicyName>
|
||||
<levelType>level_1</levelType>
|
||||
<inheritCost>100</inheritCost>
|
||||
<permitIbgp>true</permitIbgp>
|
||||
</isImportRoute>
|
||||
</isImportRoutes>
|
||||
<isLeakRouteLevel1ToLevel2s>
|
||||
<isLeakRouteLevel1ToLevel2>
|
||||
<tag>100</tag>
|
||||
<routePolicyName>route</routePolicyName>
|
||||
<aclNumOrName>3001</aclNumOrName>
|
||||
<ipPrefix>ip</ipPrefix>
|
||||
<leakEnableFlag>true</leakEnableFlag>
|
||||
<allowFilter>true</allowFilter>
|
||||
</isLeakRouteLevel1ToLevel2>
|
||||
</isLeakRouteLevel1ToLevel2s>
|
||||
<isLeakRouteLevel2ToLevel1s>
|
||||
<isLeakRouteLevel2ToLevel1>
|
||||
<tag>100</tag>
|
||||
<routePolicyName>route</routePolicyName>
|
||||
<aclNumOrName>3001</aclNumOrName>
|
||||
<ipPrefix>ip</ipPrefix>
|
||||
<allowFilter>true</allowFilter>
|
||||
</isLeakRouteLevel2ToLevel1>
|
||||
</isLeakRouteLevel2ToLevel1s>
|
||||
</isSiteMT>
|
||||
</isSiteMTs>
|
||||
</isSite>
|
||||
</isSites>
|
||||
</isiscomm>
|
||||
</data>
|
|
@ -1,10 +0,0 @@
|
|||
<data>
|
||||
<isiscomm>
|
||||
<isSites>
|
||||
<isSite>
|
||||
<instanceId>100</instanceId>
|
||||
<vpnName>_public_</vpnName>
|
||||
</isSite>
|
||||
</isSites>
|
||||
</isiscomm>
|
||||
</data>
|
|
@ -1,26 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" message-id="1024">
|
||||
<data>
|
||||
<ifmtrunk xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
|
||||
<TrunkIfs>
|
||||
<TrunkIf>
|
||||
<ifName>Eth-Trunk10</ifName>
|
||||
<lacpTrunk>
|
||||
<isSupportPrmpt>false</isSupportPrmpt>
|
||||
<rcvTimeoutType>Fast</rcvTimeoutType>
|
||||
<fastTimeoutUserDefinedValue>3</fastTimeoutUserDefinedValue>
|
||||
<selectPortStd>Speed</selectPortStd>
|
||||
<promptDelay>30</promptDelay>
|
||||
<maxActiveNum>1</maxActiveNum>
|
||||
<collectMaxDelay>0</collectMaxDelay>
|
||||
<mixRateEnable>false</mixRateEnable>
|
||||
<dampStaFlapEn>false</dampStaFlapEn>
|
||||
<dampUnexpMacEn>false</dampUnexpMacEn>
|
||||
<trunkSysMac>11-22-33</trunkSysMac>
|
||||
<trunkPortIdExt>false</trunkPortIdExt>
|
||||
</lacpTrunk>
|
||||
</TrunkIf>
|
||||
</TrunkIfs>
|
||||
</ifmtrunk>
|
||||
</data>
|
||||
</rpc-reply>
|
|
@ -1,26 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" message-id="1024">
|
||||
<data>
|
||||
<ifmtrunk xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
|
||||
<TrunkIfs>
|
||||
<TrunkIf>
|
||||
<ifName>Eth-Trunk10</ifName>
|
||||
<lacpTrunk>
|
||||
<isSupportPrmpt>true</isSupportPrmpt>
|
||||
<rcvTimeoutType>Fast</rcvTimeoutType>
|
||||
<fastTimeoutUserDefinedValue>10</fastTimeoutUserDefinedValue>
|
||||
<selectPortStd>Speed</selectPortStd>
|
||||
<promptDelay>130</promptDelay>
|
||||
<maxActiveNum>13</maxActiveNum>
|
||||
<collectMaxDelay>12</collectMaxDelay>
|
||||
<mixRateEnable>true</mixRateEnable>
|
||||
<dampStaFlapEn>true</dampStaFlapEn>
|
||||
<dampUnexpMacEn>true</dampUnexpMacEn>
|
||||
<trunkSysMac>0000-1111-2222</trunkSysMac>
|
||||
<trunkPortIdExt>true</trunkPortIdExt>
|
||||
</lacpTrunk>
|
||||
</TrunkIf>
|
||||
</TrunkIfs>
|
||||
</ifmtrunk>
|
||||
</data>
|
||||
</rpc-reply>
|
|
@ -1,10 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" message-id="1024">
|
||||
<data>
|
||||
<ifmtrunk xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
|
||||
<lacpSysInfo>
|
||||
<priority>32768</priority>
|
||||
</lacpSysInfo>
|
||||
</ifmtrunk>
|
||||
</data>
|
||||
</rpc-reply>
|
|
@ -1,10 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" message-id="1024">
|
||||
<data>
|
||||
<ifmtrunk xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
|
||||
<lacpSysInfo>
|
||||
<priority>32769</priority>
|
||||
</lacpSysInfo>
|
||||
</ifmtrunk>
|
||||
</data>
|
||||
</rpc-reply>
|
|
@ -1,21 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" message-id="1024">
|
||||
<data>
|
||||
<lldp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
|
||||
<lldpSys>
|
||||
<lldpSysParameter>
|
||||
<messageTxInterval>30</messageTxInterval>
|
||||
<messageTxHoldMultiplier>4</messageTxHoldMultiplier>
|
||||
<reinitDelay>2</reinitDelay>
|
||||
<txDelay>2</txDelay>
|
||||
<notificationInterval>5</notificationInterval>
|
||||
<fastMessageCount>4</fastMessageCount>
|
||||
<mdnNotificationInterval>5</mdnNotificationInterval>
|
||||
<mdnNotificationEnable>disabled</mdnNotificationEnable>
|
||||
<configManAddr></configManAddr>
|
||||
<bindifName></bindifName>
|
||||
</lldpSysParameter>
|
||||
</lldpSys>
|
||||
</lldp>
|
||||
</data>
|
||||
</rpc-reply>
|
|
@ -1,21 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" message-id="1024">
|
||||
<data>
|
||||
<lldp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
|
||||
<lldpSys>
|
||||
<lldpSysParameter>
|
||||
<messageTxInterval>8</messageTxInterval>
|
||||
<messageTxHoldMultiplier>8</messageTxHoldMultiplier>
|
||||
<reinitDelay>8</reinitDelay>
|
||||
<txDelay>8</txDelay>
|
||||
<notificationInterval>8</notificationInterval>
|
||||
<fastMessageCount>8</fastMessageCount>
|
||||
<mdnNotificationInterval>8</mdnNotificationInterval>
|
||||
<mdnNotificationEnable>enabled</mdnNotificationEnable>
|
||||
<configManAddr>1.1.1.1</configManAddr>
|
||||
<bindifName>bind-name</bindifName>
|
||||
</lldpSysParameter>
|
||||
</lldpSys>
|
||||
</lldp>
|
||||
</data>
|
||||
</rpc-reply>
|
|
@ -1,11 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" message-id="1024">
|
||||
<data>
|
||||
<lldp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
|
||||
<lldpSys>
|
||||
<lldpEnable>disabled</lldpEnable>
|
||||
<mdnStatus>disabled</mdnStatus>
|
||||
</lldpSys>
|
||||
</lldp>
|
||||
</data>
|
||||
</rpc-reply>
|
|
@ -1,11 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rpc-reply xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" message-id="1024">
|
||||
<data>
|
||||
<lldp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
|
||||
<lldpSys>
|
||||
<lldpEnable>enabled</lldpEnable>
|
||||
<mdnStatus>rxOnly</mdnStatus>
|
||||
</lldpSys>
|
||||
</lldp>
|
||||
</data>
|
||||
</rpc-reply>
|
|
@ -1,3 +0,0 @@
|
|||
<rpc-reply message-id="801" xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" flow-id="98">
|
||||
<ok/>
|
||||
</rpc-reply>
|
|
@ -1,29 +0,0 @@
|
|||
<data>
|
||||
<lldp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
|
||||
<lldpInterfaces>
|
||||
<lldpInterface>
|
||||
<ifName>10GE1/0/1</ifName>
|
||||
<lldpAdminStatus>txAndRx</lldpAdminStatus>
|
||||
<msgInterval operation="merge">
|
||||
<txInterval>8</txInterval>
|
||||
</msgInterval>
|
||||
<tlvTxEnable>
|
||||
<manAddrTxEnable>true</manAddrTxEnable>
|
||||
<portDescTxEnable>true</portDescTxEnable>
|
||||
<sysCapTxEnable>true</sysCapTxEnable>
|
||||
<sysDescTxEnable>true</sysDescTxEnable>
|
||||
<sysNameTxEnable>true</sysNameTxEnable>
|
||||
<portVlanTxEnable>true</portVlanTxEnable>
|
||||
<protoVlanTxEnable>true</protoVlanTxEnable>
|
||||
<txProtocolVlanId>112</txProtocolVlanId>
|
||||
<vlanNameTxEnable>true</vlanNameTxEnable>
|
||||
<txVlanNameId>32</txVlanNameId>
|
||||
<linkAggreTxEnable>true</linkAggreTxEnable>
|
||||
<macPhyTxEnable>true</macPhyTxEnable>
|
||||
<maxFrameTxEnable>true</maxFrameTxEnable>
|
||||
<eee>true</eee>
|
||||
</tlvTxEnable>
|
||||
</lldpInterface>
|
||||
</lldpInterfaces>
|
||||
</lldp>
|
||||
</data>
|
|
@ -1,29 +0,0 @@
|
|||
<data>
|
||||
<lldp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
|
||||
<lldpInterfaces>
|
||||
<lldpInterface>
|
||||
<ifName>10GE1/0/1</ifName>
|
||||
<lldpAdminStatus>txOnly</lldpAdminStatus>
|
||||
<msgInterval operation="merge">
|
||||
<txInterval>1</txInterval>
|
||||
</msgInterval>
|
||||
<tlvTxEnable>
|
||||
<manAddrTxEnable>false</manAddrTxEnable>
|
||||
<portDescTxEnable>false</portDescTxEnable>
|
||||
<sysCapTxEnable>false</sysCapTxEnable>
|
||||
<sysDescTxEnable>false</sysDescTxEnable>
|
||||
<sysNameTxEnable>false</sysNameTxEnable>
|
||||
<portVlanTxEnable>false</portVlanTxEnable>
|
||||
<protoVlanTxEnable>false</protoVlanTxEnable>
|
||||
<txProtocolVlanId></txProtocolVlanId>
|
||||
<vlanNameTxEnable>false</vlanNameTxEnable>
|
||||
<txVlanNameId></txVlanNameId>
|
||||
<linkAggreTxEnable>false</linkAggreTxEnable>
|
||||
<macPhyTxEnable>false</macPhyTxEnable>
|
||||
<maxFrameTxEnable>false</maxFrameTxEnable>
|
||||
<eee></eee>
|
||||
</tlvTxEnable>
|
||||
</lldpInterface>
|
||||
</lldpInterfaces>
|
||||
</lldp>
|
||||
</data>
|
|
@ -1,3 +0,0 @@
|
|||
<rpc-reply message-id="801" xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" flow-id="98">
|
||||
<ok/>
|
||||
</rpc-reply>
|
|
@ -1,14 +0,0 @@
|
|||
<data>
|
||||
<lldp>
|
||||
<lldpSys>
|
||||
<lldpEnable>enabled</lldpEnable>
|
||||
<mdnStatus>enabled</mdnStatus>
|
||||
</lldpSys>
|
||||
<mdnInterfaces>
|
||||
<mdnInterface>
|
||||
<ifName>10GE1/0/1</ifName>
|
||||
<mdnStatus>rxOnly</mdnStatus>
|
||||
</mdnInterface>
|
||||
</mdnInterfaces>
|
||||
</lldp>
|
||||
</data>
|
|
@ -1,14 +0,0 @@
|
|||
<data>
|
||||
<lldp>
|
||||
<lldpSys>
|
||||
<lldpEnable>disabled</lldpEnable>
|
||||
<mdnStatus>disabled</mdnStatus>
|
||||
</lldpSys>
|
||||
<mdnInterfaces>
|
||||
<mdnInterface>
|
||||
<ifName>10GE1/0/1</ifName>
|
||||
<mdnStatus>disabled</mdnStatus>
|
||||
</mdnInterface>
|
||||
</mdnInterfaces>
|
||||
</lldp>
|
||||
</data>
|
|
@ -1,10 +0,0 @@
|
|||
<data>
|
||||
<mcastbase>
|
||||
<mcastAfsEnables>
|
||||
<mcastAfsEnable>
|
||||
<vrfName>vpna</vrfName>
|
||||
<addressFamily>ipv4unicast</addressFamily>
|
||||
</mcastAfsEnable>
|
||||
</mcastAfsEnables>
|
||||
</mcastbase>
|
||||
</data>
|
|
@ -1 +0,0 @@
|
|||
<data/>
|
|
@ -1,22 +0,0 @@
|
|||
<data>
|
||||
<l2mc>
|
||||
<l2McSnpgEnables>
|
||||
<l2McSnpgEnable>
|
||||
<addrFamily>ipv4unicast</addrFamily>
|
||||
<sendQueryEnable>false</sendQueryEnable>
|
||||
<sendQuerySrcIpAddr>192.168.0.1</sendQuerySrcIpAddr>
|
||||
</l2McSnpgEnable>
|
||||
</l2McSnpgEnables>
|
||||
<vlan>
|
||||
<l2McVlanCfgs>
|
||||
<l2McVlanCfg>
|
||||
<addrFamily>ipv4unicast</addrFamily>
|
||||
<vlanId>1</vlanId>
|
||||
<version>2</version>
|
||||
<snoopingEnable>true</snoopingEnable>
|
||||
<proxyEnable>true</proxyEnable>
|
||||
</l2McVlanCfg>
|
||||
</l2McVlanCfgs>
|
||||
</vlan>
|
||||
</l2mc>
|
||||
</data>
|
|
@ -1 +0,0 @@
|
|||
<data/>
|
|
@ -1,3 +0,0 @@
|
|||
<rpc-reply message-id="801" xmlns="urn:ietf:params:xml:ns:netconf:base:1.0" flow-id="98">
|
||||
<ok/>
|
||||
</rpc-reply>
|
|
@ -1,18 +0,0 @@
|
|||
<data>
|
||||
<staticrt xmlns="http://www.huawei.com/netconf/vrp" format-version="1.0" content-version="1.0">
|
||||
<staticrtbase>
|
||||
<srBfdParas>
|
||||
<srBfdPara>
|
||||
<afType>ipv4unicast</afType>
|
||||
<ifName>Ethernet3/0/0</ifName>
|
||||
<destVrfName>_public_</destVrfName>
|
||||
<nexthop>192.168.2.2</nexthop>
|
||||
<localAddress>192.168.2.1</localAddress>
|
||||
<minRxInterval>50</minRxInterval>
|
||||
<minTxInterval>50</minTxInterval>
|
||||
<multiplier>3</multiplier>
|
||||
</srBfdPara>
|
||||
</srBfdParas>
|
||||
</staticrtbase>
|
||||
</staticrt>
|
||||
</data>
|
|
@ -1,18 +0,0 @@
|
|||
<data>
|
||||
<staticrt xmlns="http://www.huawei.com/netconf/vrp" format-version="1.0" content-version="1.0">
|
||||
<staticrtbase>
|
||||
<srBfdParas>
|
||||
<srBfdPara>
|
||||
<afType>ipv4unicast</afType>
|
||||
<ifName>Ethernet3/0/0</ifName>
|
||||
<destVrfName>_public_</destVrfName>
|
||||
<nexthop>192.168.2.2</nexthop>
|
||||
<localAddress>192.168.2.1</localAddress>
|
||||
<minRxInterval>50</minRxInterval>
|
||||
<minTxInterval>50</minTxInterval>
|
||||
<multiplier>3</multiplier>
|
||||
</srBfdPara>
|
||||
</srBfdParas>
|
||||
</staticrtbase>
|
||||
</staticrt>
|
||||
</data>
|
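The remaining deletions are netconf reply fixtures for the Huawei CloudEngine (ce_*) tests: each one is the raw <data> payload that a test feeds back from a mocked netconf get call (typically via load_fixture() defined earlier). As a hedged illustration of how a test might pull values out of such a reply, here is a small, self-contained parser over an inline copy of the static-route BFD fixture above; the helper name parse_sr_bfd and the dict shape are assumptions for the example, not code from the removed tests.

import xml.etree.ElementTree as ET

# Inline copy mirroring the <staticrt> fixture above; a real test would read
# the text with load_fixture() instead of embedding it.
STATICRT_REPLY = """<data>
  <staticrt xmlns="http://www.huawei.com/netconf/vrp" format-version="1.0" content-version="1.0">
    <staticrtbase>
      <srBfdParas>
        <srBfdPara>
          <afType>ipv4unicast</afType>
          <ifName>Ethernet3/0/0</ifName>
          <nexthop>192.168.2.2</nexthop>
          <minRxInterval>50</minRxInterval>
          <minTxInterval>50</minTxInterval>
          <multiplier>3</multiplier>
        </srBfdPara>
      </srBfdParas>
    </staticrtbase>
  </staticrt>
</data>"""

NS = {'vrp': 'http://www.huawei.com/netconf/vrp'}


def parse_sr_bfd(xml_text):
    # Return the BFD session parameters as a plain dict for easy assertions.
    root = ET.fromstring(xml_text)
    para = root.find('.//vrp:srBfdPara', NS)
    return {child.tag.split('}')[-1]: child.text for child in para}


print(parse_sr_bfd(STATICRT_REPLY)['nexthop'])  # 192.168.2.2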
Some files were not shown because too many files have changed in this diff.