Fix netapp modules (#76)

* Replace missing netapp parts with own copy.

* Localize final fragment.

* Mark netapps docs fragment as deprecated.

* Drop dependency on netapp.ontap.

* Remove all netapp_e_* modules.

* Remove docs fragment.
This commit is contained in:
Felix Fontein 2020-03-31 09:41:29 +02:00 committed by GitHub
commit 6172e56b62
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
125 changed files with 829 additions and 20206 deletions

View file

@@ -1,10 +0,0 @@
# This test is not enabled by default, but can be utilized by defining required variables in integration_config.yml
# Example integration_config.yml:
# ---
#netapp_e_api_host: 10.113.1.111:8443
#netapp_e_api_username: admin
#netapp_e_api_password: myPass
#netapp_e_ssid: 1
unsupported
netapp/eseries

View file

@@ -1 +0,0 @@
# Role entry point: all test logic lives in run.yml.
- include_tasks: run.yml

View file

@@ -1,39 +0,0 @@
---
# Test code for the netapp_e_alerts module
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

# Abort early when the required connection variables are not supplied
# via integration_config.yml.
- name: NetApp Test Alerts module
  fail:
    msg: 'Please define netapp_e_api_username, netapp_e_api_password, netapp_e_api_host, and netapp_e_ssid.'
  when: netapp_e_api_username is undefined or netapp_e_api_password is undefined
        or netapp_e_api_host is undefined or netapp_e_ssid is undefined
  vars:
    # Shared module arguments; reused below through the YAML anchor.
    defaults: &defaults
      api_url: "https://{{ netapp_e_api_host }}/devmgr/v2"
      api_username: "{{ netapp_e_api_username }}"
      api_password: "{{ netapp_e_api_password }}"
      ssid: "{{ netapp_e_ssid }}"
      validate_certs: false
      state: enabled
      server: mail.example.com
      sender: noreply@example.com
      recipients:
        - noreply@example.com

# NOTE(review): the fact name 'vars' shadows Ansible's reserved 'vars'
# construct; kept as-is for backward compatibility, and the fact is not
# actually consumed by any task below (they use the anchor directly).
- name: set default vars
  set_fact:
    vars: *defaults

- name: Set the initial alerting settings
  netapp_e_alerts:
    <<: *defaults
  register: result

- name: Validate the idempotency of the module
  netapp_e_alerts:
    <<: *defaults
  register: result

- name: Ensure we still have the same settings, but had no change
  assert:
    that: not result.changed

View file

@@ -1,10 +0,0 @@
# This test is not enabled by default, but can be utilized by defining required variables in integration_config.yml
# Example integration_config.yml:
# ---
#netapp_e_api_host: 10.113.1.111:8443
#netapp_e_api_username: admin
#netapp_e_api_password: myPass
#netapp_e_ssid: 1
unsupported
netapp/eseries

View file

@@ -1 +0,0 @@
# Role entry point: all test logic lives in run.yml.
- include_tasks: run.yml

View file

@@ -1,233 +0,0 @@
---
# Test code for the netapp_e_asup module
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

# Abort early when the required connection variables are not supplied
# via integration_config.yml.
- name: NetApp Test ASUP module
  fail:
    msg: 'Please define netapp_e_api_username, netapp_e_api_password, netapp_e_api_host, and netapp_e_ssid.'
  when: netapp_e_api_username is undefined or netapp_e_api_password is undefined
        or netapp_e_api_host is undefined or netapp_e_ssid is undefined
  vars:
    # Shared connection arguments; reused below through the YAML anchor.
    credentials: &creds
      api_url: "https://{{ netapp_e_api_host }}/devmgr/v2"
      api_username: "{{ netapp_e_api_username }}"
      api_password: "{{ netapp_e_api_password }}"
      ssid: "{{ netapp_e_ssid }}"
      validate_certs: false

- name: set credentials
  set_fact:
    credentials: *creds

- name: Show some debug information
  debug:
    msg: "Using user={{ credentials.api_username }} on server={{ credentials.api_url }}."

# ****************************************************
# *** Enable auto-support using all default values ***
# ****************************************************
- name: Enable auto-support using default values
  netapp_e_asup:
    <<: *creds
    verbose: true

- name: Collect auto-support state information from the array
  uri:
    url: "{{ credentials.api_url }}/device-asup"
    user: "{{ credentials.api_username }}"
    password: "{{ credentials.api_password }}"
    body_format: json
    validate_certs: false
  register: current

- name: Validate auto-support expected default state
  assert:
    that:
      - current.json.asupEnabled
      - current.json.onDemandEnabled
      - current.json.remoteDiagsEnabled
      - current.json.schedule.dailyMinTime == 0
      - current.json.schedule.dailyMaxTime == 1439
    msg: "Unexpected auto-support state"

- name: Validate auto-support schedule
  assert:
    that: item in current.json.schedule.daysOfWeek
    msg: "{{ item }} is missing from the schedule"
  loop:
    - monday
    - tuesday
    - wednesday
    - thursday
    - friday
    - saturday
    - sunday

# ****************************
# *** Disable auto-support ***
# ****************************
- name: Disable auto-support
  netapp_e_asup:
    <<: *creds
    state: disabled

- name: Collect auto-support state information from the array
  uri:
    url: "{{ credentials.api_url }}/device-asup"
    user: "{{ credentials.api_username }}"
    password: "{{ credentials.api_password }}"
    body_format: json
    validate_certs: false
  register: current

- name: Validate auto-support is disabled
  assert:
    that: not current.json.asupEnabled
    msg: "Auto-support failed to be disabled"

# ****************************************************
# *** Enable auto-support using specific values    ***
# ****************************************************
- name: Enable auto-support using specific values
  netapp_e_asup:
    <<: *creds
    state: enabled
    active: true
    start: 22
    end: 24
    days:
      - friday
      - saturday
    verbose: true

- name: Collect auto-support state information from the array
  uri:
    url: "{{ credentials.api_url }}/device-asup"
    user: "{{ credentials.api_username }}"
    password: "{{ credentials.api_password }}"
    body_format: json
    validate_certs: false
  register: current

- name: Validate auto-support expected state
  assert:
    that:
      - current.json.asupEnabled
      - current.json.onDemandEnabled
      - current.json.remoteDiagsEnabled
      - current.json.schedule.dailyMinTime == (22 * 60)
      - current.json.schedule.dailyMaxTime == (24 * 60 - 1)
    msg: "Unexpected auto-support state"

- name: Validate auto-support schedule
  assert:
    that: item in current.json.schedule.daysOfWeek
    msg: "{{ item }} is missing from the schedule"
  loop:
    - friday
    - saturday

# ***********************************
# *** Alter auto-support schedule ***
# ***********************************
- name: Alter auto-support schedule
  netapp_e_asup:
    <<: *creds
    state: enabled
    active: true
    start: 0
    end: 5
    days:
      - monday
      - thursday
      - sunday
    verbose: true

- name: Collect auto-support state information from the array
  uri:
    url: "{{ credentials.api_url }}/device-asup"
    user: "{{ credentials.api_username }}"
    password: "{{ credentials.api_password }}"
    body_format: json
    validate_certs: false
  register: current

- name: Validate auto-support expected state
  assert:
    that:
      - current.json.asupEnabled
      - current.json.onDemandEnabled
      - current.json.remoteDiagsEnabled
      - current.json.schedule.dailyMinTime == (0 * 60)
      - current.json.schedule.dailyMaxTime == (5 * 60)
    msg: "Unexpected auto-support state"

- name: Validate auto-support schedule
  assert:
    that: item in current.json.schedule.daysOfWeek
    msg: "{{ item }} is missing from the schedule"
  loop:
    - monday
    - thursday
    - sunday

# *************************************************************
# *** Repeat previous test to verify state remains the same ***
# *************************************************************
- name: Repeat auto-support schedule change to verify idempotency
  netapp_e_asup:
    <<: *creds
    state: enabled
    active: true
    start: 0
    end: 5
    days:
      - monday
      - thursday
      - sunday
    verbose: true
  register: result

- name: Collect auto-support state information from the array
  uri:
    url: "{{ credentials.api_url }}/device-asup"
    user: "{{ credentials.api_username }}"
    password: "{{ credentials.api_password }}"
    body_format: json
    validate_certs: false
  register: current

- name: Validate auto-support expected state
  assert:
    that:
      - current.json.asupEnabled
      - current.json.onDemandEnabled
      - current.json.remoteDiagsEnabled
      - current.json.schedule.dailyMinTime == (0 * 60)
      - current.json.schedule.dailyMaxTime == (5 * 60)
    msg: "Unexpected auto-support state"

- name: Validate auto-support schedule
  assert:
    that: item in current.json.schedule.daysOfWeek
    msg: "{{ item }} is missing from the schedule"
  loop:
    - monday
    - thursday
    - sunday

- name: Validate change was not detected
  assert:
    that: not result.changed
    msg: "Invalid change was detected"

# ***********************************
# *** Disable auto-support active ***
# ***********************************
- name: Disable auto-support active state
  netapp_e_asup:
    <<: *creds
    state: enabled
    active: false
    start: 0
    end: 5
    days:
      - monday
      - thursday
      - sunday
    verbose: true

- name: Collect auto-support state information from the array
  uri:
    url: "{{ credentials.api_url }}/device-asup"
    user: "{{ credentials.api_username }}"
    password: "{{ credentials.api_password }}"
    body_format: json
    validate_certs: false
  register: current

- name: Validate auto-support expected state
  assert:
    that:
      - current.json.asupEnabled
      - not current.json.onDemandEnabled
      - not current.json.remoteDiagsEnabled
    msg: "Unexpected auto-support state"

View file

@@ -1,10 +0,0 @@
# This test is not enabled by default, but can be utilized by defining required variables in integration_config.yml
# Example integration_config.yml:
# ---
#netapp_e_api_host: 192.168.1.100
#netapp_e_api_username: admin
#netapp_e_api_password: myPass
#netapp_e_ssid: 1
unsupported
netapp/eseries

View file

@@ -1 +0,0 @@
# Role entry point: all test logic lives in run.yml.
- include_tasks: run.yml

View file

@@ -1,209 +0,0 @@
---
# Test code for the netapp_e_drive_firmware module
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Existing symbol issue: occasionally symbol will return 422 which causes Ansible to fail; however the drive firmware download will complete.
# Work-around: Remove all storage provisioning before commencing test.

# Abort early when the required connection variables are not supplied
# via integration_config.yml.
- name: NetApp Test Drive Firmware module
  fail:
    msg: 'Please define netapp_e_api_username, netapp_e_api_password, netapp_e_api_host, and netapp_e_ssid.'
  when: netapp_e_api_username is undefined or netapp_e_api_password is undefined
        or netapp_e_api_host is undefined or netapp_e_ssid is undefined

# Firmware bundles under test: 'list' holds local .dlp paths fed to the
# module; 'check' holds the per-drive versions expected after the update.
- set_fact:
    firmware:
      downgrade:
        list:
          - "/home/swartzn/Downloads/drive firmware/D_PX04SVQ160_DOWNGRADE_MS00toMSB6_801.dlp"
          - "/home/swartzn/Downloads/drive firmware/D_ST1200MM0017_DNGRADE_MS02toMS00_6600_802.dlp"
        check:
          - firmware: "D_PX04SVQ160_DOWNGRADE_MS00toMSB6_801.dlp"
            drive: "PX04SVQ160"
            version: "MSB6"
          - firmware: "D_ST1200MM0017_DNGRADE_MS02toMS00_6600_802.dlp"
            drive: "ST1200MM0017"
            version: "MS00"
      upgrade:
        list:
          - "/home/swartzn/Downloads/drive firmware/D_PX04SVQ160_30603183_MS00_6600_001.dlp"
          - "/home/swartzn/Downloads/drive firmware/D_ST1200MM0017_30602214_MS02_5600_002.dlp"
        check:
          - firmware: "D_PX04SVQ160_30603183_MS00_6600_001.dlp"
            drive: "PX04SVQ160"
            version: "MS00"
          - firmware: "D_ST1200MM0017_30602214_MS02_5600_002.dlp"
            drive: "ST1200MM0017"
            version: "MS02"

- name: Set drive firmware (baseline, maybe change)
  netapp_e_drive_firmware:
    api_url: "https://{{ netapp_e_api_host }}:8443/devmgr/v2"
    api_username: "{{ netapp_e_api_username }}"
    api_password: "{{ netapp_e_api_password }}"
    ssid: "{{ netapp_e_ssid }}"
    validate_certs: false
    firmware: "{{ firmware['downgrade']['list'] }}"
    wait_for_completion: true
    ignore_inaccessible_drives: true
    upgrade_drives_online: false
  register: drive_firmware

# Give the array time to settle before polling drive state.
- pause:
    seconds: 60

- name: Retrieve current firmware version
  uri:
    url: "https://{{ netapp_e_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_ssid }}/drives"
    user: "{{ netapp_e_api_username }}"
    password: "{{ netapp_e_api_password }}"
    validate_certs: false
  register: current_drive_firmware

# Each drive must either be a model not covered by the bundle, or carry the
# expected downgrade version for its model.
- name: Check if drive firmware is the expected versions
  assert:
    that: >-
      (item['productID'].strip() not in [firmware['downgrade']['check'][0]['drive'], firmware['downgrade']['check'][1]['drive']]) or
      (firmware['downgrade']['check'][0]['drive'] == item['productID'].strip() and
       firmware['downgrade']['check'][0]['version'] == item['softwareVersion']) or
      (firmware['downgrade']['check'][1]['drive'] == item['productID'].strip() and
       firmware['downgrade']['check'][1]['version'] == item['softwareVersion'])
    msg: "Drive firmware failed to update all drives"
  loop: "{{ current_drive_firmware['json'] }}"

- name: Set drive firmware (upgrade, change-checkmode)
  netapp_e_drive_firmware:
    api_url: "https://{{ netapp_e_api_host }}:8443/devmgr/v2"
    api_username: "{{ netapp_e_api_username }}"
    api_password: "{{ netapp_e_api_password }}"
    ssid: "{{ netapp_e_ssid }}"
    validate_certs: false
    firmware: "{{ firmware['upgrade']['list'] }}"
    wait_for_completion: true
    ignore_inaccessible_drives: true
    upgrade_drives_online: false
  register: drive_firmware
  check_mode: true

- pause:
    seconds: 60

- name: Retrieve current firmware version
  uri:
    url: "https://{{ netapp_e_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_ssid }}/drives"
    user: "{{ netapp_e_api_username }}"
    password: "{{ netapp_e_api_password }}"
    validate_certs: false
  register: current_drive_firmware

- name: Validate change status
  assert:
    that: drive_firmware.changed
    msg: "Change status is incorrect."

# Check mode must not modify anything: drives still carry downgrade versions.
- name: Check if drive firmware is the expected versions
  assert:
    that: >-
      (item['productID'].strip() not in [firmware['downgrade']['check'][0]['drive'], firmware['downgrade']['check'][1]['drive']]) or
      (firmware['downgrade']['check'][0]['drive'] == item['productID'].strip() and
       firmware['downgrade']['check'][0]['version'] == item['softwareVersion']) or
      (firmware['downgrade']['check'][1]['drive'] == item['productID'].strip() and
       firmware['downgrade']['check'][1]['version'] == item['softwareVersion'])
    msg: "Drive firmware failed to update all drives"
  loop: "{{ current_drive_firmware['json'] }}"

- name: Set drive firmware (upgrade, change)
  netapp_e_drive_firmware:
    api_url: "https://{{ netapp_e_api_host }}:8443/devmgr/v2"
    api_username: "{{ netapp_e_api_username }}"
    api_password: "{{ netapp_e_api_password }}"
    ssid: "{{ netapp_e_ssid }}"
    validate_certs: false
    firmware: "{{ firmware['upgrade']['list'] }}"
    wait_for_completion: true
    ignore_inaccessible_drives: true
    upgrade_drives_online: false
  register: drive_firmware

- pause:
    seconds: 60

- name: Retrieve current firmware version
  uri:
    url: "https://{{ netapp_e_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_ssid }}/drives"
    user: "{{ netapp_e_api_username }}"
    password: "{{ netapp_e_api_password }}"
    validate_certs: false
  register: current_drive_firmware

- name: Validate change status
  assert:
    that: drive_firmware.changed
    msg: "Change status is incorrect."

- name: Check if drive firmware is the expected versions
  assert:
    that: >-
      (item['productID'].strip() not in [firmware['downgrade']['check'][0]['drive'], firmware['downgrade']['check'][1]['drive']]) or
      (firmware['upgrade']['check'][0]['drive'] == item['productID'].strip() and
       firmware['upgrade']['check'][0]['version'] == item['softwareVersion']) or
      (firmware['upgrade']['check'][1]['drive'] == item['productID'].strip() and
       firmware['upgrade']['check'][1]['version'] == item['softwareVersion'])
    msg: "Drive firmware failed to update all drives"
  loop: "{{ current_drive_firmware['json'] }}"

- name: Set drive firmware (upgrade, no change)
  netapp_e_drive_firmware:
    api_url: "https://{{ netapp_e_api_host }}:8443/devmgr/v2"
    api_username: "{{ netapp_e_api_username }}"
    api_password: "{{ netapp_e_api_password }}"
    ssid: "{{ netapp_e_ssid }}"
    validate_certs: false
    firmware: "{{ firmware['upgrade']['list'] }}"
    wait_for_completion: true
    ignore_inaccessible_drives: true
    upgrade_drives_online: false
  register: drive_firmware

- pause:
    seconds: 60

- name: Retrieve current firmware version
  uri:
    url: "https://{{ netapp_e_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_ssid }}/drives"
    user: "{{ netapp_e_api_username }}"
    password: "{{ netapp_e_api_password }}"
    validate_certs: false
  register: current_drive_firmware

- name: Validate change status
  assert:
    that: not drive_firmware.changed
    msg: "Change status is incorrect."

- name: Check if drive firmware is the expected versions
  assert:
    that: >-
      (item['productID'].strip() not in [firmware['downgrade']['check'][0]['drive'], firmware['downgrade']['check'][1]['drive']]) or
      (firmware['upgrade']['check'][0]['drive'] == item['productID'].strip() and
       firmware['upgrade']['check'][0]['version'] == item['softwareVersion']) or
      (firmware['upgrade']['check'][1]['drive'] == item['productID'].strip() and
       firmware['upgrade']['check'][1]['version'] == item['softwareVersion'])
    msg: "Drive firmware failed to update all drives"
  loop: "{{ current_drive_firmware['json'] }}"

- name: Set drive firmware (downgrade, change)
  netapp_e_drive_firmware:
    api_url: "https://{{ netapp_e_api_host }}:8443/devmgr/v2"
    api_username: "{{ netapp_e_api_username }}"
    api_password: "{{ netapp_e_api_password }}"
    ssid: "{{ netapp_e_ssid }}"
    validate_certs: false
    firmware: "{{ firmware['downgrade']['list'] }}"
    wait_for_completion: true
    ignore_inaccessible_drives: true
    upgrade_drives_online: false
  register: drive_firmware

- pause:
    seconds: 60

- name: Retrieve current firmware version
  uri:
    url: "https://{{ netapp_e_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_ssid }}/drives"
    user: "{{ netapp_e_api_username }}"
    password: "{{ netapp_e_api_password }}"
    validate_certs: false
  register: current_drive_firmware

- name: Validate change status
  assert:
    that: drive_firmware.changed
    msg: "Change status is incorrect."

- name: Check if drive firmware is the expected versions
  assert:
    that: >-
      (item['productID'].strip() not in [firmware['downgrade']['check'][0]['drive'], firmware['downgrade']['check'][1]['drive']]) or
      (firmware['downgrade']['check'][0]['drive'] == item['productID'].strip() and
       firmware['downgrade']['check'][0]['version'] == item['softwareVersion']) or
      (firmware['downgrade']['check'][1]['drive'] == item['productID'].strip() and
       firmware['downgrade']['check'][1]['version'] == item['softwareVersion'])
    msg: "Drive firmware failed to update all drives"
  loop: "{{ current_drive_firmware['json'] }}"

View file

@@ -1,15 +0,0 @@
# This test is not enabled by default, but can be utilized by defining required variables in integration_config.yml
# Example integration_config.yml:
# ---
netapp_e_embedded_api_host: 192.168.1.1
netapp_e_embedded_api_username: admin
netapp_e_embedded_api_password: adminPass
netapp_e_embedded_ssid: 1
netapp_e_proxy_api_host: 192.168.1.100
netapp_e_proxy_api_username: admin
netapp_e_proxy_api_password: adminPass
netapp_e_proxy_ssid: 10
unsupported
netapp/eseries

View file

@@ -1 +0,0 @@
# Role entry point: all test logic lives in run.yml.
- include_tasks: run.yml

View file

@@ -1,348 +0,0 @@
---
# Test code for the netapp_e_firmware module
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# TODO: MUST BE DOWNGRADE BEFORE EXECUTING INTEGRATION TO RCB_11.40.3R2_280x_5c7d81b3.dlp and N280X-842834-D02.dlp
# loadControllerFirmware_MT swartzn@10.113.1.250 /home/swartzn/Downloads/RCB_11.40.3R2_280x_5c7d81b3.dlp /home/swartzn/Downloads/N280X-842834-D02.dlp
# This integration test will validate upgrade functionality for firmware-only, firmware-and-nvsram, and check mode.

# Abort early when the required connection variables are not supplied
# via integration_config.yml.
- name: NetApp Test Firmware module
  fail:
    msg: "Please define netapp_e_embedded_api_host, netapp_e_embedded_api_username, netapp_e_embedded_api_password, netapp_e_embedded_ssid,
          netapp_e_proxy_api_host, netapp_e_proxy_api_username, netapp_e_proxy_api_password, and netapp_e_proxy_ssid."
  when: "netapp_e_embedded_api_host is undefined or netapp_e_embedded_api_username is undefined or netapp_e_embedded_api_password is undefined or
         netapp_e_embedded_ssid is undefined or netapp_e_proxy_api_host is undefined or netapp_e_proxy_api_username is undefined or
         netapp_e_proxy_api_password is undefined or netapp_e_proxy_ssid is undefined"

# Firmware/NVSRAM bundles under test, in ascending version order, with the
# versions the array is expected to report after each upgrade.
- set_fact:
    path: "/home/swartzn/Downloads/"
    upgrades:
      - firmware: "RCB_11.40.3R2_280x_5c7d81b3.dlp"
        nvsram: "N280X-842834-D02.dlp"
        expected_firmware_version: "08.42.30.05"
        expected_nvsram_version: "N280X-842834-D02"
      - firmware: "RCB_11.40.5_280x_5ceef00e.dlp"
        nvsram: "N280X-842834-D02.dlp"
        expected_firmware_version: "08.42.50.00"
        expected_nvsram_version: "N280X-842834-D02"
      - firmware: "RCB_11.50.2_280x_5ce8501f.dlp"
        nvsram: "N280X-852834-D02.dlp"
        expected_firmware_version: "08.52.00.00"
        expected_nvsram_version: "N280X-852834-D02"

- name: Perform firmware upgrade using the Web Services REST API (checkmode-no change, firmware only)
  netapp_e_firmware:
    ssid: "{{ netapp_e_embedded_ssid }}"
    api_url: "https://{{ netapp_e_embedded_api_host }}:8443/devmgr/v2"
    api_username: "{{ netapp_e_embedded_api_username }}"
    api_password: "{{ netapp_e_embedded_api_password }}"
    validate_certs: false
    nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
    firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
    wait_for_completion: true
    ignore_health_check: true
  check_mode: true
  register: netapp_e_firmware

- name: Retrieve current firmware version
  uri:
    url: "https://{{ netapp_e_embedded_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_embedded_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
    user: "{{ netapp_e_embedded_api_username }}"
    password: "{{ netapp_e_embedded_api_password }}"
    validate_certs: false
  register: current_firmware

- name: Retrieve current nvsram version
  uri:
    url: "https://{{ netapp_e_embedded_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_embedded_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
    user: "{{ netapp_e_embedded_api_username }}"
    password: "{{ netapp_e_embedded_api_password }}"
    validate_certs: false
  register: current_nvsram

- name: Verify change status
  assert:
    that: not netapp_e_firmware.changed
    msg: "Failed to return unchanged."

- name: Verify current firmware version
  assert:
    that: current_firmware['json'][0] == upgrades[0]['expected_firmware_version']
    msg: "Unexpected firmware version."

- name: Verify current nvsram version
  assert:
    that: current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version']
    msg: "Unexpected nvsram version."

- name: Perform firmware upgrade using the Web Services REST API (no change, firmware only)
  netapp_e_firmware:
    ssid: "{{ netapp_e_embedded_ssid }}"
    api_url: "https://{{ netapp_e_embedded_api_host }}:8443/devmgr/v2"
    api_username: "{{ netapp_e_embedded_api_username }}"
    api_password: "{{ netapp_e_embedded_api_password }}"
    validate_certs: false
    nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
    firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
    wait_for_completion: true
    ignore_health_check: true
  register: netapp_e_firmware

- name: Retrieve current firmware version
  uri:
    url: "https://{{ netapp_e_embedded_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_embedded_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
    user: "{{ netapp_e_embedded_api_username }}"
    password: "{{ netapp_e_embedded_api_password }}"
    validate_certs: false
  register: current_firmware

- name: Retrieve current nvsram version
  uri:
    url: "https://{{ netapp_e_embedded_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_embedded_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
    user: "{{ netapp_e_embedded_api_username }}"
    password: "{{ netapp_e_embedded_api_password }}"
    validate_certs: false
  register: current_nvsram

- name: Verify change status
  assert:
    that: not netapp_e_firmware.changed
    msg: "Failed to return unchanged."

- name: Verify current firmware version
  assert:
    that: current_firmware['json'][0] == upgrades[0]['expected_firmware_version']
    msg: "Unexpected firmware version."

- name: Verify current nvsram version
  assert:
    that: current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version']
    msg: "Unexpected nvsram version."

- name: Perform firmware upgrade using the Web Services REST API (checkmode-change, firmware)
  netapp_e_firmware:
    ssid: "{{ netapp_e_embedded_ssid }}"
    api_url: "https://{{ netapp_e_embedded_api_host }}:8443/devmgr/v2"
    api_username: "{{ netapp_e_embedded_api_username }}"
    api_password: "{{ netapp_e_embedded_api_password }}"
    validate_certs: false
    nvsram: "{{ path }}{{ upgrades[1]['nvsram'] }}"
    firmware: "{{ path }}{{ upgrades[1]['firmware'] }}"
    wait_for_completion: true
    ignore_health_check: true
  register: netapp_e_firmware
  check_mode: true

- name: Retrieve current firmware version
  uri:
    url: "https://{{ netapp_e_embedded_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_embedded_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
    user: "{{ netapp_e_embedded_api_username }}"
    password: "{{ netapp_e_embedded_api_password }}"
    validate_certs: false
  register: current_firmware

- name: Retrieve current nvsram version
  uri:
    url: "https://{{ netapp_e_embedded_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_embedded_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
    user: "{{ netapp_e_embedded_api_username }}"
    password: "{{ netapp_e_embedded_api_password }}"
    validate_certs: false
  register: current_nvsram

- name: Verify change status
  assert:
    that: netapp_e_firmware.changed
    msg: "Failed to return changed."

# Check mode must not modify anything: versions remain at upgrades[0].
- name: Verify current firmware version
  assert:
    that: current_firmware['json'][0] == upgrades[0]['expected_firmware_version']
    msg: "Unexpected firmware version."

- name: Verify current nvsram version
  assert:
    that: current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version']
    msg: "Unexpected nvsram version."

- name: Perform firmware upgrade using the Web Services REST API (change, firmware)
  netapp_e_firmware:
    ssid: "{{ netapp_e_embedded_ssid }}"
    api_url: "https://{{ netapp_e_embedded_api_host }}:8443/devmgr/v2"
    api_username: "{{ netapp_e_embedded_api_username }}"
    api_password: "{{ netapp_e_embedded_api_password }}"
    validate_certs: false
    nvsram: "{{ path }}{{ upgrades[1]['nvsram'] }}"
    firmware: "{{ path }}{{ upgrades[1]['firmware'] }}"
    wait_for_completion: true
    ignore_health_check: true
  register: netapp_e_firmware

- name: Retrieve current firmware version
  uri:
    url: "https://{{ netapp_e_embedded_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_embedded_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
    user: "{{ netapp_e_embedded_api_username }}"
    password: "{{ netapp_e_embedded_api_password }}"
    validate_certs: false
  register: current_firmware

- name: Retrieve current nvsram version
  uri:
    url: "https://{{ netapp_e_embedded_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_embedded_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
    user: "{{ netapp_e_embedded_api_username }}"
    password: "{{ netapp_e_embedded_api_password }}"
    validate_certs: false
  register: current_nvsram

- name: Verify change status
  assert:
    that: netapp_e_firmware.changed
    msg: "Failed to return changed."

- name: Verify current firmware version
  assert:
    that: current_firmware['json'][0] == upgrades[1]['expected_firmware_version']
    msg: "Unexpected firmware version. {{ current_firmware['json'][0] }} != {{ upgrades[1]['expected_firmware_version'] }}"

- name: Verify current nvsram version
  assert:
    that: current_nvsram['json'][0] == upgrades[1]['expected_nvsram_version']
    msg: "Unexpected nvsram version. {{ current_nvsram['json'][0] }} != {{ upgrades[1]['expected_nvsram_version'] }}"

- name: Perform firmware upgrade using the Web Services REST API (changed, firmware)
  netapp_e_firmware:
    ssid: "{{ netapp_e_proxy_ssid }}"
    api_url: "https://{{ netapp_e_proxy_api_host }}:8443/devmgr/v2"
    api_username: "{{ netapp_e_proxy_api_username }}"
    api_password: "{{ netapp_e_proxy_api_password }}"
    validate_certs: false
    nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
    firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
    wait_for_completion: true
    ignore_health_check: true
  register: netapp_e_firmware

- name: Retrieve current firmware version
  uri:
    url: "https://{{ netapp_e_proxy_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_proxy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
    user: "{{ netapp_e_proxy_api_username }}"
    password: "{{ netapp_e_proxy_api_password }}"
    validate_certs: false
  register: current_firmware

- name: Retrieve current nvsram version
  uri:
    url: "https://{{ netapp_e_proxy_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_proxy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
    user: "{{ netapp_e_proxy_api_username }}"
    password: "{{ netapp_e_proxy_api_password }}"
    validate_certs: false
  register: current_nvsram

- name: Verify change status
  assert:
    that: netapp_e_firmware.changed
    msg: "Failed to return changed."

- name: Verify current firmware version
  assert:
    that: current_firmware['json'][0] == upgrades[0]['expected_firmware_version']
    msg: "Failed to change the firmware version."

- name: Verify current nvsram version
  assert:
    that: current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version']
    msg: "Failed to change the nvsram version."

- name: Perform firmware upgrade using the Web Services REST API (checkmode-unchanged, firmware)
  netapp_e_firmware:
    ssid: "{{ netapp_e_proxy_ssid }}"
    api_url: "https://{{ netapp_e_proxy_api_host }}:8443/devmgr/v2"
    api_username: "{{ netapp_e_proxy_api_username }}"
    api_password: "{{ netapp_e_proxy_api_password }}"
    validate_certs: false
    nvsram: "{{ path }}{{ upgrades[0]['nvsram'] }}"
    firmware: "{{ path }}{{ upgrades[0]['firmware'] }}"
    wait_for_completion: true
    ignore_health_check: true
  check_mode: true
  register: netapp_e_firmware

- name: Retrieve current firmware version
  uri:
    url: "https://{{ netapp_e_proxy_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_proxy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
    user: "{{ netapp_e_proxy_api_username }}"
    password: "{{ netapp_e_proxy_api_password }}"
    validate_certs: false
  register: current_firmware

- name: Retrieve current nvsram version
  uri:
    url: "https://{{ netapp_e_proxy_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_proxy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
    user: "{{ netapp_e_proxy_api_username }}"
    password: "{{ netapp_e_proxy_api_password }}"
    validate_certs: false
  register: current_nvsram

- name: Verify change status
  assert:
    that: not netapp_e_firmware.changed
    msg: "Failed to return unchanged."

- name: Verify current firmware version
  assert:
    that: current_firmware['json'][0] == upgrades[0]['expected_firmware_version']
    msg: "Failed to change the firmware version."

- name: Verify current nvsram version
  assert:
    that: current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version']
    msg: "Failed to change the nvsram version."

- name: Perform firmware upgrade using the Web Services REST API (checkmode-change, firmware and nvsram)
  netapp_e_firmware:
    ssid: "{{ netapp_e_proxy_ssid }}"
    api_url: "https://{{ netapp_e_proxy_api_host }}:8443/devmgr/v2"
    api_username: "{{ netapp_e_proxy_api_username }}"
    api_password: "{{ netapp_e_proxy_api_password }}"
    validate_certs: false
    nvsram: "{{ path }}{{ upgrades[2]['nvsram'] }}"
    firmware: "{{ path }}{{ upgrades[2]['firmware'] }}"
    wait_for_completion: true
    ignore_health_check: true
  check_mode: true
  register: netapp_e_firmware

- name: Retrieve current firmware version
  uri:
    url: "https://{{ netapp_e_proxy_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_proxy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
    user: "{{ netapp_e_proxy_api_username }}"
    password: "{{ netapp_e_proxy_api_password }}"
    validate_certs: false
  register: current_firmware

- name: Retrieve current nvsram version
  uri:
    url: "https://{{ netapp_e_proxy_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_proxy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
    user: "{{ netapp_e_proxy_api_username }}"
    password: "{{ netapp_e_proxy_api_password }}"
    validate_certs: false
  register: current_nvsram

- name: Verify change status
  assert:
    that: netapp_e_firmware.changed
    msg: "Failed to return changed."

# Check mode must not modify anything: versions remain at upgrades[0].
- name: Verify current firmware version
  assert:
    that: current_firmware['json'][0] == upgrades[0]['expected_firmware_version']
    msg: "Failed to change the firmware version."

- name: Verify current nvsram version
  assert:
    that: current_nvsram['json'][0] == upgrades[0]['expected_nvsram_version']
    msg: "Failed to change the nvsram version."

- name: Perform firmware upgrade using the Web Services REST API (changed, firmware and nvsram)
  netapp_e_firmware:
    ssid: "{{ netapp_e_proxy_ssid }}"
    api_url: "https://{{ netapp_e_proxy_api_host }}:8443/devmgr/v2"
    api_username: "{{ netapp_e_proxy_api_username }}"
    api_password: "{{ netapp_e_proxy_api_password }}"
    validate_certs: false
    nvsram: "{{ path }}{{ upgrades[2]['nvsram'] }}"
    firmware: "{{ path }}{{ upgrades[2]['firmware'] }}"
    wait_for_completion: true
    ignore_health_check: true
  register: netapp_e_firmware

- name: Retrieve current firmware version
  uri:
    url: "https://{{ netapp_e_proxy_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_proxy_ssid }}/graph/xpath-filter?query=/sa/saData/fwVersion"
    user: "{{ netapp_e_proxy_api_username }}"
    password: "{{ netapp_e_proxy_api_password }}"
    validate_certs: false
  register: current_firmware

- name: Retrieve current nvsram version
  uri:
    url: "https://{{ netapp_e_proxy_api_host }}:8443/devmgr/v2/storage-systems/{{ netapp_e_proxy_ssid }}/graph/xpath-filter?query=/sa/saData/nvsramVersion"
    user: "{{ netapp_e_proxy_api_username }}"
    password: "{{ netapp_e_proxy_api_password }}"
    validate_certs: false
  register: current_nvsram

- name: Verify change status
  assert:
    that: netapp_e_firmware.changed
    msg: "Failed to return changed."

- name: Verify current firmware version
  assert:
    that: current_firmware['json'][0] == upgrades[2]['expected_firmware_version']
    msg: "Failed to change the firmware version."

- name: Verify current nvsram version
  assert:
    that: current_nvsram['json'][0] == upgrades[2]['expected_nvsram_version']
    msg: "Failed to change the nvsram version."

View file

@ -1,10 +0,0 @@
# This test is not enabled by default, but can be utilized by defining required variables in integration_config.yml
# Example integration_config.yml:
# ---
#netapp_e_api_host: 10.113.1.111:8443
#netapp_e_api_username: admin
#netapp_e_api_password: myPass
#netapp_e_ssid: 1
unsupported
netapp/eseries

View file

@ -1 +0,0 @@
- include_tasks: run.yml

View file

@ -1,51 +0,0 @@
# Test code for the netapp_e_iscsi_interface module
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
- name: NetApp Test Global Settings module
fail:
msg: 'Please define netapp_e_api_username, netapp_e_api_password, netapp_e_api_host, and netapp_e_ssid.'
when: netapp_e_api_username is undefined or netapp_e_api_password is undefined
or netapp_e_api_host is undefined or netapp_e_ssid is undefined
vars: &vars
credentials: &creds
api_url: "https://{{ netapp_e_api_host }}/devmgr/v2"
api_username: "{{ netapp_e_api_username }}"
api_password: "{{ netapp_e_api_password }}"
ssid: "{{ netapp_e_ssid }}"
validate_certs: no
name: TestArray
- name: set credentials
set_fact:
credentials: *creds
- name: Show some debug information
debug:
msg: "Using user={{ credentials.api_username }} on server={{ credentials.api_url }}."
- name: Set the name to the default
netapp_e_global:
<<: *creds
- name: Set a few different names
netapp_e_global:
<<: *creds
name: "{{ item }}"
loop:
- a
- x
- "000001111122222333334444455555"
- name: Set an explicit name
netapp_e_global:
<<: *creds
name: abc
register: result
- name: Validate name
assert:
that: result.name == "abc"
- name: Restore the original name
netapp_e_global:
<<: *creds

View file

@ -1,10 +0,0 @@
# This test is not enabled by default, but can be utilized by defining required variables in integration_config.yml
# Example integration_config.yml:
# ---
#netapp_e_api_host: 192.168.1.1
#netapp_e_api_username: admin
#netapp_e_api_password: myPass
#netapp_e_ssid: 1
unsupported
netapp/eseries

View file

@ -1 +0,0 @@
- include_tasks: run.yml

View file

@ -1,276 +0,0 @@
---
# Test code for the netapp_e_host module
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
- name: NetApp Test Host module
fail:
msg: 'Please define netapp_e_api_username, netapp_e_api_password, netapp_e_api_host, and netapp_e_ssid.'
when: netapp_e_api_username is undefined or netapp_e_api_password is undefined or
netapp_e_api_host is undefined or netapp_e_ssid is undefined
vars:
gather_facts: yes
credentials: &creds
api_url: "https://{{ netapp_e_api_host }}/devmgr/v2"
api_username: "{{ netapp_e_api_username }}"
api_password: "{{ netapp_e_api_password }}"
ssid: "{{ netapp_e_ssid }}"
validate_certs: no
hosts: &hosts
1:
host_type: 27
update_host_type: 28
ports:
- type: 'iscsi'
label: 'I1_1'
port: 'iqn.1996-04.de.suse:01:56f86f9bd1fe-PORT1'
- type: 'iscsi'
label: 'I1_2'
port: 'iqn.1996-04.de.suse:01:56f86f9bd1ff-port1'
ports2:
- type: 'iscsi'
label: 'I1_1'
port: 'iqn.1996-04.de.suse:01:56f86f9bd1fe-port2'
- type: 'iscsi'
label: 'I1_2'
port: 'iqn.1996-04.de.suse:01:56f86f9bd1ff-port2'
- type: 'iscsi'
label: 'I1_3'
port: 'iqn.1996-04.redhat:01:56f86f9bd1fe-PORT1'
2:
host_type: 27
update_host_type: 28
ports:
- type: 'iscsi'
label: 'I2_1'
port: 'iqn.1996-04.redhat:01:56f86f9bd1fe-port1'
- type: 'iscsi'
label: 'I2_2'
port: 'iqn.1996-04.redhat:01:56f86f9bd1ff-port1'
ports2:
- type: 'iscsi'
label: 'I2_1'
port: 'iqn.1996-04.redhat:01:56f86f9bd1fe-port2'
- type: 'iscsi'
label: 'I2_2'
port: 'iqn.1996-04.redhat:01:56f86f9bd1ff-PORT2'
# ********************************************
# *** Ensure jmespath package is installed ***
# ********************************************
# NOTE: jmespath must be installed for the json_query filter
- name: Ensure that jmespath is installed
pip:
name: jmespath
state: present
register: jmespath
- fail:
msg: "Restart playbook, the jmespath package was installed and is need for the playbook's execution."
when: jmespath.changed
# *****************************************
# *** Set credential and host variables ***
# *****************************************
- name: Set hosts variable
set_fact:
hosts: *hosts
- name: set credentials
set_fact:
credentials: *creds
- name: Show some debug information
debug:
msg: "Using user={{ credentials.api_username }} on server={{ credentials.api_url }}."
# *** Remove any existing hosts to set initial state and verify state ***
- name: Remove any existing hosts
netapp_e_host:
<<: *creds
state: absent
name: "{{ item.key }}"
with_dict: *hosts
# Retrieve array host definitions
- name: HTTP request for all host definitions from array
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/hosts"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: result
# Verify that host 1 and 2 host objects do not exist
- name: Collect host side port labels
set_fact:
host_labels: "{{ result | json_query('json[*].label') }}"
- name: Assert hosts were removed
  assert:
    # Host dict keys are YAML integers while array labels are strings,
    # so cast before comparing; a bare expression (no "{{ }}") avoids
    # Ansible's conditional-templating warning.
    that: item.key | string not in host_labels
    msg: "Host, {{ item.key }}, failed to be removed from the hosts!"
  loop: "{{ lookup('dict', hosts) }}"
# *****************************************************************
# *** Create host definitions and validate host object creation ***
# *****************************************************************
- name: Define hosts
netapp_e_host:
<<: *creds
state: present
host_type: "{{ item.value.host_type }}"
ports: "{{ item.value.ports }}"
name: "{{ item.key }}"
with_dict: *hosts
# Retrieve array host definitions
- name: https request to validate host definitions were created
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/hosts"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: result
# Verify hosts were indeed created
- name: Collect host label list
set_fact:
hosts_labels: "{{ result | json_query('json[*].label') }}"
- name: Validate hosts were in fact created
  assert:
    # Host dict keys are YAML integers while array labels are strings,
    # so cast before comparing; a bare expression (no "{{ }}") avoids
    # Ansible's conditional-templating warning.
    that: item.key | string in hosts_labels
    # Fixed message typo: "not define" -> "not defined".
    msg: "Host, {{ item.key }}, not defined on array!"
  loop: "{{ lookup('dict', hosts) }}"
# *** Update with no state changes results in no changes ***
- name: Redefine hosts, expecting no changes
netapp_e_host:
<<: *creds
state: present
host_type: "{{ item.value.host_type }}"
ports: "{{ item.value.ports }}"
name: "{{ item.key }}"
with_dict: *hosts
register: result
# Verify that no changes occurred
- name: Ensure no change occurred
assert:
msg: "A change was not detected!"
that: "not result.changed"
# ***********************************************************************************
# *** Redefine hosts using ports2 host definitions and validate the updated state ***
# ***********************************************************************************
- name: Redefine hosts, expecting changes
netapp_e_host:
<<: *creds
state: present
host_type: "{{ item.value.host_type }}"
ports: "{{ item.value.ports2 }}"
name: "{{ item.key }}"
force_port: yes
with_dict: *hosts
register: result
# Request from the array all host definitions
- name: HTTP request for port information
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/hosts"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: result
# Compile a list of array host port information for verifying changes
- name: Compile array host port information list
set_fact:
tmp: []
# Append each loop to the previous extraction. Each loop consists of host definitions and the filters will perform
# the following: grab host side port lists; combine to each list a dictionary containing the host name(label);
# lastly, convert the zip_longest object into a list
- set_fact:
tmp: "{{ tmp }} + {{ [item | json_query('hostSidePorts[*]')] |
zip_longest([], fillvalue={'host_name': item.label}) | list }}"
loop: "{{ result.json }}"
# Make new list, port_info, by combining each list entry's dictionaries into a single dictionary
- name: Create port information list
set_fact:
port_info: []
- set_fact:
port_info: "{{ port_info + [item[0] |combine(item[1])] }}"
loop: "{{ tmp }}"
# Compile list of expected host port information for verifying changes
- name: Create expected port information list
set_fact:
tmp: []
# Append each loop to the previous extraction. Each loop consists of host definitions and the filters will perform
# the following: grab host side port lists; combine to each list a dictionary containing the host name(label);
# lastly, convert the zip_longest object into a list
- set_fact:
tmp: "{{ tmp }} + {{ [item | json_query('value.ports2[*]')]|
zip_longest([], fillvalue={'host_name': item.key|string}) | list }}"
loop: "{{ lookup('dict', hosts) }}"
# Make new list, expected_port_info, by combining each list entry's dictionaries into a single dictionary
- name: Create expected port information list
set_fact:
expected_port_info: []
- set_fact:
expected_port_info: "{{ expected_port_info + [ item[0] |combine(item[1]) ] }}"
loop: "{{ tmp }}"
# Verify that each host object has the expected protocol type and address/port
- name: Assert hosts information was updated with new port information
assert:
that: "{{ item[0].host_name != item[1].host_name or
item[0].label != item[1].label or
(item[0].type == item[1].type and
(item[0].address|regex_replace(':','')) == (item[1].port|regex_replace(':',''))) }}"
msg: "port failed to be updated!"
loop: "{{ query('nested', port_info, expected_port_info) }}"
# ****************************************************
# *** Remove any existing hosts and verify changes ***
# ****************************************************
- name: Remove any existing hosts
netapp_e_host:
<<: *creds
state: absent
name: "{{ item.key }}"
with_dict: *hosts
# Request all host object definitions
- name: HTTP request for all host definitions from array
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/hosts"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: results
# Collect port label information
- name: Collect host side port labels
set_fact:
host_side_port_labels: "{{ results | json_query('json[*].hostSidePorts[*].label') }}"
- name: Collect removed port labels
set_fact:
removed_host_side_port_labels: "{{ hosts | json_query('*.ports[*].label') }}"
# Verify host 1 and 2 objects were removed
- name: Assert hosts were removed
assert:
that: item not in host_side_port_labels
msg: "Host {{ item }} failed to be removed from the hosts!"
loop: "{{ removed_host_side_port_labels }}"

View file

@ -1,10 +0,0 @@
# This test is not enabled by default, but can be utilized by defining required variables in integration_config.yml
# Example integration_config.yml:
# ---
#netapp_e_api_host: 10.113.1.111:8443
#netapp_e_api_username: admin
#netapp_e_api_password: myPass
#netapp_e_ssid: 1
unsupported
netapp/eseries

View file

@ -1 +0,0 @@
- include_tasks: run.yml

View file

@ -1,448 +0,0 @@
---
# Test code for the netapp_e_iscsi_interface module
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ***********************
# *** Local test data ***
# ***********************
- name: NetApp Test iSCSI Interface module
fail:
msg: 'Please define netapp_e_api_username, netapp_e_api_password, netapp_e_api_host, and netapp_e_ssid.'
when: netapp_e_api_username is undefined or netapp_e_api_password is undefined
or netapp_e_api_host is undefined or netapp_e_ssid is undefined
vars:
credentials: &creds
api_url: "https://{{ netapp_e_api_host }}/devmgr/v2"
api_username: "{{ netapp_e_api_username }}"
api_password: "{{ netapp_e_api_password }}"
ssid: "{{ netapp_e_ssid }}"
validate_certs: no
array: &array
subnet: 255.255.255.0
gateway: 10.10.10.1
A:
- channel: 1
max_frame_size: 1500
- channel: 2
max_frame_size: 2000
- channel: 3
max_frame_size: 9000
- channel: 4
max_frame_size: 1500
- channel: 5
max_frame_size: 2000
- channel: 6
max_frame_size: 9000
B:
- channel: 1
max_frame_size: 9000
- channel: 2
max_frame_size: 1500
- channel: 3
max_frame_size: 2000
- channel: 4
max_frame_size: 9000
- channel: 5
max_frame_size: 1500
- channel: 6
max_frame_size: 2000
# ***************************************************
# *** Ensure python jmespath package is installed ***
# ***************************************************
- name: Ensure that jmespath is installed
  pip:
    name: jmespath
    # BUG FIX: the pip module has no 'enabled' state (valid choices are
    # absent, forcereinstall, latest, present) — 'enabled' fails at
    # runtime. The sibling test files correctly use 'present'.
    state: present
  register: jmespath
- fail:
msg: "Restart playbook, the jmespath package was installed and is need for the playbook's execution."
when: jmespath.changed
# ************************************
# *** Set local playbook test data ***
# ************************************
- name: set credentials
set_fact:
credentials: *creds
- name: set array
set_fact:
array: *array
- name: Show some debug information
debug:
msg: "Using user={{ credentials.api_username }} on server={{ credentials.api_url }}."
# *****************************************
# *** Disable all controller A channels ***
# *****************************************
- name: Disable all controller A ports
netapp_e_iscsi_interface:
<<: *creds
controller: "A"
channel: "{{ item.channel }}"
state: disabled
loop: "{{ lookup('list', array.A) }}"
# Delay to give the asynchronous symbol call time to complete
- pause:
seconds: 30
# Request all controller's iscsi host interface information
- name: Collect iscsi port information
uri:
url: "{{ xpath_filter_url }}?query=controller/hostInterfaces//iscsi"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: result
vars:
xpath_filter_url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/graph/xpath-filter"
# Extract controller A's port information from the iscsi host interfaces list
# Note: min filter is used because there are only two controller ids and the smaller corresponds with controller A
- name: Get controller A's controllerId
set_fact:
controller_a_id: "{{ result | json_query('json[*].controllerId') | min }}"
# Collect all port information associated with controller A
- name: Get controller A's port information
set_fact:
controller_a: "{{ result | json_query(controller_a_query) }}"
vars:
controller_a_query: "json[?controllerId=='{{ controller_a_id }}']"
# Confirm controller A's ports are disabled
- name: Verify all controller A ports are disabled
  assert:
    # 'that' is an implicit Jinja2 expression; "{{ item.ipv4Enabled == false }}"
    # double-templates the condition. Negate the boolean directly.
    that: not item.ipv4Enabled
    msg: "Controller A, channel {{ item.channel }} is not disabled"
  loop: "{{ controller_a }}"
# *****************************************
# *** Disable all controller B channels ***
# *****************************************
- name: Disable all controller B ports
netapp_e_iscsi_interface:
<<: *creds
controller: "B"
channel: "{{ item.channel }}"
state: disabled
loop: "{{ lookup('list', array.B) }}"
# Delay to give the asynchronous symbol call time to complete
- pause:
seconds: 30
# Request all controller's iscsi host interface information
- name: Collect iscsi port information
uri:
url: "{{ xpath_filter_url }}?query=controller/hostInterfaces//iscsi"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: result
vars:
xpath_filter_url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/graph/xpath-filter"
# Extract controller B's port information from the iscsi host interfaces list
# Note: max filter is used because there are only two controller ids and the larger corresponds with controller B
- name: Get controller B's controllerId
set_fact:
controller_b_id: "{{ result | json_query('json[*].controllerId') | max }}"
# Collect all port information associated with controller B
- name: Get controller B's port information
set_fact:
controller_b: "{{ result | json_query(controller_b_query) }}"
vars:
controller_b_query: "json[?controllerId=='{{ controller_b_id }}']"
# Confirm controller B's ports are disabled
- name: Verify all controller B ports are disabled
  assert:
    # 'that' is an implicit Jinja2 expression; "{{ item.ipv4Enabled == false }}"
    # double-templates the condition. Negate the boolean directly.
    that: not item.ipv4Enabled
    msg: "Controller B, channel {{ item.channel }} is not disabled"
  loop: "{{ controller_b }}"
# *****************************************************
# *** Configure all controller A's ports statically ***
# *****************************************************
- name: Configure controller A's port to use a static configuration method
netapp_e_iscsi_interface:
<<: *creds
controller: "A"
channel: "{{ item.channel }}"
state: enabled
config_method: static
address: "{{ array.gateway.split('.')[:3] | join('.') }}.{{ item.channel }}"
subnet_mask: "{{ array.subnet }}"
gateway: "{{ array.gateway }}"
max_frame_size: "{{ item.max_frame_size }}"
loop: "{{ lookup('list', array.A) }}"
# Delay to give the asynchronous symbol call time to complete
- pause:
seconds: 30
# Request a list of iscsi host interfaces
- name: Collect array information
uri:
url: "{{ xpath_filter }}?query=controller/hostInterfaces//iscsi"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: result
vars:
xpath_filter: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/graph/xpath-filter"
# Extract controller A's port information from the iscsi host interfaces list
# Note: min filter is used because there are only two controller ids and the smaller corresponds with controller A
- name: Get controller A's controllerId
set_fact:
controller_a_id: "{{ result | json_query('json[*].controllerId') | min }}"
# Compile any iscsi port information associated with controller A
- name: Get controller A's port information
set_fact:
controller_a: "{{ result | json_query(controller_a_query) }}"
vars:
controller_a_query: "json[?controllerId=='{{ controller_a_id }}']"
# Confirm that controller A ports are statically defined with the expected MTU, gateway, subnet and ipv4 address
- name: Verify expected controller A's port configuration
assert:
that: "{{ item[0].channel != item[1].channel or
( item[0].ipv4Data.ipv4AddressConfigMethod == 'configStatic' and
item[0].interfaceData.ethernetData.maximumFramePayloadSize == item[1].max_frame_size and
item[0].ipv4Data.ipv4AddressData.ipv4GatewayAddress == array.gateway and
item[0].ipv4Data.ipv4AddressData.ipv4SubnetMask == array.subnet and
item[0].ipv4Data.ipv4AddressData.ipv4Address == partial_address + item[1].channel | string ) }}"
msg: "Failed to configure controller A, channel {{ item[0].channel }}"
loop: "{{ query('nested', lookup('list', controller_a), lookup('list', array.A) ) }}"
vars:
partial_address: "{{ array.gateway.split('.')[:3] | join('.') + '.' }}"
# *******************************************************************************************
# *** Configure controller B's channels for dhcp and specific frame maximum payload sizes ***
# *******************************************************************************************
- name: Configure controller B's ports to use dhcp with different MTU
netapp_e_iscsi_interface:
<<: *creds
controller: "B"
channel: "{{ item.channel }}"
state: enabled
config_method: dhcp
max_frame_size: "{{ item.max_frame_size }}"
loop: "{{ lookup('list', array.B) }}"
# Delay to give the asynchronous symbol call time to complete
- pause:
seconds: 30
# request a list of iscsi host interfaces
- name: Collect array information
uri:
url: "{{ xpath_filter_url }}?query=controller/hostInterfaces//iscsi"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: result
vars:
xpath_filter_url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/graph/xpath-filter"
# Extract controller B's port information from the iscsi host interfaces list
# Note: max filter is used because there are only two controller ids and the larger corresponds with controller B
- name: Get controller B's controllerId
set_fact:
controller_b_id: "{{ result | json_query('json[*].controllerId') | max }}"
- name: Get controller B port information list
set_fact:
controller_b: "{{ result | json_query(controller_b_query) }}"
vars:
controller_b_query: "json[?controllerId=='{{ controller_b_id }}']"
# Using a nested loop of array information and expected information, verify that each channel has the appropriate max
# frame payload size and is configured for dhcp
- name: Verify expected controller B's port configuration
assert:
that: "{{ item[0].channel != item[1].channel or
( item[0].ipv4Data.ipv4AddressConfigMethod == 'configDhcp' and
item[0].interfaceData.ethernetData.maximumFramePayloadSize == item[1].max_frame_size ) }}"
msg: >
Failed to configure controller channel {{ item[0].channel }} for dhcp
and/or maximum frame size of {{ item[1].max_frame_size }}!
loop: "{{ query('nested', lookup('list', controller_b), lookup('list', array.B)) }}"
# *******************************************************************************************
# *** Configure controller A's channels for dhcp and specific frame maximum payload sizes ***
# *******************************************************************************************
- name: Configure controller A's ports to use dhcp with different MTU
netapp_e_iscsi_interface:
<<: *creds
controller: "A"
channel: "{{ item.channel }}"
state: enabled
config_method: dhcp
max_frame_size: "{{ item.max_frame_size }}"
loop: "{{ lookup('list', array.A) }}"
# Delay to give the asynchronous symbol call time to complete
- pause:
seconds: 30
# Request a list of iscsi host interfaces
- name: Collect array information
uri:
url: "{{ xpath_filter_url }}?query=controller/hostInterfaces//iscsi"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: result
vars:
xpath_filter_url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/graph/xpath-filter"
# Extract controller A's port information from the iscsi host interfaces list
# Note: min filter is used because there are only two controller ids and the smaller corresponds with controller A
- name: Get controller A's controllerId
set_fact:
controller_a_id: "{{ result | json_query('json[*].controllerId') | min }}"
- name: Get controller A port information list
set_fact:
controller_a: "{{ result | json_query(controller_a_query) }}"
vars:
controller_a_query: "json[?controllerId=='{{ controller_a_id }}']"
# Using a nested loop of array information and expected information, verify that each channel has the appropriate max
# frame payload size and is configured for dhcp
- name: Verify expected controller A's port configuration
assert:
that: "{{ item[0].channel != item[1].channel or
( item[0].ipv4Data.ipv4AddressConfigMethod == 'configDhcp' and
item[0].interfaceData.ethernetData.maximumFramePayloadSize == item[1].max_frame_size ) }}"
msg: >
Failed to configure controller channel {{ item[0].channel }} for dhcp
and/or maximum frame size of {{ item[1].max_frame_size }}!
loop: "{{ query('nested', lookup('list', controller_a), lookup('list', array.A)) }}"
# *****************************************************
# *** Configure all controller B's ports statically ***
# *****************************************************
- name: Configure controller B's ports to use a static configuration method
netapp_e_iscsi_interface:
<<: *creds
controller: "B"
channel: "{{ item.channel }}"
state: enabled
config_method: static
address: "{{ array.gateway.split('.')[:3] | join('.') }}.{{ item.channel }}"
subnet_mask: "{{ array.subnet }}"
gateway: "{{ array.gateway }}"
max_frame_size: "{{ item.max_frame_size }}"
loop: "{{ lookup('list', array.B) }}"
# Delay to give the asynchronous symbol call time to complete
- pause:
seconds: 30
# request a list of iscsi host interfaces
- name: Collect array information
uri:
url: "{{ xpath_filter }}?query=controller/hostInterfaces//iscsi"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: result
vars:
xpath_filter: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/graph/xpath-filter"
# Extract controller B's port information from the iscsi host interfaces list
# Note: max filter is used because there are only two controller ids and the larger corresponds with controller B
- name: Get controller B's controllerId
set_fact:
controller_b_id: "{{ result | json_query('json[*].controllerId') | max }}"
# Compile any iscsi port information associated with controller B
- name: Get controller B's port information
set_fact:
controller_b: "{{ result | json_query(controller_b_query) }}"
vars:
controller_b_query: "json[?controllerId=='{{ controller_b_id }}']"
# Confirm that controller B ports are statically defined with the expected MTU, gateway, subnet and ipv4 address
- name: Verify expected controller B's port configuration
assert:
that: "{{ item[0].channel != item[1].channel or
( item[0].ipv4Data.ipv4AddressConfigMethod == 'configStatic' and
item[0].interfaceData.ethernetData.maximumFramePayloadSize == item[1].max_frame_size and
item[0].ipv4Data.ipv4AddressData.ipv4GatewayAddress == array.gateway and
item[0].ipv4Data.ipv4AddressData.ipv4SubnetMask == array.subnet and
item[0].ipv4Data.ipv4AddressData.ipv4Address == partial_address + item[1].channel | string ) }}"
msg: "Failed to configure controller B, channel {{ item[0].channel }}"
loop: "{{ query('nested', lookup('list', controller_b), lookup('list', array.B) ) }}"
vars:
partial_address: "{{ array.gateway.split('.')[:3] | join('.') + '.' }}"
# **************************************
# *** Disable all controller B ports ***
# **************************************
- name: Disable all controller B's ports
netapp_e_iscsi_interface:
<<: *creds
controller: "B"
channel: "{{ item.channel }}"
state: disabled
loop: "{{ lookup('list', array.B) }}"
# Delay to give the asynchronous symbol call time to complete
- pause:
seconds: 30
# Request controller iscsi host interface information
- name: Collect iscsi port information
uri:
url: "{{ xpath_filter_url }}?query=controller/hostInterfaces//iscsi"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: result
vars:
xpath_filter_url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/graph/xpath-filter"
# Extract controller B's port information from the iscsi host interfaces list
# Note: max filter is used because there are only two controller ids and the larger corresponds with controller B
- name: Get controller B's controllerId
set_fact:
controller_b_id: "{{ result | json_query('json[*].controllerId') | max }}"
# Compile any iscsi port information associated with controller B
- name: Get controller B's port information
set_fact:
controller_b: "{{ result | json_query(controller_b_query) }}"
vars:
controller_b_query: "json[?controllerId=='{{ controller_b_id }}']"
# Confirm that all of controller B's ports are disabled
- name: Verify all controller B ports are disabled
  assert:
    # 'that' is an implicit Jinja2 expression; "{{ item.ipv4Enabled == false }}"
    # double-templates the condition. Negate the boolean directly.
    that: not item.ipv4Enabled
    msg: "Controller B, channel {{ item.channel }} is not disabled"
  loop: "{{ controller_b }}"

View file

@ -1,10 +0,0 @@
# This test is not enabled by default, but can be utilized by defining required variables in integration_config.yml
# Example integration_config.yml:
# ---
#netapp_e_api_host: 10.113.1.111:8443
#netapp_e_api_username: admin
#netapp_e_api_password: myPass
#netapp_e_ssid: 1
unsupported
netapp/eseries

View file

@ -1 +0,0 @@
- include_tasks: run.yml

View file

@ -1,68 +0,0 @@
# Test code for the netapp_e_iscsi_interface module
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
- name: NetApp Test iSCSI Target module
fail:
msg: 'Please define netapp_e_api_username, netapp_e_api_password, netapp_e_api_host, and netapp_e_ssid.'
when: netapp_e_api_username is undefined or netapp_e_api_password is undefined
or netapp_e_api_host is undefined or netapp_e_ssid is undefined
vars: &vars
credentials: &creds
api_url: "https://{{ netapp_e_api_host }}/devmgr/v2"
api_username: "{{ netapp_e_api_username }}"
api_password: "{{ netapp_e_api_password }}"
ssid: "{{ netapp_e_ssid }}"
validate_certs: no
secrets: &secrets
# 12 characters
- 012345678912
# 16 characters
- 0123456789123456
- name: set credentials
set_fact:
credentials: *creds
- name: Show some debug information
debug:
msg: "Using user={{ credentials.api_username }} on server={{ credentials.api_url }}."
- name: Ensure we can set the chap secret
netapp_e_iscsi_target:
<<: *creds
name: myTarget
chap_secret: "{{ item }}"
loop: *secrets
- name: Turn off all of the options
netapp_e_iscsi_target:
<<: *creds
name: abc
ping: no
unnamed_discovery: no
- name: Ensure we can set the ping option
netapp_e_iscsi_target:
<<: *creds
name: myTarget
ping: yes
unnamed_discovery: yes
register: result
- name: Ensure we received a change
assert:
that: result.changed
- name: Run the ping change in check-mode
netapp_e_iscsi_target:
<<: *creds
name: myTarget
ping: yes
unnamed_discovery: yes
check_mode: yes
register: result
- name: Ensure no change resulted
assert:
that: not result.changed

View file

@ -1,10 +0,0 @@
# This test is not enabled by default, but can be utilized by defining required variables in integration_config.yml
# Example integration_config.yml:
# ---
#netapp_e_api_host: 192.168.1.100
#netapp_e_api_username: admin
#netapp_e_api_password: myPass
#netapp_e_ssid: 1
unsupported
netapp/eseries

View file

@ -1 +0,0 @@
- include_tasks: run.yml

View file

@ -1,326 +0,0 @@
# Test code for the netapp_e_lun_mapping module
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

# Abort early when the required connection variables are not supplied via
# integration_config.yml; the &creds anchor is merged into every task below.
- name: NetApp Test lun mapping module
  fail:
    msg: 'Please define netapp_e_api_username, netapp_e_api_password, netapp_e_api_host, and netapp_e_ssid.'
  when: netapp_e_api_username is undefined or netapp_e_api_password is undefined
        or netapp_e_api_host is undefined or netapp_e_ssid is undefined
  vars:
    credentials: &creds
      api_url: "https://{{ netapp_e_api_host }}/devmgr/v2"
      api_username: "{{ netapp_e_api_username }}"
      api_password: "{{ netapp_e_api_password }}"
      ssid: "{{ netapp_e_ssid }}"
      validate_certs: false

- name: set credentials
  set_fact:
    credentials: *creds
# ****************************************************
# *** Setup test hosts, storage pools, and volumes ***
# ****************************************************
- name: Create host for host mapping
netapp_e_host:
<<: *creds
state: present
name: test_host_mapping_host
host_type: 27
- netapp_e_host:
<<: *creds
state: present
name: test_host1
host_type: 27
- netapp_e_host:
<<: *creds
state: present
name: test_host2
host_type: 27
- name: Create storage pool for host mapping
netapp_e_storagepool:
<<: *creds
state: present
name: test_host_mapping_storage_pool
raid_level: raid0
criteria_min_usable_capacity: 1
- name: Create volume for host mapping
netapp_e_volume:
<<: *creds
state: present
name: test_host_mapping_volume
storage_pool_name: test_host_mapping_storage_pool
size: 1
- name: Create volume for host mapping
netapp_e_volume:
<<: *creds
state: present
name: test_host_mapping_volume2
storage_pool_name: test_host_mapping_storage_pool
size: 1
# **********************************************
# *** Create new lun between host and volume ***
# **********************************************
- name: Create netapp_e_lun_mapping
netapp_e_lun_mapping:
<<: *creds
state: present
target: test_host_mapping_host
volume: test_host_mapping_volume
register: result
- name: Verify lun mapping
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ netapp_e_ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: current
- assert:
that: "{{ item['mapped'] }}"
msg: "Lun failed to be created."
loop: "{{ lookup('list', current.json)}}"
# QUICK VERIFICATION OF MISMATCHING TARGET/TARGET_TYPE - GOOD
#- name: Create netapp_e_lun_mapping
# netapp_e_lun_mapping:
# <<: *creds
# state: present
# target: test_host_mapping_host
# volume: test_host_mapping_volume
# lun: 100
# target_type: group
# register: result
#
#- pause: seconds=30
# **************************************************************
# *** Repeat previous lun creation play and verify unchanged ***
# **************************************************************
# Re-issue the identical mapping request: the module must be idempotent and
# the mapping must still exist on the array afterwards.
- name: Repeat lun creation
  netapp_e_lun_mapping:
    <<: *creds
    state: present
    target: test_host_mapping_host
    volume: test_host_mapping_volume
  register: result

- name: Verify lun mapping
  uri:
    url: "{{ credentials.api_url }}/storage-systems/{{ netapp_e_ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
    user: "{{ credentials.api_username }}"
    password: "{{ credentials.api_password }}"
    body_format: json
    validate_certs: false
  register: current

- assert:
    # 'not result.changed' replaces the non-idiomatic 'result.changed==False'.
    that: "{{ item['mapped'] and not result.changed }}"
    msg: "Lun failed to be unchanged."
  loop: "{{ lookup('list', current.json) }}"
# ****************************************************************
# *** Move existing lun to default target and verify unchanged ***
# ****************************************************************
- name: Move lun to default target
netapp_e_lun_mapping:
<<: *creds
state: present
volume: test_host_mapping_volume
register: result
- name: Verify lun mapping
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ netapp_e_ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: current
- assert:
that: "{{ item['mapped'] }}"
msg: "Lun failed to be created."
loop: "{{ lookup('list', current.json)}}"
# *****************************************************************
# *** Move existing lun to specific target and verify unchanged ***
# *****************************************************************
- name: Move lun to default target
netapp_e_lun_mapping:
<<: *creds
state: present
target: test_host_mapping_host
volume: test_host_mapping_volume
register: result
- name: Verify lun mapping
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ netapp_e_ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: current
- assert:
that: "{{ item['mapped'] }}"
msg: "Lun failed to be created."
loop: "{{ lookup('list', current.json)}}"
# *******************************************
# *** Modify a volume mapping's lun value ***
# *******************************************
- name: Change volume mapping's lun value
netapp_e_lun_mapping:
<<: *creds
state: present
target: test_host_mapping_host
volume: test_host_mapping_volume
lun: 100
register: result
- pause: seconds=15
- name: Verify lun mapping
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ netapp_e_ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: current
- assert:
that: "{{ result.changed }}"
msg: "Lun failed to be unchanged."
loop: "{{ lookup('list', current.json)}}"
# Mapping a second volume to the same host with an already-used LUN number
# must fail; ignore_errors lets the play continue so the result can be asserted.
- name: Verify mapping fails when lun already in use on existing host object
  netapp_e_lun_mapping:
    <<: *creds
    state: present
    target: test_host_mapping_host
    volume: test_host_mapping_volume2
    lun: 100
  register: result
  ignore_errors: true

- pause: seconds=15

# The original looped this assert over a stale 'current.json' registration even
# though the condition never references 'item'; a single bare-expression assert
# checks the same thing.
- assert:
    that: not result.changed
    msg: "Lun succeeded when it should have failed."
- name: Verify mapping succeeds when the same lun is used on multiple host objects.
netapp_e_lun_mapping:
<<: *creds
state: present
target: test_host1
volume: test_host_mapping_volume2
lun: 100
register: result
- pause: seconds=15
- assert:
that: "{{ result.changed }}"
msg: "Lun failed to be unchanged."
loop: "{{ lookup('list', current.json)}}"
# *************************************************************************************************
# *** Verify that exact mapping details but different lun results in an unchanged configuration ***
# *************************************************************************************************
- name: Verify that exact mapping details but different lun results in an unchanged configuration
netapp_e_lun_mapping:
<<: *creds
state: absent
target: test_host_mapping_host
volume: test_host_mapping_volume
lun: 99
register: result
- name: Verify lun mapping
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ netapp_e_ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: current
- assert:
that: "{{ item['mapped'] and not result.changed }}"
msg: "Lun failed to be unchanged."
loop: "{{ lookup('list', current.json)}}"
# ********************************
# *** Delete newly created lun ***
# ********************************
- name: Delete lun creation
netapp_e_lun_mapping:
<<: *creds
state: absent
target: test_host_mapping_host
volume: test_host_mapping_volume
register: result
- name: Verify lun mapping
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ netapp_e_ssid }}/graph/xpath-filter?query=//volume[name='test_host_mapping_volume']"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
body_format: json
validate_certs: no
register: current
- assert:
that: "{{ not item['mapped'] }}"
msg: "Lun failed to be created."
loop: "{{ lookup('list', current.json)}}"
# ********************************************************
# *** Tear down test hosts, storage pools, and volumes ***
# ********************************************************
- name: Delete volume for host mapping
netapp_e_volume:
<<: *creds
state: absent
name: test_host_mapping_volume
storage_pool_name: test_host_mapping_storage_pool
size: 1
- name: Delete volume for host mapping
netapp_e_volume:
<<: *creds
state: absent
name: test_host_mapping_volume2
storage_pool_name: test_host_mapping_storage_pool
size: 1
- name: Delete storage pool for host mapping
netapp_e_storagepool:
<<: *creds
state: absent
name: test_host_mapping_storage_pool
raid_level: raid0
criteria_min_usable_capacity: 1
- name: Delete host for host mapping
netapp_e_host:
<<: *creds
state: absent
name: test_host_mapping_host
host_type_index: 27
- name: Delete host for host mapping
netapp_e_host:
<<: *creds
state: absent
name: test_host2
host_type_index: 27
- name: Delete host for host mapping
netapp_e_host:
<<: *creds
state: absent
name: test_host1
host_type_index: 27

View file

@ -1,10 +0,0 @@
# This test is not enabled by default, but can be utilized by defining required variables in integration_config.yml
# Example integration_config.yml:
# ---
#netapp_e_api_host: 10.113.1.111:8443
#netapp_e_api_username: admin
#netapp_e_api_password: myPass
#netapp_e_ssid: 1
unsupported
netapp/eseries

View file

@ -1 +0,0 @@
# Entry point for this integration test target; the actual test tasks live in run.yml.
- include_tasks: run.yml

File diff suppressed because it is too large Load diff

View file

@ -1,10 +0,0 @@
# This test is not enabled by default, but can be utilized by defining required variables in integration_config.yml
# Example integration_config.yml:
# ---
#netapp_e_api_host: 192.168.1.1
#netapp_e_api_username: admin
#netapp_e_api_password: myPass
#netapp_e_ssid: 1
unsupported
netapp/eseries

View file

@ -1 +0,0 @@
- include_tasks: run.yml

View file

@ -1,776 +0,0 @@
# Test code for the netapp_e_volume module
# (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

# Abort early when the required connection variables are not supplied via
# integration_config.yml; the &creds anchor is merged into every task below.
- name: NetApp Test volume module
  fail:
    msg: 'Please define netapp_e_api_username, netapp_e_api_password, netapp_e_api_host, and netapp_e_ssid.'
  when: netapp_e_api_username is undefined or netapp_e_api_password is undefined
        or netapp_e_api_host is undefined or netapp_e_ssid is undefined
  vars:
    credentials: &creds
      # NOTE(review): this file hard-codes ':8443' while sibling test files
      # expect the port inside netapp_e_api_host — confirm which convention
      # integration_config.yml uses before running.
      api_url: "https://{{ netapp_e_api_host }}:8443/devmgr/v2"
      api_username: "{{ netapp_e_api_username }}"
      api_password: "{{ netapp_e_api_password }}"
      ssid: "{{ netapp_e_ssid }}"
      validate_certs: false

- set_fact:
    credentials: *creds
# test setup
- name: Delete raid 0 storage pool
netapp_e_storagepool:
<<: *creds
state: absent
name: "{{ item }}"
loop:
- storage_pool
- storage_pool2
- storage_pool3
# Thick volume testing: create, delete, expand, change properties (read/write cache), expand and change properties,
- name: Create raid 0 storage pool
netapp_e_storagepool:
<<: *creds
state: present
name: storage_pool
criteria_min_usable_capacity: 5
criteria_size_unit: tb
erase_secured_drives: yes
raid_level: raid0
- name: Delete volume in raid 0 storage pool
netapp_e_volume:
<<: *creds
state: absent
name: volume
- name: Create volume in raid 0 storage pool
netapp_e_volume:
<<: *creds
state: present
name: volume
storage_pool_name: storage_pool
size: 100
size_unit: gb
register: results
- pause: seconds=15
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
item.capacity == '107374182400' and item.segmentSize == 131072}}"
msg: "Failed to create volume"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`volume`]') }}"
- name: Re-execute volume creation in raid 0 storage pool
netapp_e_volume:
<<: *creds
state: present
name: volume
storage_pool_name: storage_pool
size: 100
size_unit: gb
register: results
- pause: seconds=15
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ not results.changed and item.name == 'volume' and not item.thinProvisioned and
item.capacity == '107374182400' and item.segmentSize == 131072}}"
msg: "Failed to create volume"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`volume`]') }}"
- name: Update volume size
netapp_e_volume:
<<: *creds
state: present
name: volume
storage_pool_name: storage_pool
size: 200
size_unit: gb
register: results
- pause: seconds=15
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
item.capacity == '214748364800' and item.segmentSize == 131072}}"
msg: "Failed to create volume"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`volume`]') }}"
- pause: seconds=15
- name: Update volume properties
netapp_e_volume:
<<: *creds
state: present
name: volume
storage_pool_name: storage_pool
size: 200
size_unit: gb
write_cache_enable: true
read_cache_enable: false
register: results
- pause: seconds=15
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
item.capacity == '214748364800' and item.segmentSize == 131072 and
not item.cacheSettings.readCacheEnable and item.cacheSettings.writeCacheEnable}}"
msg: "Failed to create volume"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`volume`]') }}"
- name: Update volume properties and expand storage capabilities
netapp_e_volume:
<<: *creds
state: present
name: volume
storage_pool_name: storage_pool
size: 300
size_unit: gb
write_cache_enable: false
read_cache_enable: true
register: results
- pause: seconds=15
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
item.capacity == '322122547200' and item.segmentSize == 131072 and
item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable}}"
msg: "Failed to create volume"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`volume`]') }}"
# Workload tagging testing: create, utilize existing (name only, name with same attributes), modify attributes
- name: Add workload tag (change, new workload tag)
netapp_e_volume:
<<: *creds
state: present
name: volume
storage_pool_name: storage_pool
size: 300
size_unit: gb
write_cache_enable: false
read_cache_enable: true
workload_name: volume_tag
metadata:
volume_tag_key: volume_tag_value
register: results
- pause: seconds=15
- name: Validate volume workload changes
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
item.capacity == '322122547200' and item.segmentSize == 131072 and
item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable and
{'key': 'volumeTypeId', 'value': 'volume'} in item.metadata }}"
msg: "Failed to modify volume metadata!"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`volume`]') }}"
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/workloads"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: workload_tags
- assert:
that: "{{ item.name == 'volume_tag' and
{'key': 'volume_tag_key', 'value': 'volume_tag_value'} in item.workloadAttributes }}"
msg: "Workload tag failed to be created!"
loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
vars:
volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`]') }}"
- name: Repeat add workload tag (no change)
netapp_e_volume:
<<: *creds
state: present
name: volume
storage_pool_name: storage_pool
size: 300
size_unit: gb
write_cache_enable: false
read_cache_enable: true
workload_name: volume_tag
metadata:
volume_tag_key: volume_tag_value
register: results
- pause: seconds=15
- name: Validate volume workload changes
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ not results.changed and item.name == 'volume' and not item.thinProvisioned and
item.capacity == '322122547200' and item.segmentSize == 131072 and
item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable and
{'key': 'volumeTypeId', 'value': 'volume'} in item.metadata }}"
msg: "Failed to not modify volume metadata!"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`volume`]') }}"
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/workloads"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: workload_tags
- assert:
that: "{{ item.name == 'volume_tag' and
{'key': 'volume_tag_key', 'value': 'volume_tag_value'} in item.workloadAttributes }}"
msg: "Workload tag failed not to be changed"
loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
vars:
volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`]') }}"
- name: Workload tag (no change, just using workload_name)
netapp_e_volume:
<<: *creds
state: present
name: volume
storage_pool_name: storage_pool
size: 300
size_unit: gb
write_cache_enable: false
read_cache_enable: true
workload_name: volume_tag
register: results
- pause: seconds=15
- name: Validate volume workload changes
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ not results.changed and item.name == 'volume' and not item.thinProvisioned and
item.capacity == '322122547200' and item.segmentSize == 131072 and
item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable and
{'key': 'volumeTypeId', 'value': 'volume'} in item.metadata }}"
msg: "Failed to not modify volume metadata!"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`volume`]') }}"
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/workloads"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: workload_tags
- assert:
that: "{{ item.name == 'volume_tag' and
{'key': 'volume_tag_key', 'value': 'volume_tag_value'} in item.workloadAttributes }}"
msg: "Workload tag failed to not be modified!"
loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
vars:
volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`]') }}"
- name: Add workload tag (change, new attributes)
netapp_e_volume:
<<: *creds
state: present
name: volume
storage_pool_name: storage_pool
size: 300
size_unit: gb
write_cache_enable: false
read_cache_enable: true
workload_name: volume_tag
metadata:
volume_tag_key2: volume_tag_value2
register: results
- pause: seconds=15
- name: Validate volume workload changes
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
item.capacity == '322122547200' and item.segmentSize == 131072 and
item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable and
{'key': 'volumeTypeId', 'value': 'volume'} in item.metadata }}"
msg: "Failed to not modify volume metadata!"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`volume`]') }}"
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/workloads"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: workload_tags
- assert:
that: "{{ item.name == 'volume_tag' and
{'key': 'volume_tag_key2', 'value': 'volume_tag_value2'} in item.workloadAttributes }}"
msg: "Workload tag failed to be updated!"
loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
vars:
volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`]') }}"
- name: Remove workload tag from volume (change)
netapp_e_volume:
<<: *creds
state: present
name: volume
storage_pool_name: storage_pool
size: 300
size_unit: gb
write_cache_enable: false
read_cache_enable: true
register: results
- pause: seconds=15
- name: Validate volume workload changes
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
item.capacity == '322122547200' and item.segmentSize == 131072 and
item.cacheSettings.readCacheEnable and not item.cacheSettings.writeCacheEnable and
item.metadata == []}}"
msg: "Failed to not modify volume metadata!"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`volume`]') }}"
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/workloads"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: workload_tags
- assert:
that: "{{ item.name == 'volume_tag' and
{'key': 'volume_tag_key2', 'value': 'volume_tag_value2'} in item.workloadAttributes }}"
msg: "Workload tag failed to be updated!"
loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
vars:
volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`]') }}"
- name: Delete workload tag
uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/workloads"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: workload_tags
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/workloads/{{ item }}"
method: DELETE
status_code: 204
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
loop: "{{ lookup('list', volume_tag_id, wantList=True) }}"
vars:
volume_tag_id: "{{ workload_tags | json_query('json[?name==`volume_tag`].id') }}"
- name: Delete raid 0 storage pool
netapp_e_storagepool:
<<: *creds
state: absent
name: storage_pool
# *** Thin volume testing (May not work with simulator) ***
- name: Create dynamic disk pool
netapp_e_storagepool:
<<: *creds
state: present
name: storage_pool
criteria_min_usable_capacity: 2
criteria_size_unit: tb
- name: Create thin volume
netapp_e_volume:
<<: *creds
state: present
name: thin_volume
storage_pool_name: storage_pool
size: 131072
size_unit: gb
thin_provision: true
thin_volume_repo_size: 32
thin_volume_max_repo_size: 1024
register: results
- pause: seconds=15
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/thin-volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ results.changed and item.name == 'thin_volume' and item.thinProvisioned and
item.capacity == '140737488355328' and item.initialProvisionedCapacity == '34359738368' and
item.provisionedCapacityQuota == '1099511627776' and item.expansionPolicy == 'automatic' }}"
msg: "Failed to create volume"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`thin_volume`]') }}"
- name: (Rerun) Create thin volume
netapp_e_volume:
<<: *creds
state: present
name: thin_volume
storage_pool_name: storage_pool
size: 131072
size_unit: gb
thin_provision: true
thin_volume_repo_size: 32
thin_volume_max_repo_size: 1024
register: results
- pause: seconds=15
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/thin-volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ not results.changed and item.name == 'thin_volume' and item.thinProvisioned and
item.capacity == '140737488355328' and item.initialProvisionedCapacity == '34359738368' and
item.provisionedCapacityQuota == '1099511627776' and item.expansionPolicy == 'automatic' }}"
msg: "Failed to create volume"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`thin_volume`]') }}"
- name: Expand thin volume's virtual size
netapp_e_volume:
<<: *creds
state: present
name: thin_volume
storage_pool_name: storage_pool
size: 262144
size_unit: gb
thin_provision: true
thin_volume_repo_size: 32
thin_volume_max_repo_size: 1024
register: results
- pause: seconds=15
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/thin-volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ results.changed and item.name == 'thin_volume' and item.thinProvisioned and
item.capacity == '281474976710656' and item.initialProvisionedCapacity == '34359738368' and
item.provisionedCapacityQuota == '1099511627776' and item.expansionPolicy == 'automatic' }}"
msg: "Failed to create volume"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`thin_volume`]') }}"
- name: Expand thin volume's maximum repository size
netapp_e_volume:
<<: *creds
state: present
name: thin_volume
storage_pool_name: storage_pool
size: 262144
size_unit: gb
thin_provision: true
thin_volume_repo_size: 32
thin_volume_max_repo_size: 2048
register: results
- pause: seconds=15
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/thin-volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ results.changed and item.name == 'thin_volume' and item.thinProvisioned and
item.capacity == '281474976710656' and item.initialProvisionedCapacity == '34359738368' and
item.provisionedCapacityQuota == '2199023255552' and item.expansionPolicy == 'automatic' }}"
msg: "Failed to create volume"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`thin_volume`]') }}"
- name: Create dynamic disk pool
netapp_e_storagepool:
<<: *creds
state: present
name: storage_pool2
criteria_min_usable_capacity: 2
criteria_size_unit: tb
- pause: seconds=15
- name: Create second thin volume with manual expansion policy
netapp_e_volume:
<<: *creds
state: present
name: thin_volume2
storage_pool_name: storage_pool2
size_unit: gb
size: 131072
thin_provision: true
thin_volume_repo_size: 32
thin_volume_max_repo_size: 32
thin_volume_expansion_policy: manual
register: results
- pause: seconds=15
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/thin-volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ results.changed and item.name == 'thin_volume2' and item.thinProvisioned and
item.capacity == '140737488355328' and item.initialProvisionedCapacity == '34359738368' and
item.currentProvisionedCapacity == '34359738368' and item.expansionPolicy == 'manual' }}"
msg: "Failed to create volume"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`thin_volume2`]') }}"
- name: Create second thin volume with manual expansion policy
netapp_e_volume:
<<: *creds
state: present
name: thin_volume2
storage_pool_name: storage_pool2
size_unit: gb
size: 131072
thin_provision: true
thin_volume_repo_size: 288
thin_volume_max_repo_size: 288
thin_volume_expansion_policy: manual
register: results
- pause: seconds=15
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/thin-volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ results.changed and item.name == 'thin_volume2' and item.thinProvisioned and
item.capacity == '140737488355328' and item.initialProvisionedCapacity == '34359738368' and
item.currentProvisionedCapacity == '309237645312' and item.expansionPolicy == 'manual' }}"
msg: "Failed to create volume"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`thin_volume2`]') }}"
- name: Modify second thin volume to use automatic expansion policy
netapp_e_volume:
<<: *creds
state: present
name: thin_volume2
storage_pool_name: storage_pool2
size_unit: gb
size: 131072
thin_provision: true
thin_volume_repo_size: 288
thin_volume_max_repo_size: 288
thin_volume_expansion_policy: automatic
register: results
- pause: seconds=15
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/thin-volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ results.changed and item.name == 'thin_volume2' and item.thinProvisioned and
item.capacity == '140737488355328' and item.initialProvisionedCapacity == '34359738368' and
item.currentProvisionedCapacity == '309237645312' and item.expansionPolicy == 'automatic' }}"
msg: "Failed to create volume"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`thin_volume2`]') }}"
- name: Delete raid 0 storage pool
netapp_e_storagepool:
<<: *creds
state: absent
name: "{{ item }}"
loop:
- storage_pool
- storage_pool2
- name: Create raid 0 storage pool
netapp_e_storagepool:
<<: *creds
state: present
name: storage_pool
criteria_min_usable_capacity: 5
criteria_size_unit: tb
erase_secured_drives: yes
raid_level: raid0
# Thick volume expansion testing: wait and don't wait for operation to complete
- name: Create raid 6 storage pool
netapp_e_storagepool:
<<: *creds
state: present
name: storage_pool3
criteria_min_usable_capacity: 5
criteria_size_unit: tb
erase_secured_drives: yes
raid_level: raid6
- name: Delete volume in raid 6 storage pool
netapp_e_volume:
<<: *creds
state: absent
name: volume
- name: Create volume in raid 0 storage pool for expansion testing
netapp_e_volume:
<<: *creds
state: present
name: volume
storage_pool_name: storage_pool3
size: 1
size_unit: gb
register: results
- pause: seconds=10
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- assert:
that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
item.capacity == '1073741824' and item.segmentSize == 131072}}"
msg: "Failed to create volume"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`volume`]') }}"
- name: Modify volume in raid 0 storage pool and wait for expansion testing
netapp_e_volume:
<<: *creds
state: present
name: volume
storage_pool_name: storage_pool3
size: 10
size_unit: gb
wait_for_initialization: True
register: results
- pause: seconds=10
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/volumes/{{ volume[0]['id'] }}/expand"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: expansion_state
vars:
volume: "{{ current | json_query('json[?name==`volume`]') }}"
- assert:
that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
item.capacity == '10737418240' and item.segmentSize == 131072 and
expansion_state['json']['action'] == 'none'}}"
msg: "Volume expansion test failed."
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`volume`]') }}"
- name: Modify volume in raid 0 storage pool and don't wait for expansion testing
netapp_e_volume:
<<: *creds
state: present
name: volume
storage_pool_name: storage_pool3
size: 100
size_unit: gb
wait_for_initialization: False
register: results
- pause: seconds=10
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/volumes"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: current
- uri:
url: "{{ credentials.api_url }}/storage-systems/{{ credentials.ssid }}/volumes/{{ volume[0]['id'] }}/expand"
user: "{{ credentials.api_username }}"
password: "{{ credentials.api_password }}"
validate_certs: no
register: expansion_state
vars:
volume: "{{ current | json_query('json[?name==`volume`]') }}"
- assert:
that: "{{ results.changed and item.name == 'volume' and not item.thinProvisioned and
item.capacity == '107374182400' and item.segmentSize == 131072 and expansion_state['json']['action'] != 'none'}}"
msg: "Failed to create volume"
loop: "{{ lookup('list', volume, wantList=True) }}"
vars:
volume: "{{ current | json_query('json[?name==`volume`]') }}"
- name: Delete raid 0 storage pool
netapp_e_storagepool:
<<: *creds
state: absent
name: "{{ item }}"
loop:
- storage_pool3

View file

@ -4,7 +4,6 @@ integration_tests_dependencies:
- ansible.netcommon
unit_tests_dependencies:
- netbox.netbox
- netapp.ontap
- community.kubernetes
- ansible.netcommon
- cisco.meraki

View file

@ -3257,82 +3257,6 @@ plugins/modules/storage/netapp/na_ontap_gather_facts.py validate-modules:doc-mis
plugins/modules/storage/netapp/na_ontap_gather_facts.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/na_ontap_gather_facts.py validate-modules:parameter-state-invalid-choice
plugins/modules/storage/netapp/na_ontap_gather_facts.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_alerts.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_alerts.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_amg.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_amg.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_amg.py validate-modules:undocumented-parameter
plugins/modules/storage/netapp/netapp_e_amg_role.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_amg_role.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_amg_role.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_amg_role.py validate-modules:undocumented-parameter
plugins/modules/storage/netapp/netapp_e_amg_sync.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_amg_sync.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_asup.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_asup.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_auditlog.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_auth.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_auth.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_auth.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_drive_firmware.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_facts.py validate-modules:return-syntax-error
plugins/modules/storage/netapp/netapp_e_flashcache.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/storage/netapp/netapp_e_flashcache.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_flashcache.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_flashcache.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_flashcache.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_flashcache.py validate-modules:undocumented-parameter
plugins/modules/storage/netapp/netapp_e_global.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_host.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_host.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_hostgroup.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_hostgroup.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_iscsi_interface.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_iscsi_interface.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_iscsi_target.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_ldap.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_ldap.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_ldap.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_lun_mapping.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_lun_mapping.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_mgmt_interface.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_snapshot_group.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/storage/netapp/netapp_e_snapshot_group.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_snapshot_group.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_snapshot_group.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_snapshot_group.py validate-modules:undocumented-parameter
plugins/modules/storage/netapp/netapp_e_snapshot_images.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_snapshot_images.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_snapshot_images.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_snapshot_images.py validate-modules:undocumented-parameter
plugins/modules/storage/netapp/netapp_e_snapshot_volume.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/storage/netapp/netapp_e_snapshot_volume.py validate-modules:doc-default-does-not-match-spec
plugins/modules/storage/netapp/netapp_e_snapshot_volume.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_snapshot_volume.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_storage_system.py validate-modules:doc-default-does-not-match-spec
plugins/modules/storage/netapp/netapp_e_storage_system.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_storage_system.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_storage_system.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_storage_system.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_storage_system.py validate-modules:undocumented-parameter
plugins/modules/storage/netapp/netapp_e_storagepool.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_storagepool.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_syslog.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_syslog.py validate-modules:mutually_exclusive-unknown
plugins/modules/storage/netapp/netapp_e_syslog.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_syslog.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_volume.py validate-modules:doc-default-does-not-match-spec
plugins/modules/storage/netapp/netapp_e_volume.py validate-modules:doc-default-incompatible-type
plugins/modules/storage/netapp/netapp_e_volume.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_volume.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_volume.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_volume_copy.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/storage/netapp/netapp_e_volume_copy.py validate-modules:doc-default-does-not-match-spec
plugins/modules/storage/netapp/netapp_e_volume_copy.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_volume_copy.py validate-modules:implied-parameter-type-mismatch
plugins/modules/storage/netapp/netapp_e_volume_copy.py validate-modules:nonexistent-parameter-documented
plugins/modules/storage/netapp/netapp_e_volume_copy.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_volume_copy.py validate-modules:undocumented-parameter
plugins/modules/storage/netapp/sf_account_manager.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/sf_account_manager.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/sf_check_connections.py validate-modules:parameter-type-not-in-doc

View file

@ -3183,82 +3183,6 @@ plugins/modules/storage/netapp/sf_volume_manager.py validate-modules:doc-missing
plugins/modules/storage/netapp/sf_volume_manager.py validate-modules:parameter-invalid
plugins/modules/storage/netapp/sf_volume_manager.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/sf_volume_manager.py validate-modules:undocumented-parameter
plugins/modules/storage/netapp/netapp_e_alerts.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_alerts.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_amg.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_amg.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_amg.py validate-modules:undocumented-parameter
plugins/modules/storage/netapp/netapp_e_amg_role.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_amg_role.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_amg_role.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_amg_role.py validate-modules:undocumented-parameter
plugins/modules/storage/netapp/netapp_e_amg_sync.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_amg_sync.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_asup.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_asup.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_auditlog.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_auth.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_auth.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_auth.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_drive_firmware.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_facts.py validate-modules:return-syntax-error
plugins/modules/storage/netapp/netapp_e_flashcache.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/storage/netapp/netapp_e_flashcache.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_flashcache.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_flashcache.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_flashcache.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_flashcache.py validate-modules:undocumented-parameter
plugins/modules/storage/netapp/netapp_e_global.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_host.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_host.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_hostgroup.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_hostgroup.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_iscsi_interface.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_iscsi_interface.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_iscsi_target.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_ldap.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_ldap.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_ldap.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_lun_mapping.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_lun_mapping.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_mgmt_interface.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_snapshot_group.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/storage/netapp/netapp_e_snapshot_group.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_snapshot_group.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_snapshot_group.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_snapshot_group.py validate-modules:undocumented-parameter
plugins/modules/storage/netapp/netapp_e_snapshot_images.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_snapshot_images.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_snapshot_images.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_snapshot_images.py validate-modules:undocumented-parameter
plugins/modules/storage/netapp/netapp_e_snapshot_volume.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/storage/netapp/netapp_e_snapshot_volume.py validate-modules:doc-default-does-not-match-spec
plugins/modules/storage/netapp/netapp_e_snapshot_volume.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_snapshot_volume.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_storage_system.py validate-modules:doc-default-does-not-match-spec
plugins/modules/storage/netapp/netapp_e_storage_system.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_storage_system.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_storage_system.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_storage_system.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_storage_system.py validate-modules:undocumented-parameter
plugins/modules/storage/netapp/netapp_e_storagepool.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_storagepool.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_syslog.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_syslog.py validate-modules:mutually_exclusive-unknown
plugins/modules/storage/netapp/netapp_e_syslog.py validate-modules:parameter-list-no-elements
plugins/modules/storage/netapp/netapp_e_syslog.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_volume.py validate-modules:doc-default-does-not-match-spec
plugins/modules/storage/netapp/netapp_e_volume.py validate-modules:doc-default-incompatible-type
plugins/modules/storage/netapp/netapp_e_volume.py validate-modules:doc-missing-type
plugins/modules/storage/netapp/netapp_e_volume.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_volume.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_volume_copy.py validate-modules:doc-choices-do-not-match-spec
plugins/modules/storage/netapp/netapp_e_volume_copy.py validate-modules:doc-default-does-not-match-spec
plugins/modules/storage/netapp/netapp_e_volume_copy.py validate-modules:doc-required-mismatch
plugins/modules/storage/netapp/netapp_e_volume_copy.py validate-modules:implied-parameter-type-mismatch
plugins/modules/storage/netapp/netapp_e_volume_copy.py validate-modules:nonexistent-parameter-documented
plugins/modules/storage/netapp/netapp_e_volume_copy.py validate-modules:parameter-type-not-in-doc
plugins/modules/storage/netapp/netapp_e_volume_copy.py validate-modules:undocumented-parameter
plugins/modules/storage/purestorage/purefa_facts.py validate-modules:doc-required-mismatch
plugins/modules/storage/purestorage/purefa_facts.py validate-modules:parameter-list-no-elements
plugins/modules/storage/purestorage/purefa_facts.py validate-modules:return-syntax-error

View file

@ -1,183 +0,0 @@
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_alerts import Alerts
from ansible_collections.community.general.tests.unit.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
__metaclass__ = type
from ansible_collections.community.general.tests.unit.compat import mock
class AlertsTest(ModuleTestCase):
    """Unit tests for the Alerts class of the netapp_e_alerts module.

    Each test stages simulated Ansible module arguments via set_module_args()
    and patches the module's HTTP request() helper so no real API is contacted.
    """

    # Minimal module arguments common to every test invocation.
    REQUIRED_PARAMS = {
        'api_username': 'rw',
        'api_password': 'password',
        'api_url': 'http://localhost',
        'ssid': '1',
        'state': 'disabled'
    }
    # Import path of the module's request() helper, patched by the tests below.
    REQ_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_alerts.request'

    def _set_args(self, **kwargs):
        # Merge per-test arguments over the required defaults and stage them
        # as the simulated module invocation arguments.
        module_args = self.REQUIRED_PARAMS.copy()
        if kwargs is not None:  # NOTE(review): always true for **kwargs; kept as-is
            module_args.update(kwargs)
        set_module_args(module_args)

    def _validate_args(self, **kwargs):
        # Stage the arguments and instantiate Alerts, which validates its
        # arguments on construction (raising AnsibleFailJson when invalid).
        self._set_args(**kwargs)
        Alerts()

    def test_validation_disable(self):
        """Ensure a default configuration succeeds"""
        self._validate_args()

    def test_validation_enable(self):
        """Ensure a typical, default configuration succeeds"""
        self._validate_args(state='enabled', server='localhost', sender='x@y.z', recipients=['a@b.c'])

    def test_validation_fail_required(self):
        """Ensure we fail on missing configuration"""
        # Missing recipients
        with self.assertRaises(AnsibleFailJson):
            self._validate_args(state='enabled', server='localhost', sender='x@y.z')
            Alerts()

        # Missing sender
        with self.assertRaises(AnsibleFailJson):
            self._validate_args(state='enabled', server='localhost', recipients=['a@b.c'])
            Alerts()

        # Missing server
        with self.assertRaises(AnsibleFailJson):
            self._validate_args(state='enabled', sender='x@y.z', recipients=['a@b.c'])

    def test_validation_fail(self):
        # Empty recipients
        with self.assertRaises(AnsibleFailJson):
            self._validate_args(state='enabled', server='localhost', sender='x@y.z', recipients=[])

        # Bad sender
        with self.assertRaises(AnsibleFailJson):
            self._validate_args(state='enabled', server='localhost', sender='y.z', recipients=['a@b.c'])

    def test_get_configuration(self):
        """Validate retrieving the current configuration"""
        self._set_args(state='enabled', server='localhost', sender='x@y.z', recipients=['a@b.c'])

        expected = 'result'
        alerts = Alerts()
        # Expecting an update
        with mock.patch(self.REQ_FUNC, return_value=(200, expected)) as req:
            actual = alerts.get_configuration()
            self.assertEqual(expected, actual)
            self.assertEqual(req.call_count, 1)

    def test_update_configuration(self):
        """Validate updating the configuration"""
        # Device-side configuration returned by the mocked getter.
        initial = dict(alertingEnabled=True,
                       emailServerAddress='localhost',
                       sendAdditionalContactInformation=True,
                       additionalContactInformation='None',
                       emailSenderAddress='x@y.z',
                       recipientEmailAddresses=['x@y.z']
                       )

        # Module arguments that exactly match the current configuration above.
        args = dict(state='enabled', server=initial['emailServerAddress'], sender=initial['emailSenderAddress'],
                    contact=initial['additionalContactInformation'], recipients=initial['recipientEmailAddresses'])

        self._set_args(**args)

        alerts = Alerts()

        # Ensure when trigger updates when each relevant field is changed
        with mock.patch(self.REQ_FUNC, return_value=(200, None)) as req:
            with mock.patch.object(alerts, 'get_configuration', return_value=initial):
                # No differences -> no update expected.
                update = alerts.update_configuration()
                self.assertFalse(update)

                # Each of the following field changes must report an update.
                alerts.sender = 'a@b.c'
                update = alerts.update_configuration()
                self.assertTrue(update)
                self._set_args(**args)

                alerts.recipients = ['a@b.c']
                update = alerts.update_configuration()
                self.assertTrue(update)
                self._set_args(**args)

                alerts.contact = 'abc'
                update = alerts.update_configuration()
                self.assertTrue(update)
                self._set_args(**args)

                alerts.server = 'abc'
                update = alerts.update_configuration()
                self.assertTrue(update)

    def test_send_test_email_check(self):
        """Ensure we handle check_mode correctly"""
        self._set_args(test=True)
        alerts = Alerts()
        alerts.check_mode = True
        # In check mode no request may be issued, even when a change occurred.
        with mock.patch(self.REQ_FUNC) as req:
            with mock.patch.object(alerts, 'update_configuration', return_value=True):
                alerts.send_test_email()
                self.assertFalse(req.called)

    def test_send_test_email(self):
        """Ensure we send a test email if test=True"""
        self._set_args(test=True)
        alerts = Alerts()

        with mock.patch(self.REQ_FUNC, return_value=(200, dict(response='emailSentOK'))) as req:
            alerts.send_test_email()
            self.assertTrue(req.called)

    def test_send_test_email_fail(self):
        """Ensure we fail if the test returned a failure status"""
        self._set_args(test=True)
        alerts = Alerts()

        # The failure message returned by the API must surface in the
        # AnsibleFailJson exception text.
        ret_msg = 'fail'
        with self.assertRaisesRegexp(AnsibleFailJson, ret_msg):
            with mock.patch(self.REQ_FUNC, return_value=(200, dict(response=ret_msg))) as req:
                alerts.send_test_email()
                self.assertTrue(req.called)

    def test_send_test_email_fail_connection(self):
        """Ensure we fail cleanly if we hit a connection failure"""
        self._set_args(test=True)
        alerts = Alerts()

        with self.assertRaisesRegexp(AnsibleFailJson, r"failed to send"):
            with mock.patch(self.REQ_FUNC, side_effect=Exception) as req:
                alerts.send_test_email()
                self.assertTrue(req.called)

    def test_update(self):
        # Ensure that when test is enabled and alerting is enabled, we run the test
        self._set_args(state='enabled', server='localhost', sender='x@y.z', recipients=['a@b.c'], test=True)
        alerts = Alerts()
        with self.assertRaisesRegexp(AnsibleExitJson, r"enabled"):
            with mock.patch.object(alerts, 'update_configuration', return_value=True):
                with mock.patch.object(alerts, 'send_test_email') as test:
                    alerts.update()
                    self.assertTrue(test.called)

        # Ensure we don't run a test when changed=False
        with self.assertRaisesRegexp(AnsibleExitJson, r"enabled"):
            with mock.patch.object(alerts, 'update_configuration', return_value=False):
                with mock.patch.object(alerts, 'send_test_email') as test:
                    alerts.update()
                    self.assertFalse(test.called)

        # Ensure that test is not called when we have alerting disabled
        self._set_args(state='disabled')
        alerts = Alerts()
        with self.assertRaisesRegexp(AnsibleExitJson, r"disabled"):
            with mock.patch.object(alerts, 'update_configuration', return_value=True):
                with mock.patch.object(alerts, 'send_test_email') as test:
                    alerts.update()
                    self.assertFalse(test.called)

View file

@ -1,181 +0,0 @@
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
import json
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_asup import Asup
from ansible_collections.community.general.tests.unit.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
__metaclass__ = type
from ansible_collections.community.general.tests.unit.compat import mock
class AsupTest(ModuleTestCase):
    """Unit tests for the Asup class of the netapp_e_asup module.

    Each test stages simulated Ansible module arguments via set_module_args()
    and patches the module's HTTP request() helper so no real API is contacted.
    """

    # Minimal module arguments common to every test invocation.
    REQUIRED_PARAMS = {
        'api_username': 'rw',
        'api_password': 'password',
        'api_url': 'http://localhost',
        'ssid': '1',
    }
    # Import path of the module's request() helper, patched by the tests below.
    REQ_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_asup.request'

    def _set_args(self, args=None):
        # Merge per-test arguments over the required defaults and stage them
        # as the simulated module invocation arguments.
        module_args = self.REQUIRED_PARAMS.copy()
        if args is not None:
            module_args.update(args)
        set_module_args(module_args)

    def test_get_config_asup_capable_false(self):
        """Ensure we fail correctly if ASUP is not available on this platform"""
        self._set_args()

        expected = dict(asupCapable=False, onDemandCapable=True)
        asup = Asup()
        # Expecting an update
        with self.assertRaisesRegexp(AnsibleFailJson, r"not supported"):
            with mock.patch(self.REQ_FUNC, return_value=(200, expected)):
                asup.get_configuration()

    def test_get_config_on_demand_capable_false(self):
        """Ensure we fail correctly if ASUP is not available on this platform"""
        self._set_args()

        expected = dict(asupCapable=True, onDemandCapable=False)
        asup = Asup()
        # Expecting an update
        with self.assertRaisesRegexp(AnsibleFailJson, r"not supported"):
            with mock.patch(self.REQ_FUNC, return_value=(200, expected)):
                asup.get_configuration()

    def test_get_config(self):
        """Validate retrieving the ASUP configuration"""
        self._set_args()

        # Both capability flags true: retrieval must succeed and echo the body.
        expected = dict(asupCapable=True, onDemandCapable=True)
        asup = Asup()

        with mock.patch(self.REQ_FUNC, return_value=(200, expected)):
            config = asup.get_configuration()
            self.assertEqual(config, expected)

    def test_update_configuration(self):
        """Validate retrieving the ASUP configuration"""
        self._set_args(dict(asup='enabled'))

        expected = dict()
        # Current device-side state returned by the mocked getter.
        initial = dict(asupCapable=True,
                       asupEnabled=True,
                       onDemandEnabled=False,
                       remoteDiagsEnabled=False,
                       schedule=dict(daysOfWeek=[], dailyMinTime=0, weeklyMinTime=0, dailyMaxTime=24, weeklyMaxTime=24))
        asup = Asup()

        with mock.patch(self.REQ_FUNC, return_value=(200, expected)) as req:
            with mock.patch.object(asup, 'get_configuration', return_value=initial):
                updated = asup.update_configuration()
                self.assertTrue(req.called)
                self.assertTrue(updated)

    def test_update_configuration_asup_disable(self):
        """Validate retrieving the ASUP configuration"""
        self._set_args(dict(asup='disabled'))

        expected = dict()
        initial = dict(asupCapable=True,
                       asupEnabled=True,
                       onDemandEnabled=False,
                       remoteDiagsEnabled=False,
                       schedule=dict(daysOfWeek=[], dailyMinTime=0, weeklyMinTime=0, dailyMaxTime=24, weeklyMaxTime=24))
        asup = Asup()

        with mock.patch(self.REQ_FUNC, return_value=(200, expected)) as req:
            with mock.patch.object(asup, 'get_configuration', return_value=initial):
                updated = asup.update_configuration()
                self.assertTrue(updated)
                self.assertTrue(req.called)

                # Ensure it was called with the right arguments
                called_with = req.call_args
                body = json.loads(called_with[1]['data'])
                self.assertFalse(body['asupEnabled'])

    def test_update_configuration_enable(self):
        """Validate retrieving the ASUP configuration"""
        self._set_args(dict(asup='enabled'))

        expected = dict()
        initial = dict(asupCapable=False,
                       asupEnabled=False,
                       onDemandEnabled=False,
                       remoteDiagsEnabled=False,
                       schedule=dict(daysOfWeek=[], dailyMinTime=0, weeklyMinTime=0, dailyMaxTime=24, weeklyMaxTime=24))
        asup = Asup()

        with mock.patch(self.REQ_FUNC, return_value=(200, expected)) as req:
            with mock.patch.object(asup, 'get_configuration', return_value=initial):
                updated = asup.update_configuration()
                self.assertTrue(updated)
                self.assertTrue(req.called)

                # Ensure it was called with the right arguments
                # Enabling ASUP must also enable on-demand and remote diags.
                called_with = req.call_args
                body = json.loads(called_with[1]['data'])
                self.assertTrue(body['asupEnabled'])
                self.assertTrue(body['onDemandEnabled'])
                self.assertTrue(body['remoteDiagsEnabled'])

    def test_update_configuration_request_exception(self):
        """Validate exception handling when request throws an exception."""
        config_response = dict(asupEnabled=True,
                               onDemandEnabled=True,
                               remoteDiagsEnabled=True,
                               schedule=dict(daysOfWeek=[],
                                             dailyMinTime=0,
                                             weeklyMinTime=0,
                                             dailyMaxTime=24,
                                             weeklyMaxTime=24))

        self._set_args(dict(state="enabled"))
        asup = Asup()

        with self.assertRaises(Exception):
            with mock.patch.object(asup, 'get_configuration', return_value=config_response):
                with mock.patch(self.REQ_FUNC, side_effect=Exception):
                    asup.update_configuration()

    def test_init_schedule(self):
        """Validate schedule correct schedule initialization"""
        self._set_args(dict(state="enabled", active=True, days=["sunday", "monday", "tuesday"], start=20, end=24))
        asup = Asup()

        self.assertTrue(asup.asup)
        # Hours are converted to minutes; end=24 is clamped to 1439 (23:59).
        self.assertEqual(asup.days, ["sunday", "monday", "tuesday"]),  # NOTE(review): trailing comma is a no-op
        self.assertEqual(asup.start, 1200)
        self.assertEqual(asup.end, 1439)

    def test_init_schedule_invalid(self):
        """Validate updating ASUP with invalid schedule fails test."""
        # start must precede end.
        self._set_args(dict(state="enabled", active=True, start=22, end=20))
        with self.assertRaisesRegexp(AnsibleFailJson, r"start time is invalid"):
            Asup()

    def test_init_schedule_days_invalid(self):
        """Validate updating ASUP with invalid schedule fails test."""
        # Unknown day names are rejected by argument validation.
        self._set_args(dict(state="enabled", active=True, days=["someday", "thataday", "nonday"]))
        with self.assertRaises(AnsibleFailJson):
            Asup()

    def test_update(self):
        """Validate updating ASUP with valid schedule passes"""
        initial = dict(asupCapable=True,
                       onDemandCapable=True,
                       asupEnabled=True,
                       onDemandEnabled=False,
                       remoteDiagsEnabled=False,
                       schedule=dict(daysOfWeek=[], dailyMinTime=0, weeklyMinTime=0, dailyMaxTime=24, weeklyMaxTime=24))
        self._set_args(dict(state="enabled", active=True, days=["sunday", "monday", "tuesday"], start=10, end=20))
        asup = Asup()
        # update() exits the module via AnsibleExitJson on success.
        with self.assertRaisesRegexp(AnsibleExitJson, r"ASUP settings have been updated"):
            with mock.patch(self.REQ_FUNC, return_value=(200, dict(asupCapable=True))):
                with mock.patch.object(asup, "get_configuration", return_value=initial):
                    asup.update()

View file

@ -1,234 +0,0 @@
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_auditlog import AuditLog
from ansible_collections.community.general.tests.unit.modules.utils import AnsibleFailJson, ModuleTestCase, set_module_args
__metaclass__ = type
from ansible_collections.community.general.tests.unit.compat import mock
class AuditLogTests(ModuleTestCase):
REQUIRED_PARAMS = {'api_username': 'rw',
'api_password': 'password',
'api_url': 'http://localhost',
'ssid': '1'}
REQ_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_auditlog.request'
MAX_RECORDS_MAXIMUM = 50000
MAX_RECORDS_MINIMUM = 100
def _set_args(self, **kwargs):
module_args = self.REQUIRED_PARAMS.copy()
if kwargs is not None:
module_args.update(kwargs)
set_module_args(module_args)
def test_max_records_argument_pass(self):
"""Verify AuditLog arument's max_records and threshold upper and lower boundaries."""
initial = {"max_records": 1000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90}
max_records_set = (self.MAX_RECORDS_MINIMUM, 25000, self.MAX_RECORDS_MAXIMUM)
for max_records in max_records_set:
initial["max_records"] = max_records
self._set_args(**initial)
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": False})):
audit_log = AuditLog()
self.assertTrue(audit_log.max_records == max_records)
def test_max_records_argument_fail(self):
"""Verify AuditLog arument's max_records and threshold upper and lower boundaries."""
initial = {"max_records": 1000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90}
max_records_set = (self.MAX_RECORDS_MINIMUM - 1, self.MAX_RECORDS_MAXIMUM + 1)
for max_records in max_records_set:
with self.assertRaisesRegexp(AnsibleFailJson, r"Audit-log max_records count must be between 100 and 50000"):
initial["max_records"] = max_records
self._set_args(**initial)
AuditLog()
def test_threshold_argument_pass(self):
"""Verify AuditLog arument's max_records and threshold upper and lower boundaries."""
initial = {"max_records": 1000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90}
threshold_set = (60, 75, 90)
for threshold in threshold_set:
initial["threshold"] = threshold
self._set_args(**initial)
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": False})):
audit_log = AuditLog()
self.assertTrue(audit_log.threshold == threshold)
def test_threshold_argument_fail(self):
"""Verify AuditLog arument's max_records and threshold upper and lower boundaries."""
initial = {"max_records": 1000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90}
threshold_set = (59, 91)
for threshold in threshold_set:
with self.assertRaisesRegexp(AnsibleFailJson, r"Audit-log percent threshold must be between 60 and 90"):
initial["threshold"] = threshold
self._set_args(**initial)
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": False})):
AuditLog()
def test_is_proxy_pass(self):
"""Verify that True is returned when proxy is used to communicate with storage."""
initial = {"max_records": 1000,
"log_level": "writeOnly",
"full_policy": "overWrite",
"threshold": 90,
"api_url": "https://10.1.1.10/devmgr/v2"}
self._set_args(**initial)
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
audit_log = AuditLog()
with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
self.assertTrue(audit_log.is_proxy())
def test_is_proxy_fail(self):
    """Verify is_proxy() raises AnsibleFailJson when the about request fails."""
    module_args = {"max_records": 1000,
                   "log_level": "writeOnly",
                   "full_policy": "overWrite",
                   "threshold": 90}
    self._set_args(**module_args)
    with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
        instance = AuditLog()
    with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve the webservices about information"):
        # A request that does not yield a usable (rc, data) tuple must be fatal.
        with mock.patch(self.REQ_FUNC, return_value=Exception()):
            instance.is_proxy()
def test_get_configuration_pass(self):
    """Verify get_configuration() returns the audit-log settings payload unmodified."""
    module_args = {"max_records": 1000,
                   "log_level": "writeOnly",
                   "full_policy": "overWrite",
                   "threshold": 90}
    config_payload = {"auditLogMaxRecords": 1000,
                      "auditLogLevel": "writeOnly",
                      "auditLogFullPolicy": "overWrite",
                      "auditLogWarningThresholdPct": 90}
    self._set_args(**module_args)
    with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
        instance = AuditLog()
    with mock.patch(self.REQ_FUNC, return_value=(200, config_payload)):
        self.assertEqual(instance.get_configuration(), config_payload)
def test_get_configuration_fail(self):
    """Verify get_configuration() raises AnsibleFailJson when the request fails."""
    module_args = {"max_records": 1000,
                   "log_level": "writeOnly",
                   "full_policy": "overWrite",
                   "threshold": 90}
    self._set_args(**module_args)
    with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
        instance = AuditLog()
    with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve the audit-log configuration!"):
        with mock.patch(self.REQ_FUNC, return_value=Exception()):
            instance.get_configuration()
def test_build_configuration_pass(self):
    """Verify build_configuration() reports an update whenever any option differs from the array."""
    current_config = {"auditLogMaxRecords": 1000,
                      "auditLogLevel": "writeOnly",
                      "auditLogFullPolicy": "overWrite",
                      "auditLogWarningThresholdPct": 90}
    base_args = {"max_records": 1000,
                 "log_level": "writeOnly",
                 "full_policy": "overWrite",
                 "threshold": 90}
    # Each entry changes exactly one module option away from current_config.
    for change in ({"max_records": 50000},
                   {"log_level": "all"},
                   {"full_policy": "preventSystemAccess"},
                   {"threshold": 75}):
        modified_args = dict(base_args, **change)
        self._set_args(**modified_args)
        with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
            instance = AuditLog()
        with mock.patch(self.REQ_FUNC, return_value=(200, current_config)):
            self.assertTrue(instance.build_configuration())
def test_delete_log_messages_fail(self):
    """Verify delete_log_messages() raises AnsibleFailJson when the request fails."""
    module_args = {"max_records": 1000,
                   "log_level": "writeOnly",
                   "full_policy": "overWrite",
                   "threshold": 90}
    self._set_args(**module_args)
    with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
        instance = AuditLog()
    with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to delete audit-log messages!"):
        with mock.patch(self.REQ_FUNC, return_value=Exception()):
            instance.delete_log_messages()
def test_update_configuration_delete_pass(self):
    """Verify update_configuration() succeeds when a 422 full-log condition is resolved by force."""
    current_config = {"auditLogMaxRecords": 1000,
                      "auditLogLevel": "writeOnly",
                      "auditLogFullPolicy": "overWrite",
                      "auditLogWarningThresholdPct": 90}
    module_args = {"max_records": 2000,
                   "log_level": "writeOnly",
                   "full_policy": "overWrite",
                   "threshold": 90,
                   "force": True}
    self._set_args(**module_args)
    with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
        instance = AuditLog()
    # Request sequence: fetch config, update rejected with 422 (log full),
    # then the forced delete and retried update both succeed.
    request_sequence = [(200, current_config),
                        (422, {u"invalidFieldsIfKnown": None,
                               u"errorMessage": u"Configuration change...",
                               u"localizedMessage": u"Configuration change...",
                               u"retcode": u"auditLogImmediateFullCondition",
                               u"codeType": u"devicemgrerror"}),
                        (200, None),
                        (200, None)]
    with mock.patch(self.REQ_FUNC, side_effect=request_sequence):
        self.assertTrue(instance.update_configuration())
def test_update_configuration_delete_skip_fail(self):
    """Verify update_configuration() raises AnsibleFailJson when the update errors and force is off."""
    current_config = {"auditLogMaxRecords": 1000,
                      "auditLogLevel": "writeOnly",
                      "auditLogFullPolicy": "overWrite",
                      "auditLogWarningThresholdPct": 90}
    module_args = {"max_records": 2000,
                   "log_level": "writeOnly",
                   "full_policy": "overWrite",
                   "threshold": 90,
                   "force": False}
    self._set_args(**module_args)
    with mock.patch(self.REQ_FUNC, return_value=(200, {"runningAsProxy": True})):
        instance = AuditLog()
    with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to update audit-log configuration!"):
        # The Exception instance in side_effect is raised by the mock on the second call.
        request_sequence = [(200, current_config), Exception(422, {"errorMessage": "error"}),
                            (200, None), (200, None)]
        with mock.patch(self.REQ_FUNC, side_effect=request_sequence):
            instance.update_configuration()

View file

@ -1,216 +0,0 @@
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
try:
from unittest import mock
except ImportError:
import mock
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_drive_firmware import NetAppESeriesDriveFirmware
from ansible_collections.community.general.tests.unit.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
class HostTest(ModuleTestCase):
    """Unit tests for the NetAppESeriesDriveFirmware class of netapp_e_drive_firmware.

    All web-service traffic is stubbed by patching the class's ``request`` method;
    no live array is contacted.
    """

    # Minimum module arguments required by every test case.
    REQUIRED_PARAMS = {"api_username": "rw",
                       "api_password": "password",
                       "api_url": "http://localhost",
                       "ssid": "1"}

    # Dotted patch targets inside the module under test.
    REQUEST_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_drive_firmware.NetAppESeriesDriveFirmware.request'
    CREATE_MULTIPART_FORMDATA_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_drive_firmware.create_multipart_formdata'
    SLEEP_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_drive_firmware.sleep'

    # Canned upgrade_list() result: two firmware files, three drive references each.
    UPGRADE_LIST_RESPONSE = ({"filename": "test_drive_firmware_1",
                              "driveRefList": ["010000005000C5007EDE4ECF0000000000000000",
                                               "010000005000C5007EDF9AAB0000000000000000",
                                               "010000005000C5007EDBE3C70000000000000000"]},
                             {"filename": "test_drive_firmware_2",
                              "driveRefList": ["010000005000C5007EDE4ECF0000000000000001",
                                               "010000005000C5007EDF9AAB0000000000000001",
                                               "010000005000C5007EDBE3C70000000000000001"]})

    # Canned firmware/drive compatibility payload returned by the array.
    FIRMWARE_DRIVES_RESPONSE = {"compatibilities": [
        {"filename": "test_drive_firmware_1",
         "firmwareVersion": "MS02",
         "supportedFirmwareVersions": ["MSB6", "MSB8", "MS00", "MS02"],
         "compatibleDrives": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "onlineUpgradeCapable": True},
                              {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "onlineUpgradeCapable": True},
                              {"driveRef": "010000005000C5007EDBE3C70000000000000000", "onlineUpgradeCapable": True}]},
        {"filename": "test_drive_firmware_2",
         "firmwareVersion": "MS01",
         "supportedFirmwareVersions": ["MSB8", "MS00", "MS01"],
         "compatibleDrives": [{"driveRef": "010000005000C5007EDE4ECF0000000000000001", "onlineUpgradeCapable": True},
                              {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "onlineUpgradeCapable": False},
                              {"driveRef": "010000005000C5007EDBE3C70000000000000001", "onlineUpgradeCapable": True}]}]}

    def _set_args(self, args):
        """Merge ``args`` over REQUIRED_PARAMS and install them as the module arguments."""
        module_args = self.REQUIRED_PARAMS.copy()
        module_args.update(args)
        set_module_args(module_args)

    def test_upload_firmware(self):
        """Verify upload_firmware() raises AnsibleFailJson when the upload request fails."""
        self._set_args({"firmware": ["path_to_test_drive_firmware_1", "path_to_test_drive_firmware_2"]})
        firmware_object = NetAppESeriesDriveFirmware()
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to upload drive firmware"):
            # request returns an Exception instance instead of an (rc, data) tuple.
            with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
                with mock.patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
                    firmware_object.upload_firmware()

    def test_upgrade_list_pass(self):
        """Verify upgrade_list() selects only drives whose firmware is downlevel."""
        # Two of the three compatible drives (MS00, MS01) are below the file's MS02.
        side_effects = [(200, self.FIRMWARE_DRIVES_RESPONSE),
                        (200, {"offline": False, "available": True, "firmwareVersion": "MS00"}),
                        (200, {"offline": False, "available": True, "firmwareVersion": "MS01"}),
                        (200, {"offline": False, "available": True, "firmwareVersion": "MS02"})]
        self._set_args({"firmware": ["path/to/test_drive_firmware_1"]})
        firmware_object = NetAppESeriesDriveFirmware()
        with mock.patch(self.REQUEST_FUNC, side_effect=side_effects):
            self.assertEqual(firmware_object.upgrade_list(), [{"driveRefList": ["010000005000C5007EDE4ECF0000000000000000",
                                                                                "010000005000C5007EDF9AAB0000000000000000"],
                                                               "filename": "test_drive_firmware_1"}])

        # All drives already at MS02: nothing to upgrade.
        side_effects = [(200, self.FIRMWARE_DRIVES_RESPONSE),
                        (200, {"offline": False, "available": True, "firmwareVersion": "MS02"}),
                        (200, {"offline": False, "available": True, "firmwareVersion": "MS02"}),
                        (200, {"offline": False, "available": True, "firmwareVersion": "MS02"})]
        self._set_args({"firmware": ["path/to/test_drive_firmware_1"]})
        firmware_object = NetAppESeriesDriveFirmware()
        with mock.patch(self.REQUEST_FUNC, side_effect=side_effects):
            self.assertEqual(firmware_object.upgrade_list(), [])

    def test_upgrade_list_fail(self):
        """Verify upgrade_list() raises the expected exceptions on each failure path."""
        self._set_args({"firmware": ["path_to_test_drive_firmware_1"]})
        firmware_object = NetAppESeriesDriveFirmware()
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to complete compatibility and health check."):
            # Fixed: was ``response=Exception()``, which is not a patch option and only
            # set a ``response`` attribute on the replacement mock; ``return_value``
            # matches the stubbing convention used by the sibling tests.
            with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
                firmware_object.upgrade_list()

        # Drive-status retrieval blows up on the third drive request.
        side_effects = [(200, self.FIRMWARE_DRIVES_RESPONSE),
                        (200, {"offline": False, "available": True, "firmwareVersion": "MS01"}),
                        (200, {"offline": False, "available": True, "firmwareVersion": "MS00"}),
                        Exception()]
        self._set_args({"firmware": ["path/to/test_drive_firmware_1"]})
        firmware_object = NetAppESeriesDriveFirmware()
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve drive information."):
            with mock.patch(self.REQUEST_FUNC, side_effect=side_effects):
                firmware_object.upgrade_list()

        # test_drive_firmware_2 contains a drive flagged onlineUpgradeCapable=False.
        side_effects = [(200, self.FIRMWARE_DRIVES_RESPONSE),
                        (200, {"offline": False, "available": True, "firmwareVersion": "MS01"}),
                        (200, {"offline": False, "available": True, "firmwareVersion": "MS00"}),
                        (200, {"offline": False, "available": True, "firmwareVersion": "MS00"})]
        self._set_args({"firmware": ["path/to/test_drive_firmware_2"], "upgrade_drives_online": True})
        firmware_object = NetAppESeriesDriveFirmware()
        with self.assertRaisesRegexp(AnsibleFailJson, "Drive is not capable of online upgrade."):
            with mock.patch(self.REQUEST_FUNC, side_effect=side_effects):
                firmware_object.upgrade_list()

    def test_wait_for_upgrade_completion_pass(self):
        """Verify wait_for_upgrade_completion() polls until every drive reports okay."""
        self._set_args({"firmware": ["path/to/test_drive_firmware_1", "path/to/test_drive_firmware_2"], "wait_for_completion": True})
        firmware_object = NetAppESeriesDriveFirmware()
        firmware_object.upgrade_drives_online = True
        firmware_object.upgrade_list = lambda: self.UPGRADE_LIST_RESPONSE
        # sleep is patched out so the polling loop runs instantly; each response
        # leaves a different drive in a transient state until the final all-okay poll.
        with mock.patch(self.SLEEP_FUNC, return_value=None):
            with mock.patch(self.REQUEST_FUNC, side_effect=[
                (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "inProgress"},
                                       {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]}),
                (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "inProgressRecon"},
                                       {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]}),
                (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "pending"},
                                       {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]}),
                (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "notAttempted"},
                                       {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]}),
                (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
                                       {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]})]):
                firmware_object.wait_for_upgrade_completion()

    def test_wait_for_upgrade_completion_fail(self):
        """Verify wait_for_upgrade_completion() raises on timeout, request failure and bad status."""
        self._set_args({"firmware": ["path/to/test_drive_firmware_1", "path/to/test_drive_firmware_2"], "wait_for_completion": True})
        firmware_object = NetAppESeriesDriveFirmware()
        firmware_object.upgrade_drives_online = True
        firmware_object.upgrade_list = lambda: self.UPGRADE_LIST_RESPONSE
        # Shrink the timeout so the perpetually-busy response below trips it quickly.
        firmware_object.WAIT_TIMEOUT_SEC = 5
        response = (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "inProgress"},
                                          {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "inProgressRecon"},
                                          {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "pending"},
                                          {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "notAttempted"},
                                          {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
                                          {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]})
        with self.assertRaisesRegexp(AnsibleFailJson, "Timed out waiting for drive firmware upgrade."):
            with mock.patch(self.SLEEP_FUNC, return_value=None):
                with mock.patch(self.REQUEST_FUNC, return_value=response):
                    firmware_object.wait_for_upgrade_completion()

        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve drive status."):
            with mock.patch(self.SLEEP_FUNC, return_value=None):
                with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
                    firmware_object.wait_for_upgrade_completion()

        # An unrecognized drive status is treated as an upgrade failure.
        response = (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "_UNDEFINED"},
                                          {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "inProgressRecon"},
                                          {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "pending"},
                                          {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "notAttempted"},
                                          {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
                                          {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]})
        with self.assertRaisesRegexp(AnsibleFailJson, "Drive firmware upgrade failed."):
            with mock.patch(self.SLEEP_FUNC, return_value=None):
                with mock.patch(self.REQUEST_FUNC, return_value=response):
                    firmware_object.wait_for_upgrade_completion()

    def test_upgrade_pass(self):
        """Verify upgrade() sets upgrade_in_progress correctly with and without waiting."""
        # Without wait_for_completion the upgrade is left in progress.
        self._set_args({"firmware": ["path/to/test_drive_firmware_1", "path/to/test_drive_firmware_2"], "wait_for_completion": False})
        firmware_object = NetAppESeriesDriveFirmware()
        firmware_object.upgrade_drives_online = True
        firmware_object.upgrade_list = lambda: {}
        with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
            firmware_object.upgrade()
            self.assertTrue(firmware_object.upgrade_in_progress)

        # With wait_for_completion the flag clears once all drives report okay.
        self._set_args({"firmware": ["path_to_test_drive_firmware_1", "path_to_test_drive_firmware_2"], "wait_for_completion": True})
        firmware_object = NetAppESeriesDriveFirmware()
        firmware_object.upgrade_drives_online = True
        firmware_object.upgrade_list = lambda: self.UPGRADE_LIST_RESPONSE
        with mock.patch(self.REQUEST_FUNC, side_effect=[(200, {}),
                                                        (200, {"driveStatus": [{"driveRef": "010000005000C5007EDE4ECF0000000000000000", "status": "okay"},
                                                                               {"driveRef": "010000005000C5007EDF9AAB0000000000000000", "status": "okay"},
                                                                               {"driveRef": "010000005000C5007EDBE3C70000000000000000", "status": "okay"},
                                                                               {"driveRef": "010000005000C5007EDE4ECF0000000000000001", "status": "okay"},
                                                                               {"driveRef": "010000005000C5007EDF9AAB0000000000000001", "status": "okay"},
                                                                               {"driveRef": "010000005000C5007EDBE3C70000000000000001", "status": "okay"}]})]):
            firmware_object.upgrade()
            self.assertFalse(firmware_object.upgrade_in_progress)

    def test_upgrade_fail(self):
        """Verify upgrade() raises AnsibleFailJson when the upgrade request fails."""
        self._set_args({"firmware": ["path_to_test_drive_firmware_1", "path_to_test_drive_firmware_2"]})
        firmware_object = NetAppESeriesDriveFirmware()
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to upgrade drive firmware."):
            with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
                firmware_object.upgrade()

View file

@ -1,455 +0,0 @@
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_facts import Facts
from ansible_collections.community.general.tests.unit.modules.utils import AnsibleFailJson, ModuleTestCase, set_module_args
from ansible_collections.community.general.tests.unit.compat import mock
class FactsTest(ModuleTestCase):
REQUIRED_PARAMS = {
'api_username': 'rw',
'api_password': 'password',
'api_url': 'http://localhost',
'ssid': '1'
}
REQUEST_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_facts.Facts.request'
GET_CONTROLLERS_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_facts.Facts.get_controllers'
WORKLOAD_RESPONSE = [{"id": "4200000001000000000000000000000000000000", "name": "beegfs_metadata",
"workloadAttributes": [{"key": "profileId", "value": "ansible_workload_1"}]},
{"id": "4200000002000000000000000000000000000000", "name": "other_workload_1",
"workloadAttributes": [{"key": "profileId", "value": "Other_1"}]}]
GRAPH_RESPONSE = {
"sa": {"saData": {"storageArrayLabel": "ictm0718s01c1", "saId": {"worldWideName": "600A098000A4B28D000000005CF10481"}, "fwVersion": "08.42.30.05",
"chassisSerialNumber": "021633035190"},
"featureParameters": {"cacheBlockSizes": [4096, 8192, 16384, 32768],
"supportedSegSizes": [32768, 65536, 131072, 262144, 524288, 495616, 655360, 1982464]},
"capabilities": ["autoCodeSync", "autoLunTransfer", "subLunsAllowed", "stagedDownload", "mixedDriveTypes", "bundleMigration", "raid6",
"performanceTier", "secureVolume", "protectionInformation", "ssdSupport", "driveSlotLimit", "flashReadCache",
"storagePoolsType2", "totalNumberOfArvmMirrorsPerArray", "totalNumberOfPitsPerArray", "totalNumberOfThinVolumesPerArray"],
"premiumFeatures": [],
"hostSpecificVals": [{"hostType": "FactoryDefault", "index": 0}, {"hostType": "W2KNETNCL", "index": 1}, {"hostPortType": "W2KNETCL", "index": 8},
{"hostType": "LnxTPGSALUA_SF", "index": 27}, {"hostType": "LnxDHALUA", "index": 28}]}, "controller": [
{"active": True, "quiesced": False, "status": "optimal", "controllerRef": "070000000000000000000001",
"physicalLocation": {"trayRef": "0E00000000000000000000000000000000000000", "slot": 1,
"locationParent": {"refType": "generic", "controllerRef": None, "symbolRef": "0000000000000000000000000000000000000000",
"typedReference": None}, "locationPosition": 1, "label": "A"}, "manufacturer": "NETAPP ",
"manufacturerDate": "1474675200", "appVersion": "08.42.30.05", "bootVersion": "08.42.30.05", "productID": "INF-01-00 ",
"productRevLevel": "0842", "serialNumber": "021619039162 ", "boardID": "2806", "cacheMemorySize": 3328, "processorMemorySize": 1278,
"hostInterfaces": [{"interfaceType": "iscsi", "fibre": None, "ib": None,
"iscsi": {"channel": 1, "channelPortRef": "1F00010001010000000000000000000000000000", "tcpListenPort": 3260,
"ipv4Enabled": True, "ipv4Data": {"ipv4Address": "0.0.0.0", "ipv4AddressConfigMethod": "configStatic",
"ipv4OutboundPacketPriority": {"isEnabled": False, "value": 1},
"ipv4VlanId": {"isEnabled": False, "value": 1},
"ipv4AddressData": {"configState": "configured", "ipv4Address": "10.10.11.110",
"ipv4SubnetMask": "255.255.255.0",
"ipv4GatewayAddress": "0.0.0.0"}},
"interfaceData": {"type": "ethernet", "ethernetData": {
"partData": {"vendorName": "QLogic Corporation", "partNumber": "83xx", "revisionNumber": "5.5.31.511",
"serialNumber": "00a098a4b28f"}, "macAddress": "00A098A4B293", "fullDuplex": True,
"maximumFramePayloadSize": 9000, "currentInterfaceSpeed": "speed10gig", "maximumInterfaceSpeed": "speed10gig",
"linkStatus": "up", "supportedInterfaceSpeeds": ["speed1gig", "speed10gig"], "autoconfigSupport": False,
"copperCableDiagnosticsSupport": False}, "infinibandData": None},
"interfaceRef": "2201020000000000000000000000000000000000", "ipv6Enabled": True,
"ipv6Data": {"ipv6LocalAddresses": [
{"address": "FE8000000000000002A098FFFEA4B293",
"addressState": {"addressType": "typeInterface", "interfaceAddressState": "configured",
"routerAddressState": "__UNDEFINED"}}], "ipv6RoutableAddresses": [
{"address": "00000000000000000000000000000000",
"addressState": {"addressType": "typeInterface", "interfaceAddressState": "unconfigured",
"routerAddressState": "__UNDEFINED"}},
{"address": "00000000000000000000000000000000",
"addressState": {"addressType": "typeInterface", "interfaceAddressState": "unconfigured",
"routerAddressState": "__UNDEFINED"}}],
"ipv6PortRouterAddress": {"address": "00000000000000000000000000000000",
"addressState": {"addressType": "typeRouter", "interfaceAddressState": "__UNDEFINED",
"routerAddressState": "unknown"}},
"ipv6AddressConfigMethod": "configStateless", "ipv6OutboundPacketPriority": {"isEnabled": False, "value": 1},
"ipv6VlanId": {"isEnabled": False, "value": 1}, "ipv6HopLimit": 64, "ipv6NdReachableTime": 30000,
"ipv6NdRetransmitTime": 1000, "ipv6NdStaleTimeout": 30000, "ipv6DuplicateAddressDetectionAttempts": 1},
"physicalLocation": {"trayRef": "0000000000000000000000000000000000000000", "slot": 0,
"locationParent": {"refType": "generic", "controllerRef": None,
"symbolRef": "0000000000000000000000000000000000000000",
"typedReference": None}, "locationPosition": 0, "label": ""},
"protectionInformationCapable": True, "isIPv6Capable": True, "oneWayMaxRate": "1230000000",
"bidirectionalMaxRate": "2120000000", "iqn": "iqn.1992-08.com.netapp:2806.600a098000a4b28d000000005cf10481",
"controllerId": "070000000000000000000001",
"addressId": "iqn.1992-08.com.netapp:2806.600a098000a4b28d000000005cf10481",
"niceAddressId": "iqn.1992-08.com.netapp:2806.600a098000a4b28d000000005cf10481",
"interfaceId": "2201020000000000000000000000000000000000", "id": "2201020000000000000000000000000000000000"},
"sas": None, "sata": None, "scsi": None}],
"driveInterfaces": [
{"interfaceType": "sas", "fibre": None, "ib": None, "iscsi": None,
"sas": {"channel": 1, "currentInterfaceSpeed": "speed12gig", "maximumInterfaceSpeed": "speed12gig", "part": "LSISAS3008",
"revision": 172688896, "isDegraded": False,
"iocPort": {
"parent": {"type": "controller", "controller": "070000000000000000000001", "drive": None, "expander": None, "hostBoardRef": None},
"attachedDevice": {"channel": 1, "channelType": "driveside",
"sasAttachedDeviceData": {"type": "expander", "alternateController": None, "drive": None,
"expander": "2000000000000000000000630001000000000000",
"remoteHostPortAddress": None,
"localController": None, "physicalLocation": None}}, "state": "optimal",
"miswireType": "None", "channelPortRef": "1F01000001010000000000000000000000000000",
"sasPhys": [{"phyIdentifier": 4, "isOperational": True}, {"phyIdentifier": 5, "isOperational": True},
{"phyIdentifier": 6, "isOperational": True}, {"phyIdentifier": 7, "isOperational": True}],
"portTypeData": {"portType": "endDevice", "portIdentifier": "500A098A4B28D004", "routingType": "__UNDEFINED"},
"portMode": "internal",
"domainNumber": 1, "attachedChannelPortRef": "0000000000000000000000000000000000000000", "discoveryStatus": 0},
"interfaceRef": "2201000000000000000000000000000000000000",
"physicalLocation": {"trayRef": "0000000000000000000000000000000000000000", "slot": 0,
"locationParent": {"refType": "generic", "controllerRef": None,
"symbolRef": "0000000000000000000000000000000000000000", "typedReference": None},
"locationPosition": 0, "label": ""}, "protectionInformationCapable": True, "oneWayMaxRate": "4400000000",
"bidirectionalMaxRate": "8400000000", "controllerId": None, "addressId": "500A098A4B28D004", "niceAddressId": "500A098A4B28D004",
"interfaceId": "2201000000000000000000000000000000000000", "basePortAddress": "500A098A4B28D00",
"id": "2201000000000000000000000000000000000000"}, "sata": None, "scsi": None}],
"netInterfaces": [{"interfaceType": "ethernet",
"ethernet": {"interfaceName": "wan0", "channel": 1, "speed": 1000, "ip": 175178176, "alias": "ictm0718s01c1-a",
"macAddr": "00A098A4B28D", "gatewayIp": 175177985, "subnetMask": -256, "bootpUsed": False, "rloginEnabled": True,
"reserved1": "0000000000000000", "setupError": False, "reserved2": "",
"interfaceRef": "2800070000000000000000000001000000000000", "linkStatus": "up", "ipv4Enabled": True,
"ipv4Address": "10.113.1.192", "ipv4SubnetMask": "255.255.255.0", "ipv4AddressConfigMethod": "configStatic",
"ipv6Enabled": False, "ipv6LocalAddress": {"address": "00000000000000000000000000000000",
"addressState": {"addressType": "typeInterface",
"interfaceAddressState": "configured",
"routerAddressState": "__UNDEFINED"}},
"ipv6PortStaticRoutableAddress": {"address": "00000000000000000000000000000000",
"addressState": {"addressType": "typeInterface",
"interfaceAddressState": "__UNDEFINED",
"routerAddressState": "__UNDEFINED"}},
"ipv6PortRoutableAddresses": [], "ipv6AddressConfigMethod": "configStatic", "fullDuplex": True,
"supportedSpeedSettings": ["speedAutoNegotiated", "speed10MbitHalfDuplex", "speed10MbitFullDuplex",
"speed100MbitHalfDuplex", "speed100MbitFullDuplex", "speed1000MbitFullDuplex"],
"configuredSpeedSetting": "speedAutoNegotiated", "currentSpeed": "speed1gig",
"physicalLocation": {"trayRef": "0E00000000000000000000000000000000000000", "slot": 0,
"locationParent": {"refType": "controller", "controllerRef": "070000000000000000000001",
"symbolRef": None, "typedReference": None}, "locationPosition": 1,
"label": "P1"}, "ipv4GatewayAddress": "10.113.1.1",
"controllerRef": "070000000000000000000001", "controllerSlot": 1,
"dnsProperties": {
"acquisitionProperties": {"dnsAcquisitionType": "stat",
"dnsServers": [
{"addressType": "ipv4", "ipv4Address": "10.193.0.250", "ipv6Address": None},
{"addressType": "ipv4", "ipv4Address": "10.192.0.250", "ipv6Address": None}]},
"dhcpAcquiredDnsServers": []},
"ntpProperties": {
"acquisitionProperties": {"ntpAcquisitionType": "stat", "ntpServers": [
{"addrType": "ipvx", "domainName": None,
"ipvxAddress": {"addressType": "ipv4", "ipv4Address": "216.239.35.0", "ipv6Address": None}},
{"addrType": "ipvx", "domainName": None,
"ipvxAddress": {"addressType": "ipv4", "ipv4Address": "216.239.35.4", "ipv6Address": None}}]},
"dhcpAcquiredNtpServers": []},
"id": "2800070000000000000000000001000000000000"}}],
"inventory": [], "reserved1": "000000000000000000000000", "reserved2": "", "hostBoardID": "None", "physicalCacheMemorySize": 4864,
"readyToRemove": False, "boardSubmodelID": "319", "submodelSupported": True, "oemPartNumber": "E2800A-8GB", "partNumber": "111-02829+C0 ",
"rtrAttributes": {"cruType": "dedicated", "parentCru": None, "rtrAttributeData": {"hasReadyToRemoveIndicator": False, "readyToRemove": False}},
"bootTime": "1563988406", "modelName": "2806",
"networkSettings": {"ipv4DefaultRouterAddress": "10.113.1.1",
"ipv6DefaultRouterAddress": {"address": "00000000000000000000000000000000",
"addressState": {"addressType": "typeInterface",
"interfaceAddressState": "__UNDEFINED", "routerAddressState": "__UNDEFINED"}},
"ipv6CandidateDefaultRouterAddresses": [],
"remoteAccessEnabled": True,
"dnsProperties": {"acquisitionProperties": {"dnsAcquisitionType": "stat",
"dnsServers": [
{"addressType": "ipv4", "ipv4Address": "10.193.0.250", "ipv6Address": None},
{"addressType": "ipv4", "ipv4Address": "10.192.0.250", "ipv6Address": None}]},
"dhcpAcquiredDnsServers": []},
"ntpProperties": {
"acquisitionProperties": {
"ntpAcquisitionType": "stat", "ntpServers": [
{"addrType": "ipvx", "domainName": None,
"ipvxAddress": {"addressType": "ipv4", "ipv4Address": "216.239.35.0", "ipv6Address": None}},
{"addrType": "ipvx", "domainName": None,
"ipvxAddress": {"addressType": "ipv4", "ipv4Address": "216.239.35.4", "ipv6Address": None}}]},
"dhcpAcquiredNtpServers": []}},
"repairPolicy": {"removalData": {"removalMethod": "__UNDEFINED", "rtrAttributes": None}, "replacementMethod": "__UNDEFINED"},
"flashCacheMemorySize": 419430400, "ctrlIocDumpData": {"iocDumpNeedsRetrieved": False, "iocDumpTag": 0, "timeStamp": "0"},
"locateInProgress": False, "hasTrayIdentityIndicator": False, "controllerErrorMode": "notInErrorMode",
"codeVersions": [{"codeModule": "raid", "versionString": "08.42.30.05"}, {"codeModule": "hypervisor", "versionString": "08.42.30.05"},
{"codeModule": "management", "versionString": "11.42.0000.0026"}, {"codeModule": "iom", "versionString": "11.42.0G00.0001"},
{"codeModule": "bundle", "versionString": "08.42.30.05"}, {"codeModule": "bundleDisplay", "versionString": "11.40.3R2"}],
"id": "070000000000000000000001"}],
"drive": [{"offline": False, "hotSpare": False, "invalidDriveData": False, "available": True, "pfa": False,
"driveRef": "0100000050000396AC882ED10000000000000000", "status": "optimal", "cause": "None",
"interfaceType": {"driveType": "sas", "fibre": None,
"sas": {"deviceName": "50000396AC882ED1",
"drivePortAddresses": [{"channel": 2, "portIdentifier": "50000396AC882ED3"},
{"channel": 1, "portIdentifier": "50000396AC882ED2"}]},
"scsi": None},
"physicalLocation": {"trayRef": "0E00000000000000000000000000000000000000", "slot": 6,
"locationParent": {"refType": "genericTyped", "controllerRef": None, "symbolRef": None,
"typedReference": {"componentType": "tray",
"symbolRef": "0E00000000000000000000000000000000000000"}},
"locationPosition": 6, "label": "5"}, "manufacturer": "TOSHIBA ",
"manufacturerDate": "1447200000", "productID": "PX04SVQ160 ", "serialNumber": "Y530A001T5MD", "softwareVersion": "MSB6", "blkSize": 512,
"usableCapacity": "1599784443904", "rawCapacity": "1600321314816", "worldWideName": "50000396AC882ED10000000000000000",
"currentVolumeGroupRef": "0000000000000000000000000000000000000000", "sparedForDriveRef": "0000000000000000000000000000000000000000",
"mirrorDrive": "0000000000000000000000000000000000000000", "nonRedundantAccess": False, "workingChannel": -1, "volumeGroupIndex": -1,
"currentSpeed": "speed12gig", "maxSpeed": "speed12gig", "uncertified": False, "hasDegradedChannel": False, "degradedChannels": [],
"phyDriveType": "sas", "spindleSpeed": 0, "rtrAttributes": {"cruType": "dedicated", "parentCru": None,
"rtrAttributeData": {"hasReadyToRemoveIndicator": False,
"readyToRemove": False}}, "reserved": "",
"phyDriveTypeData": {"phyDriveType": "sas", "sataDriveAttributes": None}, "pfaReason": "None", "bypassSource": [],
"repairPolicy": {"removalData": {"removalMethod": "self", "rtrAttributes": {"hasReadyToRemoveIndicator": False, "readyToRemove": False}},
"replacementMethod": "self"}, "fdeCapable": True, "fdeEnabled": False, "fdeLocked": False,
"lockKeyID": "0000000000000000000000000000000000000000",
"ssdWearLife": {"averageEraseCountPercent": 18, "spareBlocksRemainingPercent": 91, "isWearLifeMonitoringSupported": True,
"percentEnduranceUsed": 18}, "driveMediaType": "ssd", "fpgaVersion": "",
"protectionInformationCapabilities": {"protectionInformationCapable": True, "protectionType": "type2Protection"},
"protectionInformationCapable": False, "protectionType": "type0Protection", "interposerPresent": False,
"interposerRef": "0000000000000000000000000000000000000000", "currentCommandAgingTimeout": 6, "defaultCommandAgingTimeout": 6,
"driveTemperature": {"currentTemp": 25, "refTemp": 64}, "blkSizePhysical": 4096, "lowestAlignedLBA": "0", "removed": False,
"locateInProgress": False, "fipsCapable": False, "firmwareVersion": "MSB6", "lockKeyIDValue": None,
"id": "0100000050000396AC882ED10000000000000000"},
{"offline": False, "hotSpare": False, "invalidDriveData": False, "available": True, "pfa": False,
"driveRef": "0100000050000396AC882EDD0000000000000000", "status": "optimal", "cause": "None",
"interfaceType": {"driveType": "sas", "fibre": None,
"sas": {"deviceName": "50000396AC882EDD",
"drivePortAddresses": [{"channel": 2, "portIdentifier": "50000396AC882EDF"},
{"channel": 1, "portIdentifier": "50000396AC882EDE"}]},
"scsi": None},
"physicalLocation": {"trayRef": "0E00000000000000000000000000000000000000", "slot": 8,
"locationParent": {"refType": "genericTyped", "controllerRef": None, "symbolRef": None,
"typedReference": {"componentType": "tray",
"symbolRef": "0E00000000000000000000000000000000000000"}},
"locationPosition": 8, "label": "7"}, "manufacturer": "TOSHIBA ",
"manufacturerDate": "1447200000", "productID": "PX04SVQ160 ", "serialNumber": "Y530A004T5MD", "softwareVersion": "MSB6", "blkSize": 512,
"usableCapacity": "1599784443904", "rawCapacity": "1600321314816", "worldWideName": "50000396AC882EDD0000000000000000",
"currentVolumeGroupRef": "0000000000000000000000000000000000000000", "sparedForDriveRef": "0000000000000000000000000000000000000000",
"mirrorDrive": "0000000000000000000000000000000000000000", "nonRedundantAccess": False, "workingChannel": -1, "volumeGroupIndex": -1,
"currentSpeed": "speed12gig", "maxSpeed": "speed12gig", "uncertified": False, "hasDegradedChannel": False, "degradedChannels": [],
"phyDriveType": "sas", "spindleSpeed": 0, "rtrAttributes": {"cruType": "dedicated", "parentCru": None,
"rtrAttributeData": {"hasReadyToRemoveIndicator": False,
"readyToRemove": False}}, "reserved": "",
"phyDriveTypeData": {"phyDriveType": "sas", "sataDriveAttributes": None}, "pfaReason": "None", "bypassSource": [],
"repairPolicy": {"removalData": {"removalMethod": "self", "rtrAttributes": {"hasReadyToRemoveIndicator": False, "readyToRemove": False}},
"replacementMethod": "self"}, "fdeCapable": True, "fdeEnabled": False, "fdeLocked": False,
"lockKeyID": "0000000000000000000000000000000000000000",
"ssdWearLife": {"averageEraseCountPercent": 18, "spareBlocksRemainingPercent": 91, "isWearLifeMonitoringSupported": True,
"percentEnduranceUsed": 18}, "driveMediaType": "ssd", "fpgaVersion": "",
"protectionInformationCapabilities": {"protectionInformationCapable": True, "protectionType": "type2Protection"},
"protectionInformationCapable": False, "protectionType": "type0Protection", "interposerPresent": False,
"interposerRef": "0000000000000000000000000000000000000000", "currentCommandAgingTimeout": 6, "defaultCommandAgingTimeout": 6,
"driveTemperature": {"currentTemp": 25, "refTemp": 64}, "blkSizePhysical": 4096, "lowestAlignedLBA": "0", "removed": False,
"locateInProgress": False, "fipsCapable": False, "firmwareVersion": "MSB6", "lockKeyIDValue": None,
"id": "0100000050000396AC882EDD0000000000000000"}],
"volumeGroup": [
{"sequenceNum": 1, "offline": False, "raidLevel": "raid6", "worldWideName": "600A098000A4B9D10000380A5D4AAC3C",
"volumeGroupRef": "04000000600A098000A4B9D10000380A5D4AAC3C", "reserved1": "000000000000000000000000", "reserved2": "",
"trayLossProtection": False, "label": "beegfs_storage_vg", "state": "complete", "spindleSpeedMatch": True, "spindleSpeed": 10500,
"isInaccessible": False, "securityType": "capable", "drawerLossProtection": False, "protectionInformationCapable": False,
"protectionInformationCapabilities": {"protectionInformationCapable": True, "protectionType": "type2Protection"},
"volumeGroupData": {"type": "unknown", "diskPoolData": None},
"usage": "standard", "driveBlockFormat": "allNative", "reservedSpaceAllocated": False, "securityLevel": "fde", "usedSpace": "1099511627776",
"totalRaidedSpace": "9597654597632",
"extents": [{"sectorOffset": "268435456", "rawCapacity": "8498142969856", "raidLevel": "raid6",
"volumeGroupRef": "04000000600A098000A4B9D10000380A5D4AAC3C", "freeExtentRef": "03000000600A098000A4B9D10000380A5D4AAC3C",
"reserved1": "000000000000000000000000", "reserved2": ""}],
"largestFreeExtentSize": "8498142969856", "raidStatus": "optimal", "freeSpace": "8498142969856", "drivePhysicalType": "sas",
"driveMediaType": "hdd", "normalizedSpindleSpeed": "spindleSpeed10k", "diskPool": False,
"id": "04000000600A098000A4B9D10000380A5D4AAC3C", "name": "beegfs_storage_vg"}], "volume": [
{"offline": False, "extremeProtection": False, "volumeHandle": 0, "raidLevel": "raid6", "sectorOffset": "0",
"worldWideName": "600A098000A4B28D00003E435D4AAC54", "label": "beegfs_storage_01_1", "blkSize": 512, "capacity": "1099511627776",
"reconPriority": 1, "segmentSize": 131072, "action": "None",
"cache": {"cwob": False, "enterpriseCacheDump": False, "mirrorActive": True, "mirrorEnable": True, "readCacheActive": False,
"readCacheEnable": False, "writeCacheActive": True, "writeCacheEnable": True, "cacheFlushModifier": "flush10Sec",
"readAheadMultiplier": 1}, "mediaScan": {"enable": True, "parityValidationEnable": True},
"volumeRef": "02000000600A098000A4B28D00003E435D4AAC54", "status": "optimal", "volumeGroupRef": "04000000600A098000A4B9D10000380A5D4AAC3C",
"currentManager": "070000000000000000000001", "preferredManager": "070000000000000000000001",
"perms": {"mapToLUN": True, "snapShot": True, "format": True, "reconfigure": True, "mirrorPrimary": True, "mirrorSecondary": True,
"copySource": True, "copyTarget": True, "readable": True, "writable": True, "rollback": True, "mirrorSync": True, "newImage": True,
"allowDVE": True, "allowDSS": True, "concatVolumeMember": False, "flashReadCache": True, "asyncMirrorPrimary": True,
"asyncMirrorSecondary": True, "pitGroup": True, "cacheParametersChangeable": True, "allowThinManualExpansion": False,
"allowThinGrowthParametersChange": False},
"mgmtClientAttribute": 0, "dssPreallocEnabled": False, "dssMaxSegmentSize": 0, "preReadRedundancyCheckEnabled": False,
"protectionInformationCapable": False, "protectionType": "type0Protection", "applicationTagOwned": True,
"repairedBlockCount": 0, "extendedUniqueIdentifier": "", "cacheMirroringValidateProtectionInformation": False,
"expectedProtectionInformationAppTag": 0, "volumeUse": "standardVolume", "volumeFull": False, "volumeCopyTarget": False, "volumeCopySource": False,
"pitBaseVolume": False, "asyncMirrorTarget": False, "asyncMirrorSource": False, "remoteMirrorSource": False, "remoteMirrorTarget": False,
"diskPool": False, "flashCached": False, "increasingBy": "0", "metadata": [], "dataAssurance": False, "objectType": "volume",
"listOfMappings": [
{"lunMappingRef": "88000000A1010000000000000000000000000000", "lun": 1, "ssid": 0, "perms": 15,
"volumeRef": "02000000600A098000A4B28D00003E435D4AAC54", "type": "host", "mapRef": "84000000600A098000A4B28D00303D065D430118",
"id": "88000000A1010000000000000000000000000000"}],
"mapped": True, "currentControllerId": "070000000000000000000001",
"cacheSettings": {"cwob": False, "enterpriseCacheDump": False, "mirrorActive": True, "mirrorEnable": True, "readCacheActive": False,
"readCacheEnable": False, "writeCacheActive": True, "writeCacheEnable": True, "cacheFlushModifier": "flush10Sec",
"readAheadMultiplier": 1},
"thinProvisioned": False, "preferredControllerId": "070000000000000000000001", "totalSizeInBytes": "1099511627776", "onlineVolumeCopy": False,
"wwn": "600A098000A4B28D00003E435D4AAC54", "name": "beegfs_storage_01_1", "id": "02000000600A098000A4B28D00003E435D4AAC54"}],
"storagePoolBundle": {"cluster": [], "host": [
{"hostRef": "84000000600A098000A4B28D00303D005D430107", "clusterRef": "0000000000000000000000000000000000000000", "label": "test",
"isSAControlled": False, "confirmLUNMappingCreation": False, "hostTypeIndex": 28, "protectionInformationCapableAccessMethod": True,
"isLargeBlockFormatHost": False, "isLun0Restricted": False, "ports": [],
"initiators": [
{"initiatorRef": "89000000600A098000A4B9D1003037005D4300F5",
"nodeName": {"ioInterfaceType": "iscsi", "iscsiNodeName": "iqn.iscsi_tests1", "remoteNodeWWN": None, "nvmeNodeName": None},
"alias": {"ioInterfaceType": "iscsi", "iscsiAlias": ""}, "label": "iscsi_test1",
"configuredAuthMethods": {"authMethodData": [{"authMethod": "None", "chapSecret": None}]},
"hostRef": "84000000600A098000A4B28D00303D005D430107", "initiatorInactive": False, "id": "89000000600A098000A4B9D1003037005D4300F5"}],
"hostSidePorts": [{"type": "iscsi", "address": "iqn.iscsi_tests1", "label": "iscsi_test1"}],
"id": "84000000600A098000A4B28D00303D005D430107", "name": "test"},
{"hostRef": "84000000600A098000A4B9D1003037035D4300F8", "clusterRef": "0000000000000000000000000000000000000000", "label": "test2",
"isSAControlled": True, "confirmLUNMappingCreation": False, "hostTypeIndex": 28, "protectionInformationCapableAccessMethod": True,
"isLargeBlockFormatHost": False, "isLun0Restricted": False, "ports": [],
"initiators": [
{"initiatorRef": "89000000600A098000A4B9D1003037075D4300F9",
"nodeName": {"ioInterfaceType": "iscsi", "iscsiNodeName": "iqn.iscsi_tests2", "remoteNodeWWN": None, "nvmeNodeName": None},
"alias": {"ioInterfaceType": "iscsi", "iscsiAlias": ""}, "label": "iscsi_test2",
"configuredAuthMethods": {"authMethodData": [{"authMethod": "None", "chapSecret": None}]},
"hostRef": "84000000600A098000A4B9D1003037035D4300F8", "initiatorInactive": False, "id": "89000000600A098000A4B9D1003037075D4300F9"}],
"hostSidePorts": [{"type": "iscsi", "address": "iqn.iscsi_tests2", "label": "iscsi_test2"}],
"id": "84000000600A098000A4B9D1003037035D4300F8", "name": "test2"},
{"hostRef": "84000000600A098000A4B28D00303D065D430118", "clusterRef": "0000000000000000000000000000000000000000", "label": "beegfs_storage1",
"isSAControlled": False, "confirmLUNMappingCreation": False, "hostTypeIndex": 28, "protectionInformationCapableAccessMethod": True,
"isLargeBlockFormatHost": False, "isLun0Restricted": False, "ports": [],
"initiators": [
{"initiatorRef": "89000000600A098000A4B28D00303CF55D4300E3",
"nodeName": {"ioInterfaceType": "iscsi", "iscsiNodeName": "iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818", "remoteNodeWWN": None,
"nvmeNodeName": None}, "alias": {"ioInterfaceType": "iscsi", "iscsiAlias": ""}, "label": "beegfs_storage1_iscsi_0",
"configuredAuthMethods": {"authMethodData": [{"authMethod": "None", "chapSecret": None}]},
"hostRef": "84000000600A098000A4B28D00303D065D430118", "initiatorInactive": False, "id": "89000000600A098000A4B28D00303CF55D4300E3"}],
"hostSidePorts": [{"type": "iscsi", "address": "iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818", "label": "beegfs_storage1_iscsi_0"}],
"id": "84000000600A098000A4B28D00303D065D430118", "name": "beegfs_storage1"},
{"hostRef": "84000000600A098000A4B9D10030370B5D430109", "clusterRef": "0000000000000000000000000000000000000000", "label": "beegfs_metadata1",
"isSAControlled": False, "confirmLUNMappingCreation": False, "hostTypeIndex": 28, "protectionInformationCapableAccessMethod": True,
"isLargeBlockFormatHost": False, "isLun0Restricted": False, "ports": [],
"initiators": [
{"initiatorRef": "89000000600A098000A4B28D00303CFC5D4300F7",
"nodeName": {"ioInterfaceType": "iscsi", "iscsiNodeName": "iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8", "remoteNodeWWN": None,
"nvmeNodeName": None}, "alias": {"ioInterfaceType": "iscsi", "iscsiAlias": ""}, "label": "beegfs_metadata1_iscsi_0",
"configuredAuthMethods": {"authMethodData": [{"authMethod": "None", "chapSecret": None}]},
"hostRef": "84000000600A098000A4B9D10030370B5D430109", "initiatorInactive": False, "id": "89000000600A098000A4B28D00303CFC5D4300F7"}],
"hostSidePorts": [{"type": "iscsi", "address": "iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8", "label": "beegfs_metadata1_iscsi_0"}],
"id": "84000000600A098000A4B9D10030370B5D430109", "name": "beegfs_metadata1"}], "lunMapping": [
{"lunMappingRef": "8800000000000000000000000000000000000000", "lun": 7, "ssid": 16384, "perms": 15,
"volumeRef": "21000000600A098000A4B28D000027EC5CF10481", "type": "all", "mapRef": "0000000000000000000000000000000000000000",
"id": "8800000000000000000000000000000000000000"},
{"lunMappingRef": "880000008B010000000000000000000000000000", "lun": 7, "ssid": 16384, "perms": 15,
"volumeRef": "21000000600A098000A4B28D000027EC5CF10481", "type": "host", "mapRef": "84000000600A098000A4B28D00303D065D430118",
"id": "880000008B010000000000000000000000000000"},
{"lunMappingRef": "8800000090010000000000000000000000000000", "lun": 7, "ssid": 16384, "perms": 15,
"volumeRef": "21000000600A098000A4B28D000027EC5CF10481", "type": "host", "mapRef": "84000000600A098000A4B9D10030370B5D430109",
"id": "8800000090010000000000000000000000000000"},
{"lunMappingRef": "8800000092010000000000000000000000000000", "lun": 7, "ssid": 16384, "perms": 15,
"volumeRef": "21000000600A098000A4B28D000027EC5CF10481", "type": "host", "mapRef": "84000000600A098000A4B28D00303D005D430107",
"id": "8800000092010000000000000000000000000000"}, {"lunMappingRef": "88000000A1010000000000000000000000000000", "lun": 1, "ssid": 0, "perms": 15,
"volumeRef": "02000000600A098000A4B28D00003E435D4AAC54", "type": "host",
"mapRef": "84000000600A098000A4B28D00303D065D430118",
"id": "88000000A1010000000000000000000000000000"}]}, "highLevelVolBundle": {"pit": []}}
EXPECTED_GET_ARRAY_FACTS = {'facts_from_proxy': False, 'netapp_controllers': [{'name': 'A', 'serial': '021619039162', 'status': 'optimal'}],
'netapp_disks': [
{'available': True, 'firmware_version': 'MSB6', 'id': '0100000050000396AC882ED10000000000000000', 'media_type': 'ssd',
'product_id': 'PX04SVQ160 ', 'serial_number': 'Y530A001T5MD', 'status': 'optimal',
'tray_ref': '0E00000000000000000000000000000000000000', 'usable_bytes': '1599784443904'},
{'available': True, 'firmware_version': 'MSB6', 'id': '0100000050000396AC882EDD0000000000000000', 'media_type': 'ssd',
'product_id': 'PX04SVQ160 ', 'serial_number': 'Y530A004T5MD', 'status': 'optimal',
'tray_ref': '0E00000000000000000000000000000000000000', 'usable_bytes': '1599784443904'}],
'netapp_driveside_interfaces': [{'controller': 'A', 'interface_speed': '12g', 'interface_type': 'sas'}],
'netapp_enabled_features': ['autoCodeSync', 'autoLunTransfer', 'bundleMigration', 'driveSlotLimit', 'flashReadCache',
'mixedDriveTypes', 'performanceTier', 'protectionInformation', 'raid6', 'secureVolume',
'ssdSupport', 'stagedDownload', 'storagePoolsType2', 'subLunsAllowed',
'totalNumberOfArvmMirrorsPerArray', 'totalNumberOfPitsPerArray',
'totalNumberOfThinVolumesPerArray'], 'netapp_host_groups': [],
'netapp_host_types': [{'index': 0, 'type': 'FactoryDefault'}, {'index': 1, 'type': 'W2KNETNCL'},
{'index': 27, 'type': 'LnxTPGSALUA_SF'}, {'index': 28, 'type': 'LnxDHALUA'}],
'netapp_hosts': [
{'group_id': '0000000000000000000000000000000000000000', 'host_type_index': 28,
'hosts_reference': '84000000600A098000A4B28D00303D005D430107',
'id': '84000000600A098000A4B28D00303D005D430107', 'name': 'test',
'posts': [{'address': 'iqn.iscsi_tests1', 'label': 'iscsi_test1', 'type': 'iscsi'}]},
{'group_id': '0000000000000000000000000000000000000000', 'host_type_index': 28,
'hosts_reference': '84000000600A098000A4B9D1003037035D4300F8',
'id': '84000000600A098000A4B9D1003037035D4300F8', 'name': 'test2',
'posts': [{'address': 'iqn.iscsi_tests2', 'label': 'iscsi_test2', 'type': 'iscsi'}]},
{'group_id': '0000000000000000000000000000000000000000', 'host_type_index': 28,
'hosts_reference': '84000000600A098000A4B28D00303D065D430118',
'id': '84000000600A098000A4B28D00303D065D430118', 'name': 'beegfs_storage1',
'posts': [{'address': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818', 'label': 'beegfs_storage1_iscsi_0',
'type': 'iscsi'}]},
{'group_id': '0000000000000000000000000000000000000000', 'host_type_index': 28,
'hosts_reference': '84000000600A098000A4B9D10030370B5D430109',
'id': '84000000600A098000A4B9D10030370B5D430109', 'name': 'beegfs_metadata1',
'posts': [{'address': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8', 'label': 'beegfs_metadata1_iscsi_0',
'type': 'iscsi'}]}],
'netapp_hostside_interfaces': [{'fc': [], 'ib': [],
'iscsi': [
{'controller': 'A', 'current_interface_speed': '10g', 'ipv4_address': '10.10.11.110',
'ipv4_enabled': True,
'ipv4_gateway': '0.0.0.0', 'ipv4_subnet_mask': '255.255.255.0', 'ipv6_enabled': True,
'iqn': 'iqn.1992-08.com.netapp:2806.600a098000a4b28d000000005cf10481', 'link_status': 'up',
'mtu': 9000,
'supported_interface_speeds': ['1g', '10g']}], 'sas': []}],
'netapp_management_interfaces': [
{'alias': 'ictm0718s01c1-a', 'channel': 1, 'controller': 'A', 'dns_config_method': 'stat',
'dns_servers': [{'addressType': 'ipv4', 'ipv4Address': '10.193.0.250', 'ipv6Address': None},
{'addressType': 'ipv4', 'ipv4Address': '10.192.0.250', 'ipv6Address': None}],
'ipv4_address': '10.113.1.192',
'ipv4_address_config_method': 'static', 'ipv4_enabled': True, 'ipv4_gateway': '10.113.1.1',
'ipv4_subnet_mask': '255.255.255.0', 'ipv6_enabled': False, 'link_status': 'up',
'mac_address': '00A098A4B28D', 'name': 'wan0', 'ntp_config_method': 'stat',
'ntp_servers': [
{'addrType': 'ipvx', 'domainName': None,
'ipvxAddress': {'addressType': 'ipv4', 'ipv4Address': '216.239.35.0', 'ipv6Address': None}},
{'addrType': 'ipvx', 'domainName': None,
'ipvxAddress': {'addressType': 'ipv4', 'ipv4Address': '216.239.35.4', 'ipv6Address': None}}],
'remote_ssh_access': True}],
'netapp_storage_array': {'cache_block_sizes': [4096, 8192, 16384, 32768], 'chassis_serial': '021633035190',
'firmware': '08.42.30.05', 'name': 'ictm0718s01c1',
'segment_sizes': [32768, 65536, 131072, 262144, 524288, 495616, 655360, 1982464],
'wwn': '600A098000A4B28D000000005CF10481'},
'netapp_storage_pools': [
{'available_capacity': '8498142969856', 'id': '04000000600A098000A4B9D10000380A5D4AAC3C', 'name': 'beegfs_storage_vg',
'total_capacity': '9597654597632', 'used_capacity': '1099511627776'}],
'netapp_volumes': [
{'capacity': '1099511627776', 'id': '02000000600A098000A4B28D00003E435D4AAC54', 'is_thin_provisioned': False,
'name': 'beegfs_storage_01_1', 'parent_storage_pool_id': '04000000600A098000A4B9D10000380A5D4AAC3C', 'workload': []}],
'netapp_volumes_by_initiators': {'beegfs_metadata1': [],
'beegfs_storage1': [{
'id': '02000000600A098000A4B28D00003E435D4AAC54',
'meta_data': {},
'name': 'beegfs_storage_01_1',
'workload_name': '',
'wwn': '600A098000A4B28D00003E435D4AAC54'}],
'test': [], 'test2': []},
'netapp_workload_tags': [
{'attributes': [{'key': 'profileId', 'value': 'ansible_workload_1'}], 'id': '4200000001000000000000000000000000000000',
'name': 'beegfs_metadata'},
{'attributes': [{'key': 'profileId', 'value': 'Other_1'}], 'id': '4200000002000000000000000000000000000000',
'name': 'other_workload_1'}], 'snapshot_images': [], 'ssid': '1'}
def _set_args(self, **kwargs):
    """Install module arguments: REQUIRED_PARAMS plus any per-test overrides.

    :param kwargs: extra module options merged over the required parameters.
    """
    module_args = self.REQUIRED_PARAMS.copy()
    # ``**kwargs`` is always a dict (never None); the previous
    # ``if kwargs is not None`` guard was dead code, so update unconditionally.
    module_args.update(kwargs)
    set_module_args(module_args)
def test_get_controllers_pass(self):
    """get_controllers labels sorted controller references 'A', 'B', ..."""
    self._set_args()
    module = Facts()
    # References are returned out of order by the API; labels follow sort order.
    controller_refs = ["070000000000000000000002", "070000000000000000000001"]
    expected = {"070000000000000000000001": "A", "070000000000000000000002": "B"}
    with mock.patch(self.REQUEST_FUNC, return_value=(200, controller_refs)):
        self.assertEqual(module.get_controllers(), expected)
def test_get_controllers_fail(self):
    """get_controllers raises AnsibleFailJson when the controller query errors."""
    self._set_args()
    module = Facts()
    # An Exception return_value makes the mocked request raise when called.
    with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve controller list!"):
            module.get_controllers()
def test_get_array_facts_pass(self):
    """get_array_facts distills the graph/workload responses into the expected facts."""
    self._set_args()
    module = Facts()
    module.is_embedded = lambda: True
    controller_map = {"070000000000000000000001": "A", "070000000000000000000002": "B"}
    # First request returns the storage graph, second the workload tags.
    responses = [(200, self.GRAPH_RESPONSE), (200, self.WORKLOAD_RESPONSE)]
    with mock.patch(self.GET_CONTROLLERS_FUNC, return_value=controller_map), \
            mock.patch(self.REQUEST_FUNC, side_effect=responses):
        self.assertEqual(module.get_array_facts(), self.EXPECTED_GET_ARRAY_FACTS)

View file

@ -1,555 +0,0 @@
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
try:
from unittest.mock import patch, mock_open
except ImportError:
from mock import patch, mock_open
from ansible.module_utils import six
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_firmware import NetAppESeriesFirmware
from ansible_collections.community.general.tests.unit.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
# Patch target for the built-in open(); its qualified name differs between
# Python 2 ("__builtin__") and Python 3 ("builtins").
if six.PY2:
    builtin_path = "__builtin__.open"
else:
    builtin_path = "builtins.open"
def mock_open_with_iter(*args, **kwargs):
    """Build a mock_open() mock whose file handle also supports iteration.

    Needed because the mock_open() of older mock releases does not wire up
    ``for line in handle:``; this adds the iterator protocol manually.
    """
    mock = mock_open(*args, **kwargs)
    if six.PY2:
        # Python 2: iterate by repeatedly calling readline() until it returns "".
        mock.return_value.__iter__ = lambda x: iter(x.readline, "")
    else:
        # Python 3: the handle acts as its own iterator.
        mock.return_value.__iter__ = lambda x: x
        # NOTE(review): __next__ here returns an iterator rather than the next
        # line — presumably never invoked directly by the code under test; confirm.
        mock.return_value.__next__ = lambda x: iter(x.readline, "")
    return mock
class FirmwareTest(ModuleTestCase):
    """Unit tests for the netapp_e_firmware NetAppESeriesFirmware module."""

    # Minimum module arguments needed to instantiate NetAppESeriesFirmware.
    REQUIRED_PARAMS = {"api_username": "username",
                       "api_password": "password",
                       "api_url": "http://localhost/devmgr/v2",
                       "ssid": "1",
                       "validate_certs": "no"}

    # Dotted patch targets for the module's HTTP/form-data/sleep helpers.
    REQUEST_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_firmware.NetAppESeriesFirmware.request'
    BASE_REQUEST_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_firmware.request'
    CREATE_MULTIPART_FORMDATA_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_firmware.create_multipart_formdata'
    SLEEP_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_firmware.sleep'

    # Leading bytes of a bundled firmware (.dlp) file; starts with the
    # "combined_content" magic that is_firmware_bundled() looks for.
    BUNDLE_HEADER = b'combined_content\x00\x00\x00\x04\x00\x00\x07\xf8#Engenio Downloadable Package\n#Tue Jun 04 11:46:48 CDT 2019\ncheckList=compatibleBoard' \
        b'Map,compatibleSubmodelMap,compatibleFirmwareMap,fileManifest\ncompatibleSubmodelMap=261|true,262|true,263|true,264|true,276|true,277|t' \
        b'rue,278|true,282|true,300|true,301|true,302|true,318|true,319|true,320|true,321|true,322|true,323|true,324|true,325|true,326|true,328|t' \
        b'rue,329|true,330|true,331|true,332|true,333|true,338|true,339|true,340|true,341|true,342|true,343|true,344|true,345|true,346|true,347|t' \
        b'rue,356|true,357|true,390|true\nnonDisplayableAttributeList=512\ndisplayableAttributeList=FILENAME|RCB_11.40.5_280x_5ceef00e.dlp,VERSI' \
        b'ON|11.40.5\ndacStoreLimit=512\nfileManifest=metadata.tar|metadata|08.42.50.00.000|c04275f98fc2f07bd63126fc57cb0569|bundle|10240,084250' \
        b'00_m3_e30_842_root.img|linux|08.42.50.00|367c5216e5c4b15b904a025bff69f039|linux|1342177280,RC_08425000_m3_e30_842_280x.img|linux_cfw|0' \
        b'8.42.50.00|e6589b0a50b29ff34b34d3ced8ae3ccb|eos|1073741824,msw.img|sam|11.42.0000.0028|ef3ee5589ab4a019a3e6f83768364aa1|linux|41943040' \
        b'0,iom.img|iom|11.42.0G00.0003|9bb740f8d3a4e62a0f2da2ec83c254c4|linux|8177664\nmanagementVersionList=devmgr.v1142api8.Manager\ncompatib' \
        b'leFirmwareMap=08.30.*.*|true,08.30.*.30|false,08.30.*.31|false,08.30.*.32|false,08.30.*.33|false,08.30.*.34|false,08.30.*.35|false,08.' \
        b'30.*.36|false,08.30.*.37|false,08.30.*.38|false,08.30.*.39|false,08.40.*.*|true,08.40.*.30|false,08.40.*.31|false,08.40.*.32|false,08.4' \
        b'0.*.33|false,08.40.*.34|false,08.40.*.35|false,08.40.*.36|false,08.40.*.37|false,08.40.*.38|false,08.40.*.39|false,08.41.*.*|true,08.4' \
        b'1.*.30|false,08.41.*.31|false,08.41.*.32|false,08.41.*.33|false,08.41.*.34|false,08.41.*.35|false,08.41.*.36|false,08.41.*.37|false,08' \
        b'.41.*.38|false,08.41.*.39|false,08.42.*.*|true,08.42.*.30|false,08.42.*.31|false,08.42.*.32|false,08.42.*.33|false,08.42.*.34|false,08' \
        b'.42.*.35|false,08.42.*.36|false,08.42.*.37|false,08.42.*.38|false,08.42.*.39|false\nversion=08.42.50.00.000\ntype=tar\nversionTag=comb' \
        b'ined_content\n'

    # Leading bytes of an NVSRAM (.dlp) file, including the embedded
    # "N280X-852834-D02" configuration number that nvsram_version() extracts.
    NVSRAM_HEADER = b'nvsram          \x00\x00\x00\x01\x00\x00\x00\xa0\x00\x00\x00\x04280X\x00\x00\x00\x00\x00\x00\x00\x032801 2804 2806 \x00\x00' \
        b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x1bArapaho controller, 8.52 FW\x00\x00\x001dual controller configuration, with cac' \
        b'he battery\x07\x81A\x08Config\x00\x00\x0008.52.00.00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\xdc\xaf\x00\x00' \
        b'\x94\xc1\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\x00\x00\x00\x00 2801 2804 2806 \x00\x00\x00\x00\x00' \
        b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' \
        b'\x00\x00\x00\x00\x00\x00Board\n    .Board Name                             = "NetApp RAID Controller"\n    .NVSRAM Configuration Number' \
        b'            = "N280X-852834-D02"\n\nUserCfg\n    .Enable Synchronous Negotiation         = 0x00 \n'
def _set_args(self, args=None):
    """Install module arguments: REQUIRED_PARAMS merged with optional overrides."""
    module_args = dict(self.REQUIRED_PARAMS)
    # ``args or {}`` treats None the same as "no overrides".
    module_args.update(args or {})
    set_module_args(module_args)
def test_is_firmware_bundled_pass(self):
    """is_firmware_bundled distinguishes plain firmware from bundled headers."""
    # (file header bytes, expected bundled verdict)
    cases = [(b"firmwarexxxxxxxx", False),
             (self.BUNDLE_HEADER[:16], True)]
    for header, expected in cases:
        self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
        with patch(builtin_path, mock_open(read_data=header)):
            firmware = NetAppESeriesFirmware()
            self.assertEqual(firmware.is_firmware_bundled(), expected)
def test_is_firmware_bundles_fail(self):
    """A header that is neither firmware nor bundle raises AnsibleFailJson."""
    self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
    with self.assertRaisesRegexp(AnsibleFailJson, "Firmware file is invalid."):
        with patch(builtin_path, mock_open(read_data=b"xxxxxxxxxxxxxxxx")):
            NetAppESeriesFirmware().is_firmware_bundled()
def test_firmware_version(self):
    """firmware_version extracts the display version from a bundled header."""
    self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
    module = NetAppESeriesFirmware()
    # Force the bundled code path; the header carries VERSION|11.40.5.
    module.is_firmware_bundled = lambda: True
    with patch(builtin_path, mock_open_with_iter(read_data=self.BUNDLE_HEADER)):
        self.assertEqual(module.firmware_version(), b"11.40.5")
def test_nvsram_version(self):
    """nvsram_version extracts the configuration number from the NVSRAM header."""
    self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
    module = NetAppESeriesFirmware()
    with patch(builtin_path, mock_open_with_iter(read_data=self.NVSRAM_HEADER)):
        self.assertEqual(module.nvsram_version(), b"N280X-852834-D02")
def test_check_system_health_pass(self):
    """check_system_health completes once the health check finishes successfully."""
    self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
    module = NetAppESeriesFirmware()
    # 1) initiate check, 2) poll while still running, 3) poll -> successful.
    responses = [(200, {"requestId": "1"}),
                 (200, {"healthCheckRunning": True,
                        "results": [{"processingTimeMS": 0}]}),
                 (200, {"healthCheckRunning": False,
                        "results": [{"successful": True}]})]
    with patch(self.REQUEST_FUNC, side_effect=responses):
        module.check_system_health()
def test_check_system_health_fail(self):
    """Validate check_system_health method throws proper exceptions."""
    self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
    firmware = NetAppESeriesFirmware()
    # Suppress real sleeping during the polling loop so the test stays fast.
    # NOTE(review): nesting reconstructed from a whitespace-stripped source;
    # the sleep patch is presumed to wrap all three scenarios — confirm.
    with patch("time.sleep", return_value=None):
        # Scenario 1: the initial request to start the health check errors out.
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to initiate health check."):
            with patch(self.REQUEST_FUNC, return_value=(404, Exception())):
                firmware.check_system_health()
        # Scenario 2: the check starts but the status poll errors out.
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve health check status."):
            with patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": "1"}),
                                                       (404, Exception())]):
                firmware.check_system_health()
        # Scenario 3: the check keeps running past the timeout threshold
        # (processingTimeMS > 120000).
        with self.assertRaisesRegexp(AnsibleFailJson, "Health check failed to complete."):
            with patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": "1"}),
                                                       (200, {"healthCheckRunning": True,
                                                              "results": [{"processingTimeMS": 120001}]})]):
                firmware.check_system_health()
def test_embedded_check_nvsram_compatibility_pass(self):
    """A valid, compatible NVSRAM compatibility response passes silently."""
    self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
    module = NetAppESeriesFirmware()
    response = {"signatureTestingPassed": True,
                "fileCompatible": True,
                "versionContents": [{"module": "nvsram",
                                     "bundledVersion": "N280X-842834-D02",
                                     "onboardVersion": "N280X-842834-D02"}]}
    # Stub multipart construction (no real file read) and the REST response.
    with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
        with patch(self.REQUEST_FUNC, return_value=(200, response)):
            module.embedded_check_nvsram_compatibility()
def test_embedded_check_nvsram_compatibility_fail(self):
    """Verify embedded nvsram compatibility fails with expected exceptions."""
    self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
    firmware = NetAppESeriesFirmware()
    # Stub multipart form construction so no real NVSRAM file is read.
    with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
        # Scenario 1: the compatibility request itself fails.
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve NVSRAM compatibility results."):
            with patch(self.REQUEST_FUNC, return_value=Exception()):
                firmware.embedded_check_nvsram_compatibility()
        # Scenario 2: signature test failed -> file rejected as invalid.
        with self.assertRaisesRegexp(AnsibleFailJson, "Invalid NVSRAM file."):
            with patch(self.REQUEST_FUNC, return_value=(200, {"signatureTestingPassed": False,
                                                              "fileCompatible": False,
                                                              "versionContents": [{"module": "nvsram",
                                                                                   "bundledVersion": "N280X-842834-D02",
                                                                                   "onboardVersion": "N280X-842834-D02"}]})):
                firmware.embedded_check_nvsram_compatibility()
        # Scenario 3: signature is fine but the file is not compatible.
        with self.assertRaisesRegexp(AnsibleFailJson, "Incompatible NVSRAM file."):
            with patch(self.REQUEST_FUNC, return_value=(200, {"signatureTestingPassed": True,
                                                              "fileCompatible": False,
                                                              "versionContents": [{"module": "nvsram",
                                                                                   "bundledVersion": "N280X-842834-D02",
                                                                                   "onboardVersion": "N280X-842834-D02"}]})):
                firmware.embedded_check_nvsram_compatibility()
def test_embedded_check_firmware_compatibility_pass(self):
    """Verify embedded firmware compatibility."""
    self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
    firmware = NetAppESeriesFirmware()
    # Stub multipart form construction (no real firmware file is read) and have
    # the endpoint report a valid signature and a fully compatible bundle.
    with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
        with patch(self.REQUEST_FUNC, return_value=(200, {
                "signatureTestingPassed": True,
                "fileCompatible": True,
                "versionContents": [
                    {"module": "bundle", "bundledVersion": "08.42.50.00.000", "onboardVersion": "08.42.30.05"},
                    {"module": "bundleDisplay", "bundledVersion": "11.40.5", "onboardVersion": "11.40.3R2"},
                    {"module": "hypervisor", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
                    {"module": "raid", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
                    {"module": "management", "bundledVersion": "11.42.0000.0028", "onboardVersion": "11.42.0000.0026"},
                    {"module": "iom", "bundledVersion": "11.42.0G00.0003", "onboardVersion": "11.42.0G00.0001"}]})):
            # Should complete without raising AnsibleFailJson.
            firmware.embedded_check_bundle_compatibility()
def test_embedded_check_firmware_compatibility_fail(self):
    """Verify embedded firmware compatibility fails with expected exceptions."""
    self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
    firmware = NetAppESeriesFirmware()

    # Scenario: the compatibility request itself fails (Exception returned by request).
    with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve bundle compatibility results."):
        with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
            with patch(self.REQUEST_FUNC, return_value=Exception()):
                firmware.embedded_check_bundle_compatibility()

    # Scenario: signature test failed -> invalid bundle file.
    with self.assertRaisesRegexp(AnsibleFailJson, "Invalid firmware bundle file."):
        with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
            with patch(self.REQUEST_FUNC, return_value=(200, {
                    "signatureTestingPassed": False,
                    "fileCompatible": True,
                    "versionContents": [
                        {"module": "bundle", "bundledVersion": "08.42.50.00.000", "onboardVersion": "08.42.30.05"},
                        {"module": "bundleDisplay", "bundledVersion": "11.40.5", "onboardVersion": "11.40.3R2"},
                        {"module": "hypervisor", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
                        {"module": "raid", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
                        {"module": "management", "bundledVersion": "11.42.0000.0028", "onboardVersion": "11.42.0000.0026"},
                        {"module": "iom", "bundledVersion": "11.42.0G00.0003", "onboardVersion": "11.42.0G00.0001"}]})):
                firmware.embedded_check_bundle_compatibility()

    # Scenario: signature passed but the file is flagged incompatible.
    with self.assertRaisesRegexp(AnsibleFailJson, "Incompatible firmware bundle file."):
        with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
            with patch(self.REQUEST_FUNC, return_value=(200, {
                    "signatureTestingPassed": True,
                    "fileCompatible": False,
                    "versionContents": [
                        {"module": "bundle", "bundledVersion": "08.42.50.00.000", "onboardVersion": "08.42.30.05"},
                        {"module": "bundleDisplay", "bundledVersion": "11.40.5", "onboardVersion": "11.40.3R2"},
                        {"module": "hypervisor", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
                        {"module": "raid", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
                        {"module": "management", "bundledVersion": "11.42.0000.0028", "onboardVersion": "11.42.0000.0026"},
                        {"module": "iom", "bundledVersion": "11.42.0G00.0003", "onboardVersion": "11.42.0G00.0001"}]})):
                firmware.embedded_check_bundle_compatibility()

    # Scenario: bundled version (08.42...) older than onboard (08.50...) -> downgrade rejected.
    with self.assertRaisesRegexp(AnsibleFailJson, "Downgrades are not permitted."):
        with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
            with patch(self.REQUEST_FUNC, return_value=(200, {
                    "signatureTestingPassed": True,
                    "fileCompatible": True,
                    "versionContents": [
                        {"module": "bundle", "bundledVersion": "08.42.00.00.000", "onboardVersion": "08.50.30.05"},
                        {"module": "bundleDisplay", "bundledVersion": "11.40.5", "onboardVersion": "11.40.3R2"},
                        {"module": "hypervisor", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
                        {"module": "raid", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
                        {"module": "management", "bundledVersion": "11.42.0000.0028", "onboardVersion": "11.42.0000.0026"},
                        {"module": "iom", "bundledVersion": "11.42.0G00.0003", "onboardVersion": "11.42.0G00.0001"}]})):
                firmware.embedded_check_bundle_compatibility()

    # Scenario: downgrade across a major version boundary (08.x bundled vs 09.x onboard).
    with self.assertRaisesRegexp(AnsibleFailJson, "Downgrades are not permitted."):
        with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
            with patch(self.REQUEST_FUNC, return_value=(200, {
                    "signatureTestingPassed": True,
                    "fileCompatible": True,
                    "versionContents": [
                        {"module": "bundle", "bundledVersion": "08.42.00.00.000", "onboardVersion": "09.20.30.05"},
                        {"module": "bundleDisplay", "bundledVersion": "11.40.5", "onboardVersion": "11.40.3R2"},
                        {"module": "hypervisor", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
                        {"module": "raid", "bundledVersion": "08.42.50.00", "onboardVersion": "08.42.30.05"},
                        {"module": "management", "bundledVersion": "11.42.0000.0028", "onboardVersion": "11.42.0000.0026"},
                        {"module": "iom", "bundledVersion": "11.42.0G00.0003", "onboardVersion": "11.42.0G00.0001"}]})):
                firmware.embedded_check_bundle_compatibility()
def test_embedded_wait_for_upgrade_pass(self):
    """Verify controller reboot wait succeeds."""
    self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
    firmware = NetAppESeriesFirmware()
    firmware.firmware_version = lambda: b"11.40.3R2"
    firmware.nvsram_version = lambda: b"N280X-842834-D02"

    # Storage-system report already matches the expected firmware and NVSRAM
    # versions, so the wait loop should return on the first poll.
    controller_report = [{"fwVersion": "08.42.30.05",
                          "nvsramVersion": "N280X-842834-D02",
                          "extendedSAData": {"codeVersions": [{"codeModule": "bundleDisplay",
                                                               "versionString": "11.40.3R2"}]}}]
    with patch(self.SLEEP_FUNC, return_value=None), \
            patch(self.REQUEST_FUNC, return_value=(200, controller_report)):
        firmware.embedded_wait_for_upgrade()
def test_embedded_wait_for_upgrade_fail(self):
    """Verify controller reboot wait throws expected exceptions"""
    self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
    instance = NetAppESeriesFirmware()

    # Web services never comes back after the reboot: every poll errors out,
    # so the wait must eventually fail with a timeout message.
    with self.assertRaisesRegexp(AnsibleFailJson, "Timeout waiting for Santricity Web Services Embedded."):
        with patch(self.SLEEP_FUNC, return_value=None), \
                patch(self.BASE_REQUEST_FUNC, return_value=Exception()):
            instance.embedded_wait_for_upgrade()
def test_embedded_upgrade_pass(self):
    """Verify embedded upgrade function."""
    with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})):
        with patch(self.SLEEP_FUNC, return_value=None):
            # Scenario: fire-and-forget upgrade. The two initial Exceptions model
            # the controller rebooting mid-upload; the module returns with the
            # upgrade still marked in progress.
            self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
            firmware = NetAppESeriesFirmware()
            with patch(self.REQUEST_FUNC, return_value=(200, "")):
                with patch(self.BASE_REQUEST_FUNC, side_effect=[Exception(), Exception(), (200, "")]):
                    firmware.embedded_upgrade()
                    self.assertTrue(firmware.upgrade_in_progress)

            # Scenario: wait_for_completion=True. The reported versions already
            # match the expected firmware/NVSRAM, so the upgrade finishes and
            # upgrade_in_progress is cleared.
            self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp", "wait_for_completion": True})
            firmware = NetAppESeriesFirmware()
            firmware.firmware_version = lambda: b"11.40.3R2"
            firmware.nvsram_version = lambda: b"N280X-842834-D02"
            with patch(self.REQUEST_FUNC, return_value=(200, [{"fwVersion": "08.42.30.05", "nvsramVersion": "N280X-842834-D02",
                                                               "extendedSAData": {"codeVersions": [{"codeModule": "bundleDisplay",
                                                                                                    "versionString": "11.40.3R2"}]}}])):
                firmware.embedded_upgrade()
                self.assertFalse(firmware.upgrade_in_progress)
def test_embedded_upgrade_fail(self):
    """Verify embedded upgrade throws expected exception."""
    self._set_args({"firmware": "test.dlp", "nvsram": "test.dlp"})
    instance = NetAppESeriesFirmware()

    # A failing upload request must surface as an upload/activate failure.
    with self.assertRaisesRegexp(AnsibleFailJson, "Failed to upload and activate firmware."):
        with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("", {})), \
                patch(self.REQUEST_FUNC, return_value=Exception()):
            instance.embedded_upgrade()
def test_check_nvsram_compatibility_pass(self):
    """Verify proxy nvsram compatibility."""
    self._set_args({"firmware": "test.dlp", "nvsram": "test_nvsram.dlp"})
    firmware = NetAppESeriesFirmware()

    # Proxy flow: start the check (requestId), poll once while it is still
    # running, then receive results naming the NVSRAM file as compatible.
    with patch(self.SLEEP_FUNC, return_value=None):
        with patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": 1}),
                                                   (200, {"checkRunning": True}),
                                                   (200, {"checkRunning": False,
                                                          "results": [{"nvsramFiles": [{"filename": "test_nvsram.dlp"}]}]})]):
            firmware.proxy_check_nvsram_compatibility()
def test_check_nvsram_compatibility_fail(self):
    """Verify proxy nvsram compatibility throws expected exceptions."""
    self._set_args({"firmware": "test.dlp", "nvsram": "test_nvsram.dlp"})
    firmware = NetAppESeriesFirmware()
    with patch(self.SLEEP_FUNC, return_value=None):
        # Scenario: the initial compatibility request fails outright.
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to receive NVSRAM compatibility information."):
            with patch(self.REQUEST_FUNC, return_value=Exception()):
                firmware.proxy_check_nvsram_compatibility()

        # Scenario: check starts but the status poll fails.
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve NVSRAM status update from proxy."):
            with patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": 1}), Exception()]):
                firmware.proxy_check_nvsram_compatibility()

        # Scenario: results do not list the uploaded NVSRAM file -> incompatible.
        with self.assertRaisesRegexp(AnsibleFailJson, "NVSRAM is not compatible."):
            with patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": 1}),
                                                       (200, {"checkRunning": True}),
                                                       (200, {"checkRunning": False,
                                                              "results": [{"nvsramFiles": [{"filename": "not_test_nvsram.dlp"}]}]})]):
                firmware.proxy_check_nvsram_compatibility()
def test_check_firmware_compatibility_pass(self):
    """Verify proxy firmware compatibility."""
    self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
    firmware = NetAppESeriesFirmware()

    # Proxy flow: start the check, poll once while running, then receive
    # results naming the firmware bundle as compatible.
    with patch(self.SLEEP_FUNC, return_value=None):
        with patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": 1}),
                                                   (200, {"checkRunning": True}),
                                                   (200, {"checkRunning": False,
                                                          "results": [{"cfwFiles": [{"filename": "test_firmware.dlp"}]}]})]):
            firmware.proxy_check_firmware_compatibility()
def test_check_firmware_compatibility_fail(self):
    """Verify proxy firmware compatibility throws expected exceptions."""
    self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
    firmware = NetAppESeriesFirmware()
    with patch(self.SLEEP_FUNC, return_value=None):
        # Scenario: the initial compatibility request fails outright.
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to receive firmware compatibility information."):
            with patch(self.REQUEST_FUNC, return_value=Exception()):
                firmware.proxy_check_firmware_compatibility()

        # Scenario: check starts but the status poll fails.
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve firmware status update from proxy."):
            with patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": 1}), Exception()]):
                firmware.proxy_check_firmware_compatibility()

        # Scenario: results do not list the uploaded bundle -> incompatible.
        with self.assertRaisesRegexp(AnsibleFailJson, "Firmware bundle is not compatible."):
            with patch(self.REQUEST_FUNC, side_effect=[(200, {"requestId": 1}),
                                                       (200, {"checkRunning": True}),
                                                       (200, {"checkRunning": False,
                                                              "results": [{"cfwFiles": [{"filename": "not_test_firmware.dlp"}]}]})]):
                firmware.proxy_check_firmware_compatibility()
def test_proxy_upload_and_check_compatibility_pass(self):
    """Verify proxy_upload_and_check_compatibility"""
    self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
    firmware = NetAppESeriesFirmware()
    # Stub out the compatibility sub-checks; this test only exercises uploads.
    firmware.proxy_check_nvsram_compatibility = lambda: None
    firmware.proxy_check_firmware_compatibility = lambda: None
    with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("headers", "data")):
        # Scenario: neither file is on the proxy yet, so both must be uploaded
        # (the two trailing (200, None) responses are the upload requests).
        with patch(self.REQUEST_FUNC, side_effect=[(200, [{"version": "XX.XX.XX.XX", "filename": "test"},
                                                          {"version": "XXXXXXXXXX", "filename": "test.dlp"}]),
                                                   (200, None), (200, None)]):
            firmware.proxy_upload_and_check_compatibility()

        # Scenario: both files already exist on the proxy, so no uploads occur.
        with patch(self.REQUEST_FUNC, return_value=(200, [{"version": "XX.XX.XX.XX", "filename": "test"},
                                                          {"version": "test_nvsram", "filename": "test_nvsram.dlp"},
                                                          {"version": "test", "filename": "test.dlp"},
                                                          {"filename": "test_firmware.dlp", "version": "test_firmware"}])):
            firmware.proxy_upload_and_check_compatibility()
def test_proxy_upload_and_check_compatibility_fail(self):
    """Verify proxy_upload_and_check_compatibility throws expected exceptions."""
    self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
    firmware = NetAppESeriesFirmware()
    firmware.proxy_check_nvsram_compatibility = lambda: None

    # Scenario: listing the proxy's existing files fails.
    # NOTE(review): the doubled "existing existing" mirrors the module's actual
    # failure message; the regex must match it verbatim.
    with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve existing existing firmware files."):
        with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("headers", "data")):
            with patch(self.REQUEST_FUNC, return_value=Exception()):
                firmware.proxy_upload_and_check_compatibility()

    # Scenario: file list succeeds but the NVSRAM upload request fails.
    with self.assertRaisesRegexp(AnsibleFailJson, "Failed to upload NVSRAM file."):
        with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("headers", "data")):
            with patch(self.REQUEST_FUNC, side_effect=[(200, [{"version": "XX.XX.XX.XX", "filename": "test"},
                                                              {"version": "XXXXXXXXXX", "filename": "test.dlp"}]),
                                                       Exception()]):
                firmware.proxy_upload_and_check_compatibility()

    # Scenario: NVSRAM upload succeeds but the firmware bundle upload fails.
    with self.assertRaisesRegexp(AnsibleFailJson, "Failed to upload firmware bundle file."):
        with patch(self.CREATE_MULTIPART_FORMDATA_FUNC, return_value=("headers", "data")):
            with patch(self.REQUEST_FUNC, side_effect=[(200, [{"version": "XX.XX.XX.XX", "filename": "test"},
                                                              {"version": "XXXXXXXXXX", "filename": "test.dlp"}]),
                                                       (200, None), Exception()]):
                firmware.proxy_upload_and_check_compatibility()
def test_proxy_check_upgrade_required_pass(self):
    """Verify proxy_check_upgrade_required."""
    self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
    firmware = NetAppESeriesFirmware()

    # Both firmware and NVSRAM match the installed versions -> no upgrade
    # required, checked for both bundled and unbundled firmware paths.
    firmware.firmware_version = lambda: b"08.42.50.00"
    firmware.nvsram_version = lambda: b"nvsram_version"
    with patch(self.REQUEST_FUNC, side_effect=[(200, [{"versionString": "08.42.50.00"}]), (200, ["nvsram_version"])]):
        firmware.is_firmware_bundled = lambda: True
        firmware.proxy_check_upgrade_required()
        self.assertFalse(firmware.upgrade_required)
    with patch(self.REQUEST_FUNC, side_effect=[(200, ["08.42.50.00"]), (200, ["nvsram_version"])]):
        firmware.is_firmware_bundled = lambda: False
        firmware.proxy_check_upgrade_required()
        self.assertFalse(firmware.upgrade_required)

    # Same firmware but a different NVSRAM image -> upgrade required.
    firmware.firmware_version = lambda: b"08.42.50.00"
    firmware.nvsram_version = lambda: b"not_nvsram_version"
    with patch(self.REQUEST_FUNC, side_effect=[(200, [{"versionString": "08.42.50.00"}]), (200, ["nvsram_version"])]):
        firmware.is_firmware_bundled = lambda: True
        firmware.proxy_check_upgrade_required()
        self.assertTrue(firmware.upgrade_required)
    with patch(self.REQUEST_FUNC, side_effect=[(200, ["08.42.50.00"]), (200, ["nvsram_version"])]):
        firmware.is_firmware_bundled = lambda: False
        firmware.proxy_check_upgrade_required()
        self.assertTrue(firmware.upgrade_required)

    # Newer firmware with matching NVSRAM -> upgrade required.
    firmware.firmware_version = lambda: b"08.52.00.00"
    firmware.nvsram_version = lambda: b"nvsram_version"
    with patch(self.REQUEST_FUNC, side_effect=[(200, [{"versionString": "08.42.50.00"}]), (200, ["nvsram_version"])]):
        firmware.is_firmware_bundled = lambda: True
        firmware.proxy_check_upgrade_required()
        self.assertTrue(firmware.upgrade_required)
    with patch(self.REQUEST_FUNC, side_effect=[(200, ["08.42.50.00"]), (200, ["nvsram_version"])]):
        firmware.is_firmware_bundled = lambda: False
        firmware.proxy_check_upgrade_required()
        self.assertTrue(firmware.upgrade_required)

    # Newer firmware and different NVSRAM -> upgrade required.
    firmware.firmware_version = lambda: b"08.52.00.00"
    firmware.nvsram_version = lambda: b"not_nvsram_version"
    with patch(self.REQUEST_FUNC, side_effect=[(200, [{"versionString": "08.42.50.00"}]), (200, ["nvsram_version"])]):
        firmware.is_firmware_bundled = lambda: True
        firmware.proxy_check_upgrade_required()
        self.assertTrue(firmware.upgrade_required)
    with patch(self.REQUEST_FUNC, side_effect=[(200, ["08.42.50.00"]), (200, ["nvsram_version"])]):
        firmware.is_firmware_bundled = lambda: False
        firmware.proxy_check_upgrade_required()
        self.assertTrue(firmware.upgrade_required)
def test_proxy_check_upgrade_required_fail(self):
    """Verify proxy_check_upgrade_required throws expected exceptions."""
    self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
    firmware = NetAppESeriesFirmware()
    firmware.firmware_version = lambda: b"08.42.50.00"
    firmware.nvsram_version = lambda: b"not_nvsram_version"

    # Scenario: request fails while reading controller firmware information.
    with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve controller firmware information."):
        with patch(self.REQUEST_FUNC, return_value=Exception()):
            firmware.proxy_check_upgrade_required()

    # Scenario: request fails while reading the NVSRAM version
    # (bundled and unbundled firmware paths).
    with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve storage system's NVSRAM version."):
        with patch(self.REQUEST_FUNC, side_effect=[(200, [{"versionString": "08.42.50.00"}]), Exception()]):
            firmware.is_firmware_bundled = lambda: True
            firmware.proxy_check_upgrade_required()
    with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve storage system's NVSRAM version."):
        with patch(self.REQUEST_FUNC, side_effect=[(200, ["08.42.50.00"]), Exception()]):
            firmware.is_firmware_bundled = lambda: False
            firmware.proxy_check_upgrade_required()

    # Scenario: candidate firmware is older than installed -> downgrade rejected.
    with self.assertRaisesRegexp(AnsibleFailJson, "Downgrades are not permitted."):
        with patch(self.REQUEST_FUNC, side_effect=[(200, [{"versionString": "08.42.50.00"}]), (200, ["nvsram_version"])]):
            firmware.firmware_version = lambda: b"08.40.00.00"
            # FIX: version stubs return bytes everywhere else in this class;
            # this one previously returned a str, inconsistent with the others.
            firmware.nvsram_version = lambda: b"nvsram_version"
            firmware.is_firmware_bundled = lambda: True
            firmware.proxy_check_upgrade_required()
    with self.assertRaisesRegexp(AnsibleFailJson, "Downgrades are not permitted."):
        with patch(self.REQUEST_FUNC, side_effect=[(200, ["08.42.50.00"]), (200, ["nvsram_version"])]):
            firmware.is_firmware_bundled = lambda: False
            firmware.proxy_check_upgrade_required()
def test_proxy_wait_for_upgrade_pass(self):
    """Verify proxy_wait_for_upgrade."""
    with patch(self.SLEEP_FUNC, return_value=None):
        self._set_args({"firmware": "test_firmware.dlp", "nvsram": "expected_nvsram.dlp"})
        firmware = NetAppESeriesFirmware()

        # Bundled path: poll the upgrade status endpoint until "complete".
        firmware.is_firmware_bundled = lambda: True
        with patch(self.REQUEST_FUNC, side_effect=[(200, {"status": "not_done"}), (200, {"status": "complete"})]):
            firmware.proxy_wait_for_upgrade("1")

        # Unbundled path: poll reported firmware/NVSRAM versions until both
        # match the expected values (first pair mismatches, second matches).
        firmware.is_firmware_bundled = lambda: False
        firmware.firmware_version = lambda: b"08.50.00.00"
        firmware.nvsram_version = lambda: b"expected_nvsram"
        with patch(self.REQUEST_FUNC, side_effect=[(200, ["08.40.00.00"]), (200, ["not_expected_nvsram"]),
                                                   (200, ["08.50.00.00"]), (200, ["expected_nvsram"])]):
            firmware.proxy_wait_for_upgrade("1")
def test_proxy_wait_for_upgrade_fail(self):
    """Verify proxy_wait_for_upgrade throws expected exceptions"""
    with patch(self.SLEEP_FUNC, return_value=None):
        self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
        firmware = NetAppESeriesFirmware()

        # Bundled path: status request fails outright.
        firmware.is_firmware_bundled = lambda: True
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve firmware upgrade status."):
            with patch(self.REQUEST_FUNC, return_value=Exception()):
                firmware.proxy_wait_for_upgrade("1")

        # Unbundled path: version polls never succeed -> timeout.
        firmware.is_firmware_bundled = lambda: False
        with self.assertRaisesRegexp(AnsibleFailJson, "Timed out waiting for firmware upgrade to complete."):
            with patch(self.REQUEST_FUNC, return_value=Exception()):
                firmware.proxy_wait_for_upgrade("1")

        # Bundled path: proxy reports the upgrade itself failed.
        firmware.is_firmware_bundled = lambda: True
        with self.assertRaisesRegexp(AnsibleFailJson, "Firmware upgrade failed to complete."):
            with patch(self.REQUEST_FUNC, side_effect=[(200, {"status": "not_done"}), (200, {"status": "failed"})]):
                firmware.proxy_wait_for_upgrade("1")
def test_proxy_upgrade_fail(self):
    """Verify proxy_upgrade throws expected exceptions."""
    self._set_args({"firmware": "test_firmware.dlp", "nvsram": "test_nvsram.dlp"})
    firmware = NetAppESeriesFirmware()

    # Both the bundled and the unbundled upload path must surface a failing
    # upgrade-initiation request as the same module failure.
    for bundled in (True, False):
        firmware.is_firmware_bundled = lambda: bundled
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to initiate firmware upgrade."):
            with patch(self.REQUEST_FUNC, return_value=Exception()):
                firmware.proxy_upgrade()

View file

@ -1,76 +0,0 @@
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_global import GlobalSettings
from ansible_collections.community.general.tests.unit.modules.utils import AnsibleFailJson, ModuleTestCase, set_module_args
__metaclass__ = type
from ansible_collections.community.general.tests.unit.compat import mock
class GlobalSettingsTest(ModuleTestCase):
    # Minimum module arguments every test case requires.
    REQUIRED_PARAMS = {
        'api_username': 'rw',
        'api_password': 'password',
        'api_url': 'http://localhost',
        'ssid': '1',
    }
    # Dotted path of the request helper inside the module under test.
    REQ_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_global.request'

    def _set_args(self, args=None):
        """Merge optional test-specific args into REQUIRED_PARAMS and register them."""
        module_args = self.REQUIRED_PARAMS.copy()
        if args is not None:
            module_args.update(args)
        set_module_args(module_args)

    def test_set_name(self):
        """Ensure we can successfully set the name"""
        self._set_args(dict(name="x"))
        expected = dict(name='y', status='online')
        namer = GlobalSettings()
        # Expecting an update: current name 'y' differs from requested 'x'.
        with mock.patch(self.REQ_FUNC, return_value=(200, expected)) as req:
            with mock.patch.object(namer, 'get_name', return_value='y'):
                update = namer.update_name()
                self.assertTrue(update)
        # Expecting no update: current name already matches the request.
        with mock.patch(self.REQ_FUNC, return_value=(200, expected)) as req:
            with mock.patch.object(namer, 'get_name', return_value='x'):
                update = namer.update_name()
                self.assertFalse(update)
        # Expecting an update, but no actual calls, since we're using check_mode=True
        namer.check_mode = True
        with mock.patch(self.REQ_FUNC, return_value=(200, expected)) as req:
            with mock.patch.object(namer, 'get_name', return_value='y'):
                update = namer.update_name()
                self.assertEqual(0, req.called)
                self.assertTrue(update)

    def test_get_name(self):
        """Ensure we can successfully get the name"""
        self._set_args()
        expected = dict(name='y', status='online')
        namer = GlobalSettings()
        with mock.patch(self.REQ_FUNC, return_value=(200, expected)) as req:
            name = namer.get_name()
            self.assertEqual(name, expected['name'])

    def test_get_name_fail(self):
        """Ensure get_name and update_name fail with AnsibleFailJson on errors"""
        self._set_args()
        expected = dict(name='y', status='offline')
        namer = GlobalSettings()
        # Request failure while reading the array name.
        with self.assertRaises(AnsibleFailJson):
            with mock.patch(self.REQ_FUNC, side_effect=Exception()) as req:
                name = namer.get_name()
        # An array that is not online cannot be renamed.
        with self.assertRaises(AnsibleFailJson):
            with mock.patch(self.REQ_FUNC, return_value=(200, expected)) as req:
                update = namer.update_name()

View file

@ -1,489 +0,0 @@
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_host import Host
from ansible_collections.community.general.tests.unit.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
__metaclass__ = type
try:
from unittest import mock
except ImportError:
import mock
class HostTest(ModuleTestCase):
    # Minimum module arguments every test case requires.
    REQUIRED_PARAMS = {
        'api_username': 'rw',
        'api_password': 'password',
        'api_url': 'http://localhost',
        'ssid': '1',
        'name': '1',
    }
    # Minimal host record fixtures used by individual tests.
    HOST = {
        'name': '1',
        'hostRef': '123',
        'label': '1',
        'id': '0' * 30,
        'clusterRef': 40 * '0',
        'hostTypeIndex': 28,
        'hostSidePorts': [],
        'initiators': [],
        'ports': [],
    }
    HOST_ALT = {
        'name': '2',
        'label': '2',
        'id': '1' * 30,
        'clusterRef': '1',
        'hostSidePorts': [],
        'initiators': [],
        'ports': [],
    }
    # Canned response for the hosts endpoint: two ungrouped hosts plus one
    # (beegfs_metadata2) belonging to the cluster defined in HOST_GROUPS.
    EXISTING_HOSTS = [
        {"hostRef": "84000000600A098000A4B28D00303D065D430118", "clusterRef": "0000000000000000000000000000000000000000", "label": "beegfs_storage1",
         "hostTypeIndex": 28, "ports": [], "initiators": [{"initiatorRef": "89000000600A098000A4B28D00303CF55D4300E3",
                                                           "nodeName": {"ioInterfaceType": "iscsi",
                                                                        "iscsiNodeName": "iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818",
                                                                        "remoteNodeWWN": None, "nvmeNodeName": None},
                                                           "alias": {"ioInterfaceType": "iscsi", "iscsiAlias": ""}, "label": "beegfs_storage1_iscsi_0",
                                                           "hostRef": "84000000600A098000A4B28D00303D065D430118",
                                                           "id": "89000000600A098000A4B28D00303CF55D4300E3"}],
         "hostSidePorts": [{"type": "iscsi", "address": "iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818", "label": "beegfs_storage1_iscsi_0"}],
         "id": "84000000600A098000A4B28D00303D065D430118", "name": "beegfs_storage1"},
        {"hostRef": "84000000600A098000A4B9D10030370B5D430109", "clusterRef": "0000000000000000000000000000000000000000", "label": "beegfs_metadata1",
         "hostTypeIndex": 28, "ports": [], "initiators": [{"initiatorRef": "89000000600A098000A4B28D00303CFC5D4300F7",
                                                           "nodeName": {"ioInterfaceType": "iscsi",
                                                                        "iscsiNodeName": "iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8",
                                                                        "remoteNodeWWN": None, "nvmeNodeName": None},
                                                           "alias": {"ioInterfaceType": "iscsi", "iscsiAlias": ""}, "label": "beegfs_metadata1_iscsi_0",
                                                           "hostRef": "84000000600A098000A4B9D10030370B5D430109",
                                                           "id": "89000000600A098000A4B28D00303CFC5D4300F7"}],
         "hostSidePorts": [{"type": "iscsi", "address": "iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8", "label": "beegfs_metadata1_iscsi_0"}],
         "id": "84000000600A098000A4B9D10030370B5D430109", "name": "beegfs_metadata1"},
        {"hostRef": "84000000600A098000A4B9D10030370B5D430109", "clusterRef": "85000000600A098000A4B9D1003637135D483DEB", "label": "beegfs_metadata2",
         "hostTypeIndex": 28, "ports": [], "initiators": [{"initiatorRef": "89000000600A098000A4B28D00303CFC5D4300F7",
                                                           "nodeName": {"ioInterfaceType": "iscsi",
                                                                        "iscsiNodeName": "iqn.used_elsewhere",
                                                                        "remoteNodeWWN": None, "nvmeNodeName": None},
                                                           "alias": {"ioInterfaceType": "iscsi", "iscsiAlias": ""}, "label": "beegfs_metadata2_iscsi_0",
                                                           "hostRef": "84000000600A098000A4B9D10030370B5D430109",
                                                           "id": "89000000600A098000A4B28D00303CFC5D4300F7"}],
         "hostSidePorts": [{"type": "iscsi", "address": "iqn.used_elsewhere", "label": "beegfs_metadata2_iscsi_0"}],
         "id": "84000000600A098000A4B9D10030370B5D430120", "name": "beegfs_metadata2"}]
    # Canned response for the host-groups endpoint.
    HOST_GROUPS = [{"clusterRef": "85000000600A098000A4B9D1003637135D483DEB", "label": "test_group", "isSAControlled": False,
                    "confirmLUNMappingCreation": False, "protectionInformationCapableAccessMethod": True, "isLun0Restricted": False,
                    "id": "85000000600A098000A4B9D1003637135D483DEB", "name": "test_group"}]
    # Canned response for the host-types endpoint (name/index/code triples).
    HOST_TYPES = [{"name": "FactoryDefault", "index": 0, "code": "FactoryDefault"},
                  {"name": "Windows 2000/Server 2003/Server 2008 Non-Clustered", "index": 1, "code": "W2KNETNCL"},
                  {"name": "Solaris", "index": 2, "code": "SOL"},
                  {"name": "Linux", "index": 6, "code": "LNX"},
                  {"name": "LnxALUA", "index": 7, "code": "LnxALUA"},
                  {"name": "Windows 2000/Server 2003/Server 2008 Clustered", "index": 8, "code": "W2KNETCL"},
                  {"name": "LnxTPGSALUA_SF", "index": 27, "code": "LnxTPGSALUA_SF"},
                  {"name": "LnxDHALUA", "index": 28, "code": "LnxDHALUA"}]
    # Dotted path of the request helper inside the module under test.
    REQ_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_host.request'
def _set_args(self, args):
    """Merge test-specific args into REQUIRED_PARAMS and register them."""
    merged = dict(self.REQUIRED_PARAMS)
    merged.update(args)
    set_module_args(merged)
def test_host_exists_pass(self):
    """Verify host_exists produces expected results."""
    with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
        # Entirely new host name -> does not exist.
        self._set_args({'state': 'present', 'name': 'new_host', 'host_type': 'linux dm-mp', 'force_port': False,
                        'ports': [{'label': 'new_host_port_1', 'type': 'fc', 'port': '0x08ef08ef08ef08ef'}]})
        host = Host()
        self.assertFalse(host.host_exists())

        # Known port but unknown host name -> does not exist.
        self._set_args({'state': 'present', 'name': 'does_not_exist', 'host_type': 'linux dm-mp',
                        'ports': [{'label': 'beegfs_storage1_iscsi_0', 'type': 'iscsi',
                                   'port': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818'}]})
        host = Host()
        self.assertFalse(host.host_exists())

        # Existing host name with a different port -> exists.
        self._set_args({'state': 'present', 'name': 'beegfs_storage1', 'host_type': 'linux dm-mp',
                        'ports': [{'label': 'beegfs_storage1_iscsi_0', 'type': 'iscsi', 'port': 'iqn.differentiqn.org'}]})
        host = Host()
        self.assertTrue(host.host_exists())

    # Existing host name while force-taking a port owned by another host -> exists.
    with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
        self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': True,
                        'ports': [{'label': 'beegfs_metadata1_iscsi_0', 'type': 'iscsi',
                                   'port': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818'}]})
        host = Host()
        self.assertTrue(host.host_exists())
def test_host_exists_fail(self):
    """Verify host_exists produces expected exceptions."""
    module_args = {'state': 'present', 'host_type': 'linux dm-mp',
                   'ports': [{'label': 'abc', 'type': 'iscsi', 'port': 'iqn:0'}]}
    self._set_args(module_args)
    instance = Host()

    # A failing hosts request must surface as a host-existence failure.
    with self.assertRaisesRegexp(AnsibleFailJson, "Failed to determine host existence."):
        with mock.patch(self.REQ_FUNC, return_value=Exception()):
            instance.host_exists()
def test_needs_update_pass(self):
    """Verify needs_update produces expected results."""
    # No changes: requested port matches the existing record -> no update.
    with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
        self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp',
                        'ports': [{'label': 'beegfs_metadata1_iscsi_0', 'type': 'iscsi',
                                   'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
        host = Host()
        host.host_exists()
        self.assertFalse(host.needs_update())

    # Change host type -> update required.
    with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
        self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': False,
                        'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi', 'port': 'iqn.not_used'}]})
        host = Host()
        host.host_exists()
        self.assertTrue(host.needs_update())

    # Add a new (unused) port to the host -> update required.
    with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
        self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
                        'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi', 'port': 'iqn.not_used'}]})
        host = Host()
        host.host_exists()
        self.assertTrue(host.needs_update())

    # Change the label of an existing port -> update required.
    with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
        self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
                        'ports': [{'label': 'beegfs_metadata1_iscsi_2', 'type': 'iscsi',
                                   'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
        host = Host()
        host.host_exists()
        self.assertTrue(host.needs_update())

    # Take a port from another host by force -> update required.
    with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
        self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': True,
                        'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi',
                                   'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
        host = Host()
        host.host_exists()
        self.assertTrue(host.needs_update())
def test_needs_update_fail(self):
    """Verify needs_update produces expected exceptions."""
    # Claiming a port already owned by another host without force_port must fail.
    self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
                    'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi',
                               'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
    with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
        instance = Host()
        instance.host_exists()
        with self.assertRaisesRegexp(AnsibleFailJson, "is associated with a different host."):
            instance.needs_update()
def test_valid_host_type_pass(self):
    """Validate the available host types."""
    # Both numeric indices and case-insensitive names must be accepted.
    with mock.patch(self.REQ_FUNC, return_value=(200, self.HOST_TYPES)):
        for host_type in ('0', '28', 'windows', 'linux dm-mp'):
            self._set_args({'state': 'present', 'host_type': host_type})
            instance = Host()
            self.assertTrue(instance.valid_host_type())
def test_valid_host_type_fail(self):
    """Validate the available host types."""
    # Unknown host type name is rejected during module construction,
    # before any request is made.
    # NOTE(review): the regex mirrors the module's actual (awkwardly worded)
    # failure message and must match it verbatim.
    with self.assertRaisesRegexp(AnsibleFailJson, "host_type must be either a host type name or host type index found integer the documentation"):
        self._set_args({'state': 'present', 'host_type': 'non-host-type'})
        host = Host()

    # Numeric index not present in the returned host-type list.
    with mock.patch(self.REQ_FUNC, return_value=(200, self.HOST_TYPES)):
        with self.assertRaisesRegexp(AnsibleFailJson, "There is no host type with index"):
            self._set_args({'state': 'present', 'host_type': '4'})
            host = Host()
            host.valid_host_type()

    # Request failure while fetching the host-type list.
    with mock.patch(self.REQ_FUNC, return_value=Exception()):
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to get host types."):
            self._set_args({'state': 'present', 'host_type': '4'})
            host = Host()
            host.valid_host_type()
def test_group_id_pass(self):
    """Verify group_id produces expected results."""
    with mock.patch(self.REQ_FUNC, return_value=(200, self.HOST_GROUPS)):
        # No group specified -> the all-zero (ungrouped) cluster reference.
        self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
                        'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi',
                                   'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
        host = Host()
        self.assertEqual(host.group_id(), "0000000000000000000000000000000000000000")

        # Named group -> the matching clusterRef from HOST_GROUPS.
        self._set_args({'state': 'present', 'name': 'beegfs_metadata2', 'host_type': 'linux dm-mp', 'force_port': False, 'group': 'test_group',
                        'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi',
                                   'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
        host = Host()
        self.assertEqual(host.group_id(), "85000000600A098000A4B9D1003637135D483DEB")
    def test_group_id_fail(self):
        """Verify group_id fails on a request error and on a non-existent group name."""
        # Request failure while fetching host groups.
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to get host groups."):
            with mock.patch(self.REQ_FUNC, return_value=Exception()):
                self._set_args({'state': 'present', 'name': 'beegfs_metadata2', 'host_type': 'linux dm-mp', 'force_port': False, 'group': 'test_group2',
                                'ports': [
                                    {'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi', 'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
                host = Host()
                host.group_id()
        # 'test_group2' is not present in HOST_GROUPS, so the lookup must fail.
        with self.assertRaisesRegexp(AnsibleFailJson, "No group with the name:"):
            with mock.patch(self.REQ_FUNC, return_value=(200, self.HOST_GROUPS)):
                self._set_args({'state': 'present', 'name': 'beegfs_metadata2', 'host_type': 'linux dm-mp', 'force_port': False, 'group': 'test_group2',
                                'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi',
                                           'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
                host = Host()
                host.group_id()
    def test_assigned_host_ports_pass(self):
        """Verify assigned_host_ports maps host refs to the port refs that must be unassigned."""
        # Add an unused port to the host: nothing needs to be unassigned.
        with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
            self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
                            'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi', 'port': 'iqn.not_used'}]})
            host = Host()
            host.host_exists()
            self.assertTrue(host.needs_update())
            self.assertEqual(host.assigned_host_ports(), {})
        # Change port name (force): the existing port must be released from its host.
        with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
            self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': True,
                            'ports': [{'label': 'beegfs_metadata1_iscsi_2', 'type': 'iscsi',
                                       'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
            host = Host()
            host.host_exists()
            self.assertTrue(host.needs_update())
            self.assertEqual(host.assigned_host_ports(), {'84000000600A098000A4B9D10030370B5D430109': ['89000000600A098000A4B28D00303CFC5D4300F7']})
        # Change port type: no existing assignment conflicts, so nothing to unassign.
        with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
            self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': True,
                            'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'fc', 'port': '08:ef:7e:24:52:a0'}]})
            host = Host()
            host.host_exists()
            self.assertTrue(host.needs_update())
            self.assertEqual(host.assigned_host_ports(), {})
        # Take port from another host by force: the conflicting assignment is reported.
        with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
            self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': True,
                            'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi', 'port': 'iqn.used_elsewhere'}]})
            host = Host()
            host.host_exists()
            self.assertTrue(host.needs_update())
            self.assertEqual(host.assigned_host_ports(), {'84000000600A098000A4B9D10030370B5D430109': ['89000000600A098000A4B28D00303CFC5D4300F7']})
        # Take port from another host by force, this time actually issuing the unassign request
        # (second side_effect entry is the unassign call's response).
        with mock.patch(self.REQ_FUNC, side_effect=[(200, self.EXISTING_HOSTS), (200, {})]):
            self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': True,
                            'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi', 'port': 'iqn.used_elsewhere'}]})
            host = Host()
            host.host_exists()
            self.assertTrue(host.needs_update())
            self.assertEqual(host.assigned_host_ports(apply_unassigning=True),
                             {'84000000600A098000A4B9D10030370B5D430109': ['89000000600A098000A4B28D00303CFC5D4300F7']})
    def test_assigned_host_ports_fail(self):
        """Verify assigned_host_ports fails when ports cannot be taken or unassignment errors out."""
        # Port already assigned and force_port is False.
        with self.assertRaisesRegexp(AnsibleFailJson, "There are no host ports available OR there are not enough unassigned host ports"):
            with mock.patch(self.REQ_FUNC, side_effect=[(200, self.EXISTING_HOSTS)]):
                self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
                                'ports': [{'label': 'beegfs_metadata1_iscsi_2', 'type': 'iscsi',
                                           'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
                host = Host()
                host.host_exists()
                self.assertTrue(host.needs_update())
                host.assigned_host_ports(apply_unassigning=True)
        # Take port from another host and fail because force == False.
        with self.assertRaisesRegexp(AnsibleFailJson, "There are no host ports available OR there are not enough unassigned host ports"):
            with mock.patch(self.REQ_FUNC, side_effect=[(200, self.EXISTING_HOSTS)]):
                self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
                                'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi', 'port': 'iqn.used_elsewhere'}]})
                host = Host()
                host.host_exists()
                self.assertTrue(host.needs_update())
                host.assigned_host_ports(apply_unassigning=True)
        # Same failure for a host (beegfs_metadata3) that does not exist yet.
        with self.assertRaisesRegexp(AnsibleFailJson, "There are no host ports available OR there are not enough unassigned host ports"):
            with mock.patch(self.REQ_FUNC, side_effect=[(200, self.EXISTING_HOSTS)]):
                self._set_args({'state': 'present', 'name': 'beegfs_metadata3', 'host_type': 'linux dm-mp', 'force_port': False,
                                'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi', 'port': 'iqn.used_elsewhere'}]})
                host = Host()
                host.host_exists()
                host.assigned_host_ports(apply_unassigning=True)
        # The unassign request itself fails (second side_effect entry raises).
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to unassign host port."):
            with mock.patch(self.REQ_FUNC, side_effect=[(200, self.EXISTING_HOSTS), Exception()]):
                self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': True,
                                'ports': [{'label': 'beegfs_metadata2_iscsi_0', 'type': 'iscsi', 'port': 'iqn.used_elsewhere'}]})
                host = Host()
                host.host_exists()
                self.assertTrue(host.needs_update())
                host.assigned_host_ports(apply_unassigning=True)
    def test_update_host_pass(self):
        """Verify update_host exits successfully for each kind of host change."""
        # Change host type.
        with self.assertRaises(AnsibleExitJson):
            with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
                self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': True,
                                'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi',
                                           'port': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818'}]})
                host = Host()
                # Stub out the exit payload; this test only cares that an update happens.
                host.build_success_payload = lambda x: {}
                host.host_exists()
                self.assertTrue(host.needs_update())
                host.update_host()
        # Change port iqn.
        with self.assertRaises(AnsibleExitJson):
            with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
                self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
                                'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi', 'port': 'iqn.not_used'}]})
                host = Host()
                host.build_success_payload = lambda x: {}
                host.host_exists()
                self.assertTrue(host.needs_update())
                host.update_host()
        # Change port type to fc.
        with self.assertRaises(AnsibleExitJson):
            with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
                self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False,
                                'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'fc', 'port': '0x08ef08ef08ef08ef'}]})
                host = Host()
                host.build_success_payload = lambda x: {}
                host.host_exists()
                self.assertTrue(host.needs_update())
                host.update_host()
        # Change port name.
        with self.assertRaises(AnsibleExitJson):
            with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
                self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': True,
                                'ports': [{'label': 'beegfs_metadata1_iscsi_12', 'type': 'iscsi',
                                           'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
                host = Host()
                host.build_success_payload = lambda x: {}
                host.host_exists()
                self.assertTrue(host.needs_update())
                host.update_host()
        # Change group membership (group_id stubbed to the target group's reference).
        with self.assertRaises(AnsibleExitJson):
            with mock.patch(self.REQ_FUNC, return_value=(200, self.EXISTING_HOSTS)):
                self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': False, 'group': 'test_group',
                                'ports': [{'label': 'beegfs_metadata1_iscsi_0', 'type': 'iscsi',
                                           'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
                host = Host()
                host.build_success_payload = lambda x: {}
                host.group_id = lambda: "85000000600A098000A4B9D1003637135D483DEB"
                host.host_exists()
                self.assertTrue(host.needs_update())
                host.update_host()
    def test_update_host_fail(self):
        """Verify update_host fails the module when the update request raises."""
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to update host."):
            # First request returns the existing hosts; the update request itself raises.
            with mock.patch(self.REQ_FUNC, side_effect=[(200, self.EXISTING_HOSTS), Exception()]):
                self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': False, 'group': 'test_group',
                                'ports': [{'label': 'beegfs_metadata1_iscsi_0', 'type': 'iscsi',
                                           'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
                host = Host()
                host.build_success_payload = lambda x: {}
                host.group_id = lambda: "85000000600A098000A4B9D1003637135D483DEB"
                host.host_exists()
                self.assertTrue(host.needs_update())
                host.update_host()
    def test_create_host_pass(self):
        """Verify create_host exits successfully when the host does not yet exist."""
        def _assigned_host_ports(apply_unassigning=False):
            # Stub: no port unassignment is needed for a brand-new host.
            return None
        with self.assertRaises(AnsibleExitJson):
            with mock.patch(self.REQ_FUNC, return_value=(200, {'id': '84000000600A098000A4B9D10030370B5D430109'})):
                self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': True, 'group': 'test_group',
                                'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi',
                                           'port': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818'}]})
                host = Host()
                host.host_exists = lambda: False
                host.assigned_host_ports = _assigned_host_ports
                host.build_success_payload = lambda x: {}
                host.group_id = lambda: "85000000600A098000A4B9D1003637135D483DEB"
                host.create_host()
    def test_create_host_fail(self):
        """Verify create_host fails on a request error and exits early if the host exists."""
        def _assigned_host_ports(apply_unassigning=False):
            # Stub: no port unassignment is needed.
            return None
        # The create request fails.
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to create host."):
            with mock.patch(self.REQ_FUNC, return_value=Exception()):
                self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': True, 'group': 'test_group',
                                'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi',
                                           'port': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818'}]})
                host = Host()
                host.host_exists = lambda: False
                host.assigned_host_ports = _assigned_host_ports
                host.build_success_payload = lambda x: {}
                host.group_id = lambda: "85000000600A098000A4B9D1003637135D483DEB"
                host.create_host()
        # An already-existing host causes a no-change module exit (not a failure).
        with self.assertRaisesRegexp(AnsibleExitJson, "Host already exists."):
            self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': True, 'group': 'test_group',
                            'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi',
                                       'port': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818'}]})
            host = Host()
            host.host_exists = lambda: True
            host.assigned_host_ports = _assigned_host_ports
            host.build_success_payload = lambda x: {}
            host.group_id = lambda: "85000000600A098000A4B9D1003637135D483DEB"
            host.create_host()
def test_remove_host_pass(self):
"""Verify remove_host produces expected results."""
with mock.patch(self.REQ_FUNC, return_value=(200, None)):
self._set_args({'state': 'absent', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False, 'group': 'test_group',
'ports': [{'label': 'beegfs_metadata1_iscsi_0', 'type': 'iscsi',
'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
host = Host()
host.host_obj = {"id": "84000000600A098000A4B9D10030370B5D430109"}
host.remove_host()
def test_remove_host_fail(self):
"""Verify remove_host produces expected exceptions."""
with self.assertRaisesRegexp(AnsibleFailJson, "Failed to remove host."):
with mock.patch(self.REQ_FUNC, return_value=Exception()):
self._set_args({'state': 'absent', 'name': 'beegfs_metadata1', 'host_type': 'linux dm-mp', 'force_port': False, 'group': 'test_group',
'ports': [{'label': 'beegfs_metadata1_iscsi_0', 'type': 'iscsi',
'port': 'iqn.1993-08.org.debian.beegfs-metadata:01:69e4efdf30b8'}]})
host = Host()
host.host_obj = {"id": "84000000600A098000A4B9D10030370B5D430109"}
host.remove_host()
def test_build_success_payload(self):
"""Validate success payload."""
def _assigned_host_ports(apply_unassigning=False):
return None
self._set_args({'state': 'present', 'name': 'beegfs_metadata1', 'host_type': 'windows', 'force_port': True, 'group': 'test_group',
'ports': [{'label': 'beegfs_metadata1_iscsi_1', 'type': 'iscsi', 'port': 'iqn.1993-08.org.debian.beegfs-storage1:01:b0621126818'}]})
host = Host()
self.assertEqual(host.build_success_payload(), {'api_url': 'http://localhost/', 'ssid': '1'})

View file

@ -1,163 +0,0 @@
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_hostgroup import NetAppESeriesHostGroup
from ansible_collections.community.general.tests.unit.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
try:
from unittest import mock
except ImportError:
import mock
class HostTest(ModuleTestCase):
    """Unit tests for the netapp_e_hostgroup module's NetAppESeriesHostGroup class."""
    # Minimum module arguments required by every test case.
    REQUIRED_PARAMS = {"api_username": "rw",
                       "api_password": "password",
                       "api_url": "http://localhost",
                       "ssid": "1"}
    # Dotted path of the request method patched throughout these tests.
    REQ_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_hostgroup.NetAppESeriesHostGroup.request'
    # Canned REST response: three hosts, host2/host3 share a cluster (group) ref.
    HOSTS_GET_RESPONSE = [
        {"hostRef": "84000000600A098000A4B28D0030102E5C3DFC0F",
         "clusterRef": "85000000600A098000A4B28D0036102C5C3DFC08", "id": "84000000600A098000A4B28D0030102E5C3DFC0F",
         "name": "host1"},
        {"hostRef": "84000000600A098000A4B28D003010315C3DFC11",
         "clusterRef": "85000000600A098000A4B9D100360F765C3DFC1C", "id": "84000000600A098000A4B28D003010315C3DFC11",
         "name": "host2"},
        {"hostRef": "84000000600A098000A4B28D003010345C3DFC14",
         "clusterRef": "85000000600A098000A4B9D100360F765C3DFC1C", "id": "84000000600A098000A4B28D003010345C3DFC14",
         "name": "host3"}]
    # Canned REST response: three host groups.
    HOSTGROUPS_GET_RESPONSE = [
        {"clusterRef": "85000000600A098000A4B28D0036102C5C3DFC08", "id": "85000000600A098000A4B28D0036102C5C3DFC08",
         "name": "group1"},
        {"clusterRef": "85000000600A098000A4B9D100360F765C3DFC1C", "id": "85000000600A098000A4B9D100360F765C3DFC1C",
         "name": "group2"},
        {"clusterRef": "85000000600A098000A4B9D100360F775C3DFC1E", "id": "85000000600A098000A4B9D100360F775C3DFC1E",
         "name": "group3"}]
    def _set_args(self, args):
        """Merge test-specific args over REQUIRED_PARAMS and register them as module args."""
        self.module_args = self.REQUIRED_PARAMS.copy()
        self.module_args.update(args)
        set_module_args(self.module_args)
    def test_hosts_fail(self):
        """Ensure that the hosts property method fails when self.request throws an exception."""
        self._set_args({"state": "present", "name": "hostgroup1", "hosts": ["host1", "host2"]})
        hostgroup_object = NetAppESeriesHostGroup()
        with self.assertRaises(AnsibleFailJson):
            with mock.patch(self.REQ_FUNC, return_value=Exception()):
                hosts = hostgroup_object.hosts
        # An empty host list means the requested hosts cannot be resolved.
        with mock.patch(self.REQ_FUNC, return_value=(200, [])):
            with self.assertRaisesRegexp(AnsibleFailJson, "Expected host does not exist"):
                hosts = hostgroup_object.hosts
    def test_hosts_pass(self):
        """Evaluate hosts property method for valid returned data structure."""
        expected_host_list = ['84000000600A098000A4B28D003010315C3DFC11', '84000000600A098000A4B28D0030102E5C3DFC0F']
        # Hosts may be specified either by name or by reference id; both resolve to refs.
        for hostgroup_hosts in [["host1", "host2"], ["84000000600A098000A4B28D0030102E5C3DFC0F",
                                                     "84000000600A098000A4B28D003010315C3DFC11"]]:
            self._set_args({"state": "present", "name": "hostgroup1", "hosts": hostgroup_hosts})
            hostgroup_object = NetAppESeriesHostGroup()
            with mock.patch(self.REQ_FUNC, return_value=(200, self.HOSTS_GET_RESPONSE)):
                for item in hostgroup_object.hosts:
                    self.assertTrue(item in expected_host_list)
        # Create hostgroup with no hosts
        self._set_args({"state": "present", "name": "hostgroup1"})
        hostgroup_object = NetAppESeriesHostGroup()
        with mock.patch(self.REQ_FUNC, return_value=(200, [])):
            self.assertEqual(hostgroup_object.hosts, [])
    def test_host_groups_fail(self):
        """Ensure that the host_groups property method fails when self.request throws an exception."""
        self._set_args({"state": "present", "name": "hostgroup1", "hosts": ["host1", "host2"]})
        hostgroup_object = NetAppESeriesHostGroup()
        with self.assertRaises(AnsibleFailJson):
            with mock.patch(self.REQ_FUNC, return_value=Exception()):
                host_groups = hostgroup_object.host_groups
    def test_host_groups_pass(self):
        """Evaluate host_groups property method for valid return data structure."""
        # Groups are expected to be joined with their member hosts' refs.
        expected_groups = [
            {'hosts': ['84000000600A098000A4B28D0030102E5C3DFC0F'], 'id': '85000000600A098000A4B28D0036102C5C3DFC08',
             'name': 'group1'},
            {'hosts': ['84000000600A098000A4B28D003010315C3DFC11', '84000000600A098000A4B28D003010345C3DFC14'],
             'id': '85000000600A098000A4B9D100360F765C3DFC1C', 'name': 'group2'},
            {'hosts': [], 'id': '85000000600A098000A4B9D100360F775C3DFC1E', 'name': 'group3'}]
        self._set_args({"state": "present", "name": "hostgroup1", "hosts": ["host1", "host2"]})
        hostgroup_object = NetAppESeriesHostGroup()
        with mock.patch(self.REQ_FUNC,
                        side_effect=[(200, self.HOSTGROUPS_GET_RESPONSE), (200, self.HOSTS_GET_RESPONSE)]):
            self.assertEqual(hostgroup_object.host_groups, expected_groups)
    @mock.patch.object(NetAppESeriesHostGroup, "host_groups")
    @mock.patch.object(NetAppESeriesHostGroup, "hosts")
    @mock.patch.object(NetAppESeriesHostGroup, "create_host_group")
    @mock.patch.object(NetAppESeriesHostGroup, "update_host_group")
    @mock.patch.object(NetAppESeriesHostGroup, "delete_host_group")
    def test_apply_pass(self, fake_delete_host_group, fake_update_host_group, fake_create_host_group, fake_hosts,
                        fake_host_groups):
        """Apply desired host group state to the storage array."""
        # NOTE(review): mock.patch.object decorators inject bottom-up, so
        # fake_delete_host_group corresponds to the bottom-most decorator, etc.
        hosts_response = ['84000000600A098000A4B28D003010315C3DFC11', '84000000600A098000A4B28D0030102E5C3DFC0F']
        host_groups_response = [
            {'hosts': ['84000000600A098000A4B28D0030102E5C3DFC0F'], 'id': '85000000600A098000A4B28D0036102C5C3DFC08',
             'name': 'group1'},
            {'hosts': ['84000000600A098000A4B28D003010315C3DFC11', '84000000600A098000A4B28D003010345C3DFC14'],
             'id': '85000000600A098000A4B9D100360F765C3DFC1C', 'name': 'group2'},
            {'hosts': [], 'id': '85000000600A098000A4B9D100360F775C3DFC1E', 'name': 'group3'}]
        fake_host_groups.return_value = host_groups_response
        fake_hosts.return_value = hosts_response
        fake_create_host_group.return_value = lambda x: "Host group created!"
        fake_update_host_group.return_value = lambda x: "Host group updated!"
        fake_delete_host_group.return_value = lambda x: "Host group deleted!"
        # Test create new host group
        self._set_args({"state": "present", "name": "hostgroup1", "hosts": ["host1", "host2"]})
        hostgroup_object = NetAppESeriesHostGroup()
        with self.assertRaises(AnsibleExitJson):
            hostgroup_object.apply()
        # Test make no changes to existing host group
        self._set_args({"state": "present", "name": "group1", "hosts": ["host1"]})
        hostgroup_object = NetAppESeriesHostGroup()
        with self.assertRaises(AnsibleExitJson):
            hostgroup_object.apply()
        # Test add host to existing host group
        self._set_args({"state": "present", "name": "group1", "hosts": ["host1", "host2"]})
        hostgroup_object = NetAppESeriesHostGroup()
        with self.assertRaises(AnsibleExitJson):
            hostgroup_object.apply()
        # Test delete existing host group
        self._set_args({"state": "absent", "name": "group1"})
        hostgroup_object = NetAppESeriesHostGroup()
        with self.assertRaises(AnsibleExitJson):
            hostgroup_object.apply()
    @mock.patch.object(NetAppESeriesHostGroup, "host_groups")
    @mock.patch.object(NetAppESeriesHostGroup, "hosts")
    def test_apply_fail(self, fake_hosts, fake_host_groups):
        """Verify apply fails when a new host group is requested without a name."""
        hosts_response = ['84000000600A098000A4B28D003010315C3DFC11', '84000000600A098000A4B28D0030102E5C3DFC0F']
        host_groups_response = [
            {'hosts': ['84000000600A098000A4B28D0030102E5C3DFC0F'], 'id': '85000000600A098000A4B28D0036102C5C3DFC08',
             'name': 'group1'},
            {'hosts': ['84000000600A098000A4B28D003010315C3DFC11', '84000000600A098000A4B28D003010345C3DFC14'],
             'id': '85000000600A098000A4B9D100360F765C3DFC1C', 'name': 'group2'},
            {'hosts': [], 'id': '85000000600A098000A4B9D100360F775C3DFC1E', 'name': 'group3'}]
        fake_host_groups.return_value = host_groups_response
        fake_hosts.return_value = hosts_response
        # The supplied id does not match any existing group, so creation is implied
        # and the missing 'name' option must trigger a failure.
        self._set_args(
            {"state": "present", "id": "84000000600A098000A4B28D0030102E5C3DFC0F", "hosts": ["host1", "host2"]})
        hostgroup_object = NetAppESeriesHostGroup()
        with self.assertRaisesRegexp(AnsibleFailJson,
                                     "The option name must be supplied when creating a new host group."):
            hostgroup_object.apply()

View file

@ -1,245 +0,0 @@
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_iscsi_interface import IscsiInterface
from ansible_collections.community.general.tests.unit.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
__metaclass__ = type
import mock
class IscsiInterfaceTest(ModuleTestCase):
    """Unit tests for the netapp_e_iscsi_interface module's IscsiInterface class."""
    # Minimum module arguments required by every test case.
    REQUIRED_PARAMS = {
        'api_username': 'rw',
        'api_password': 'password',
        'api_url': 'http://localhost',
        'ssid': '1',
        'state': 'disabled',
        'name': 1,
        'controller': 'A',
    }
    # Dotted path of the request function patched throughout these tests.
    REQ_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_iscsi_interface.request'

    def _set_args(self, args=None):
        """Merge test-specific args over REQUIRED_PARAMS and register them as module args."""
        module_args = self.REQUIRED_PARAMS.copy()
        if args is not None:
            module_args.update(args)
        set_module_args(module_args)

    def test_validate_params(self):
        """Ensure we can pass valid parameters to the module"""
        # Provide a range of valid values for each
        for controller in ['A', 'B']:
            for i in range(1, 10):
                for mtu in [1500, 2500, 9000]:
                    self._set_args(dict(
                        state='disabled',
                        name=i,
                        controller=controller,
                        mtu=mtu,
                    ))
                    iface = IscsiInterface()

    def test_invalid_params(self):
        """Ensure that our input validation catches invalid parameters"""
        # Currently a 'C' controller is invalid
        self._set_args(dict(
            state='disabled',
            name=1,
            controller="C",
        ))
        with self.assertRaises(AnsibleFailJson) as result:
            iface = IscsiInterface()
        # Each of these mtu values are invalid
        for mtu in [500, 1499, 9001]:
            self._set_args({
                'state': 'disabled',
                'name': 1,
                'controller': 'A',
                'mtu': mtu
            })
            with self.assertRaises(AnsibleFailJson) as result:
                iface = IscsiInterface()

    def test_interfaces(self):
        """Validate that we are processing the interface list properly"""
        self._set_args()
        interfaces = [
            dict(interfaceType='iscsi',
                 iscsi=dict()),
            dict(interfaceType='iscsi',
                 iscsi=dict()),
            dict(interfaceType='fc', )
        ]
        # Ensure we filter out anything without an interfaceType of iscsi
        expected = [iface['iscsi'] for iface in interfaces if iface['interfaceType'] == 'iscsi']
        # We expect a single call to the API: retrieve the list of interfaces from the objectGraph.
        with mock.patch(self.REQ_FUNC, return_value=(200, interfaces)):
            iface = IscsiInterface()
            interfaces = iface.interfaces
            self.assertEqual(interfaces, expected)

    def test_interfaces_fail(self):
        """Ensure we fail gracefully on an error to retrieve the interfaces"""
        self._set_args()
        with self.assertRaises(AnsibleFailJson) as result:
            # Simulate a failed call to the API
            with mock.patch(self.REQ_FUNC, side_effect=Exception("Failure")):
                iface = IscsiInterface()
                interfaces = iface.interfaces

    def test_fetch_target_interface_bad_channel(self):
        """Ensure we fail correctly when a bad channel is provided"""
        self._set_args()
        interfaces = list(dict(channel=1, controllerId='1'))
        with self.assertRaisesRegexp(AnsibleFailJson, r".*?channels include.*"):
            with mock.patch.object(IscsiInterface, 'interfaces', return_value=interfaces):
                iface = IscsiInterface()
                interfaces = iface.fetch_target_interface()

    def test_make_update_body_dhcp(self):
        """Ensure the update body generates correctly for a transition from static to dhcp"""
        self._set_args(dict(state='enabled',
                            config_method='dhcp')
                       )
        iface = dict(id='1',
                     ipv4Enabled=False,
                     ipv4Data=dict(ipv4AddressData=dict(ipv4Address="0.0.0.0",
                                                        ipv4SubnetMask="0.0.0.0",
                                                        ipv4GatewayAddress="0.0.0.0", ),
                                   ipv4AddressConfigMethod='configStatic', ),
                     interfaceData=dict(ethernetData=dict(maximumFramePayloadSize=1500, ), ),
                     )
        # Test a transition from static to dhcp
        inst = IscsiInterface()
        update, body = inst.make_update_body(iface)
        self.assertTrue(update, msg="An update was expected!")
        self.assertEqual(body['settings']['ipv4Enabled'][0], True)
        self.assertEqual(body['settings']['ipv4AddressConfigMethod'][0], 'configDhcp')

    def test_make_update_body_static(self):
        """Ensure the update body generates correctly for a transition from dhcp to static"""
        iface = dict(id='1',
                     ipv4Enabled=False,
                     ipv4Data=dict(ipv4AddressConfigMethod='configDhcp',
                                   ipv4AddressData=dict(ipv4Address="0.0.0.0",
                                                        ipv4SubnetMask="0.0.0.0",
                                                        ipv4GatewayAddress="0.0.0.0", ), ),
                     interfaceData=dict(ethernetData=dict(maximumFramePayloadSize=1500, ), ), )
        self._set_args(dict(state='enabled',
                            config_method='static',
                            address='10.10.10.10',
                            subnet_mask='255.255.255.0',
                            gateway='1.1.1.1'))
        inst = IscsiInterface()
        update, body = inst.make_update_body(iface)
        self.assertTrue(update, msg="An update was expected!")
        self.assertEqual(body['settings']['ipv4Enabled'][0], True)
        self.assertEqual(body['settings']['ipv4AddressConfigMethod'][0], 'configStatic')
        self.assertEqual(body['settings']['ipv4Address'][0], '10.10.10.10')
        self.assertEqual(body['settings']['ipv4SubnetMask'][0], '255.255.255.0')
        self.assertEqual(body['settings']['ipv4GatewayAddress'][0], '1.1.1.1')

    # Controller label -> controller reference id mapping used by the update tests.
    CONTROLLERS = dict(A='1', B='2')

    def test_update_bad_controller(self):
        """Ensure a bad controller fails gracefully"""
        self._set_args(dict(controller='B'))
        inst = IscsiInterface()
        with self.assertRaises(AnsibleFailJson) as result:
            with mock.patch.object(inst, 'get_controllers', return_value=dict(A='1')) as get_controllers:
                inst()

    @mock.patch.object(IscsiInterface, 'get_controllers', return_value=CONTROLLERS)
    def test_update(self, get_controllers):
        """Validate the good path"""
        self._set_args()
        inst = IscsiInterface()
        with self.assertRaises(AnsibleExitJson):
            with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
                with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
                    with mock.patch.object(inst, 'make_update_body', return_value=(True, {})):
                        inst()
        request.assert_called_once()

    @mock.patch.object(IscsiInterface, 'get_controllers', return_value=CONTROLLERS)
    def test_update_not_required(self, get_controllers):
        """Ensure we don't trigger the update if one isn't required or if check mode is enabled"""
        self._set_args()
        # make_update_body will report that no change is required, so we should see no call to the API.
        inst = IscsiInterface()
        with self.assertRaises(AnsibleExitJson) as result:
            with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
                with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
                    with mock.patch.object(inst, 'make_update_body', return_value=(False, {})):
                        inst()
        request.assert_not_called()
        self.assertFalse(result.exception.args[0]['changed'], msg="No change was expected.")
        # Since check_mode is enabled, we will run everything normally, but not make a request to the API
        # to perform the actual change.
        inst = IscsiInterface()
        inst.check_mode = True
        with self.assertRaises(AnsibleExitJson) as result:
            with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
                with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
                    with mock.patch.object(inst, 'make_update_body', return_value=(True, {})):
                        inst()
        request.assert_not_called()
        self.assertTrue(result.exception.args[0]['changed'], msg="A change was expected.")

    @mock.patch.object(IscsiInterface, 'get_controllers', return_value=CONTROLLERS)
    def test_update_fail_busy(self, get_controllers):
        """Ensure we fail correctly on receiving a busy response from the API."""
        self._set_args()
        inst = IscsiInterface()
        with self.assertRaisesRegexp(AnsibleFailJson, r".*?busy.*") as result:
            with mock.patch(self.REQ_FUNC, return_value=(422, dict(retcode="3"))) as request:
                with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
                    with mock.patch.object(inst, 'make_update_body', return_value=(True, {})):
                        inst()
        request.assert_called_once()

    @mock.patch.object(IscsiInterface, 'get_controllers', return_value=CONTROLLERS)
    @mock.patch.object(IscsiInterface, 'make_update_body', return_value=(True, {}))
    def test_update_fail(self, make_body, get_controllers):
        """Ensure we fail correctly on receiving a normal failure from the API."""
        # FIX: mock.patch.object decorators inject mocks bottom-up, so the first
        # parameter receives the make_update_body mock and the second receives
        # get_controllers; the original parameter names were swapped.
        self._set_args()
        inst = IscsiInterface()
        # Test a 422 error with a non-busy status
        with self.assertRaisesRegexp(AnsibleFailJson, r".*?Failed to modify.*") as result:
            with mock.patch(self.REQ_FUNC, return_value=(422, mock.MagicMock())) as request:
                with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
                    inst()
        request.assert_called_once()
        # Test a 401 (authentication) error
        with self.assertRaisesRegexp(AnsibleFailJson, r".*?Failed to modify.*") as result:
            with mock.patch(self.REQ_FUNC, return_value=(401, mock.MagicMock())) as request:
                with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
                    inst()
        request.assert_called_once()
        # Test with a connection failure
        with self.assertRaisesRegexp(AnsibleFailJson, r".*?Connection failure.*") as result:
            with mock.patch(self.REQ_FUNC, side_effect=Exception()) as request:
                with mock.patch.object(inst, 'fetch_target_interface', side_effect=[{}, mock.MagicMock()]):
                    inst()
        request.assert_called_once()

View file

@ -1,132 +0,0 @@
# coding=utf-8
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_iscsi_target import IscsiTarget
from ansible_collections.community.general.tests.unit.modules.utils import AnsibleFailJson, ModuleTestCase, set_module_args
__metaclass__ = type
import mock
from ansible_collections.community.general.tests.unit.compat.mock import PropertyMock
class IscsiTargetTest(ModuleTestCase):
REQUIRED_PARAMS = {
'api_username': 'rw',
'api_password': 'password',
'api_url': 'http://localhost',
'ssid': '1',
'name': 'abc',
}
CHAP_SAMPLE = 'a' * 14
REQ_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_iscsi_target.request'
def _set_args(self, args=None):
module_args = self.REQUIRED_PARAMS.copy()
if args is not None:
module_args.update(args)
set_module_args(module_args)
    def test_validate_params(self):
        """Ensure we can pass valid parameters to the module"""
        # CHAP secrets between 12 and 56 characters should be accepted
        # (the companion failure test treats 11 and 58 as invalid).
        for i in range(12, 57):
            secret = 'a' * i
            self._set_args(dict(chap=secret))
            tgt = IscsiTarget()
    def test_invalid_chap_secret(self):
        """CHAP secrets that are too short or too long must be rejected."""
        for secret in [11 * 'a', 58 * 'a']:
            with self.assertRaisesRegexp(AnsibleFailJson, r'.*?CHAP secret is not valid.*') as result:
                self._set_args(dict(chap=secret))
                tgt = IscsiTarget()
    def test_apply_iscsi_settings(self):
        """Ensure that the presence of CHAP always triggers an update."""
        self._set_args(dict(chap=self.CHAP_SAMPLE))
        tgt = IscsiTarget()
        # Current target state reports CHAP enabled.
        fake = dict(alias=self.REQUIRED_PARAMS.get('name'), chap=True)
        # We don't care about the return here
        with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
            with mock.patch.object(IscsiTarget, 'target', new_callable=PropertyMock) as call:
                call.return_value = fake
                self.assertTrue(tgt.apply_iscsi_settings())
                self.assertTrue(request.called, msg="An update was expected!")
                # Retest with check_mode enabled: a change is reported but no request is made.
                tgt.check_mode = True
                request.reset_mock()
                self.assertTrue(tgt.apply_iscsi_settings())
                self.assertFalse(request.called, msg="No update was expected in check_mode!")
def test_apply_iscsi_settings_no_change(self):
"""Ensure that we don't make unnecessary requests or updates"""
name = 'abc'
self._set_args(dict(alias=name))
fake = dict(alias=name, chap=False)
with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
with mock.patch.object(IscsiTarget, 'target', new_callable=PropertyMock) as call:
call.return_value = fake
tgt = IscsiTarget()
self.assertFalse(tgt.apply_iscsi_settings())
self.assertFalse(request.called, msg="No update was expected!")
def test_apply_iscsi_settings_fail(self):
"""Ensure we handle request failures cleanly"""
self._set_args()
fake = dict(alias='', chap=True)
with self.assertRaisesRegexp(AnsibleFailJson, r".*?update.*"):
with mock.patch(self.REQ_FUNC, side_effect=Exception) as request:
with mock.patch.object(IscsiTarget, 'target', new_callable=PropertyMock) as call:
call.return_value = fake
tgt = IscsiTarget()
tgt.apply_iscsi_settings()
def test_apply_target_changes(self):
"""Ensure that changes trigger an update."""
self._set_args(dict(ping=True, unnamed_discovery=True))
tgt = IscsiTarget()
# CHAP is enabled
fake = dict(ping=False, unnamed_discovery=False)
# We don't care about the return here
with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
with mock.patch.object(IscsiTarget, 'target', new_callable=PropertyMock) as call:
call.return_value = fake
self.assertTrue(tgt.apply_target_changes())
self.assertTrue(request.called, msg="An update was expected!")
# Retest with check_mode enabled
tgt.check_mode = True
request.reset_mock()
self.assertTrue(tgt.apply_target_changes())
self.assertFalse(request.called, msg="No update was expected in check_mode!")
def test_apply_target_changes_no_change(self):
"""Ensure that we don't make unnecessary requests or updates"""
self._set_args(dict(ping=True, unnamed_discovery=True))
fake = dict(ping=True, unnamed_discovery=True)
with mock.patch(self.REQ_FUNC, return_value=(200, "")) as request:
with mock.patch.object(IscsiTarget, 'target', new_callable=PropertyMock) as call:
call.return_value = fake
tgt = IscsiTarget()
self.assertFalse(tgt.apply_target_changes())
self.assertFalse(request.called, msg="No update was expected!")
def test_apply_target_changes_fail(self):
"""Ensure we handle request failures cleanly"""
self._set_args()
fake = dict(ping=False, unnamed_discovery=False)
with self.assertRaisesRegexp(AnsibleFailJson, r".*?update.*"):
with mock.patch(self.REQ_FUNC, side_effect=Exception) as request:
with mock.patch.object(IscsiTarget, 'target', new_callable=PropertyMock) as call:
call.return_value = fake
tgt = IscsiTarget()
tgt.apply_target_changes()

View file

@ -1,430 +0,0 @@
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
import os
import shutil
import tempfile
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_ldap import Ldap
from ansible_collections.community.general.tests.unit.modules.utils import ModuleTestCase, set_module_args, AnsibleFailJson, AnsibleExitJson
__metaclass__ = type
from ansible_collections.community.general.tests.unit.compat import mock
class LdapTest(ModuleTestCase):
    """Unit tests for the netapp_e_ldap module's Ldap class."""
    # Minimum module arguments required to instantiate Ldap.
    REQUIRED_PARAMS = {
        'api_username': 'admin',
        'api_password': 'password',
        'api_url': 'http://localhost',
        'ssid': '1',
        'state': 'absent',
    }
    # Dotted path of the module's request() helper; patched in every test.
    REQ_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_ldap.request'
    def setUp(self):
        # Create a scratch directory so the module's debug log never touches
        # the real filesystem layout; removed again in tearDown().
        super(LdapTest, self).setUp()
        self.temp_dir = tempfile.mkdtemp('ansible-test_netapp_e_ldap-')
        self.REQUIRED_PARAMS['log_path'] = os.path.join(self.temp_dir, 'debug.log')
    def tearDown(self):
        super(LdapTest, self).tearDown()
        shutil.rmtree(self.temp_dir)
    def _make_ldap_instance(self):
        # Build an Ldap instance with a stubbed base path so tests do not
        # need to resolve it through the API.
        self._set_args()
        ldap = Ldap()
        ldap.base_path = '/'
        return ldap
    def _set_args(self, **kwargs):
        # Merge per-test overrides onto the required parameters and install
        # them as the module arguments.
        module_args = self.REQUIRED_PARAMS.copy()
        module_args.update(kwargs)
        set_module_args(module_args)
    def test_init_defaults(self):
        """Validate a basic run with required arguments set."""
        self._set_args(log_path=None,
                       state='present',
                       username='myBindAcct',
                       password='myBindPass',
                       server='ldap://example.com:384',
                       search_base='OU=Users,DC=example,DC=com',
                       role_mappings={'.*': ['storage.monitor']},
                       )
        ldap = Ldap()
    def test_init(self):
        """Validate a basic run with required arguments set."""
        self._set_args(log_path=None)
        ldap = Ldap()
    def test_is_embedded(self):
        """Ensure we can properly detect the type of Web Services instance we're utilizing."""
        self._set_args()
        # runningAsProxy=False -> embedded instance.
        result = dict(runningAsProxy=False)
        with mock.patch(self.REQ_FUNC, return_value=(200, result)):
            ldap = Ldap()
            embedded = ldap.is_embedded()
            self.assertTrue(embedded)
        # runningAsProxy=True -> proxy instance, not embedded.
        result = dict(runningAsProxy=True)
        with mock.patch(self.REQ_FUNC, return_value=(200, result)):
            ldap = Ldap()
            embedded = ldap.is_embedded()
            self.assertFalse(embedded)
    def test_is_embedded_fail(self):
        """Ensure we fail gracefully when fetching the About data."""
        self._set_args()
        with self.assertRaises(AnsibleFailJson):
            with mock.patch(self.REQ_FUNC, side_effect=Exception):
                ldap = Ldap()
                ldap.is_embedded()
    def test_get_full_configuration(self):
        # The full configuration is returned verbatim on HTTP 200.
        self._set_args()
        resp = dict(result=None)
        with mock.patch(self.REQ_FUNC, return_value=(200, resp)):
            ldap = self._make_ldap_instance()
            result = ldap.get_full_configuration()
            self.assertEqual(resp, result)
    def test_get_full_configuration_failure(self):
        # A raising request() must surface as a module failure.
        self._set_args()
        resp = dict(result=None)
        with self.assertRaises(AnsibleFailJson):
            with mock.patch(self.REQ_FUNC, side_effect=Exception):
                ldap = self._make_ldap_instance()
                ldap.get_full_configuration()
    def test_get_configuration(self):
        # HTTP 200 returns the configuration; HTTP 404 (no such domain)
        # yields None rather than failing.
        self._set_args()
        resp = dict(result=None)
        with mock.patch(self.REQ_FUNC, return_value=(200, resp)):
            ldap = self._make_ldap_instance()
            result = ldap.get_configuration('')
            self.assertEqual(resp, result)
        with mock.patch(self.REQ_FUNC, return_value=(404, resp)):
            ldap = self._make_ldap_instance()
            result = ldap.get_configuration('')
            self.assertIsNone(result)
    def test_clear_configuration(self):
        self._set_args()
        # No changes are required if the domains are empty
        config = dict(ldapDomains=[])
        ldap = self._make_ldap_instance()
        with mock.patch.object(ldap, 'get_full_configuration', return_value=config):
            with mock.patch(self.REQ_FUNC, return_value=(204, None)):
                msg, result = ldap.clear_configuration()
                self.assertFalse(result)
        config = dict(ldapDomains=['abc'])
        # When domains exist, we need to clear
        ldap = self._make_ldap_instance()
        with mock.patch.object(ldap, 'get_full_configuration', return_value=config):
            with mock.patch(self.REQ_FUNC, return_value=(204, None)) as req:
                msg, result = ldap.clear_configuration()
                self.assertTrue(result)
                self.assertTrue(req.called)
                # Valid check_mode makes no changes
                req.reset_mock()
                ldap.check_mode = True
                msg, result = ldap.clear_configuration()
                self.assertTrue(result)
                self.assertFalse(req.called)
    def test_clear_single_configuration(self):
        self._set_args()
        # An existing single configuration must be cleared (change reported);
        # in check_mode the change is reported without issuing a request.
        config = 'abc'
        ldap = self._make_ldap_instance()
        with mock.patch.object(ldap, 'get_configuration', return_value=config):
            with mock.patch(self.REQ_FUNC, return_value=(204, None)) as req:
                msg, result = ldap.clear_single_configuration()
                self.assertTrue(result)
                # Valid check_mode makes no changes
                req.reset_mock()
                ldap.check_mode = True
                msg, result = ldap.clear_single_configuration()
                self.assertTrue(result)
                self.assertFalse(req.called)
        # A missing configuration means nothing to clear: no change, no call.
        ldap = self._make_ldap_instance()
        with mock.patch.object(ldap, 'get_configuration', return_value=None):
            with mock.patch(self.REQ_FUNC, return_value=(204, None)) as req:
                msg, result = ldap.clear_single_configuration()
                self.assertFalse(result)
                self.assertFalse(req.called)
    def test_update_configuration(self):
        self._set_args()
        # Desired body differs from the current configuration, so an update
        # is required; check_mode reports the change without a request.
        config = dict(id='abc')
        body = dict(id='xyz')
        ldap = self._make_ldap_instance()
        with mock.patch.object(ldap, 'make_configuration', return_value=body):
            with mock.patch.object(ldap, 'get_configuration', return_value=config):
                with mock.patch(self.REQ_FUNC, return_value=(200, None)) as req:
                    msg, result = ldap.update_configuration()
                    self.assertTrue(result)
                    # Valid check_mode makes no changes
                    req.reset_mock()
                    ldap.check_mode = True
                    msg, result = ldap.update_configuration()
                    self.assertTrue(result)
                    self.assertFalse(req.called)
    def test_update(self):
        # With ldap enabled, update() must delegate to update_configuration()
        # and exit the module (AnsibleExitJson).
        self._set_args()
        ldap = self._make_ldap_instance()
        with self.assertRaises(AnsibleExitJson):
            with mock.patch.object(ldap, 'get_base_path', return_value='/'):
                with mock.patch.object(ldap, 'update_configuration', return_value=('', True)) as update:
                    ldap.ldap = True
                    msg, result = ldap.update()
                    self.assertTrue(result)
                    self.assertTrue(update.called)
    def test_update_disable(self):
        # With ldap disabled and a specific identifier, update() must clear
        # only that single configuration.
        self._set_args()
        ldap = self._make_ldap_instance()
        with self.assertRaises(AnsibleExitJson):
            with mock.patch.object(ldap, 'get_base_path', return_value='/'):
                with mock.patch.object(ldap, 'clear_single_configuration', return_value=('', True)) as update:
                    ldap.ldap = False
                    ldap.identifier = 'abc'
                    msg, result = ldap.update()
                    self.assertTrue(result)
                    self.assertTrue(update.called)
    def test_update_disable_all(self):
        # With ldap disabled and no identifier, update() must clear the
        # entire configuration.
        self._set_args()
        ldap = self._make_ldap_instance()
        with self.assertRaises(AnsibleExitJson):
            with mock.patch.object(ldap, 'get_base_path', return_value='/'):
                with mock.patch.object(ldap, 'clear_configuration', return_value=('', True)) as update:
                    ldap.ldap = False
                    msg, result = ldap.update()
                    self.assertTrue(result)
                    self.assertTrue(update.called)
    def test_get_configuration_failure(self):
        self._set_args()
        with self.assertRaises(AnsibleFailJson):
            with mock.patch(self.REQ_FUNC, side_effect=Exception):
                ldap = self._make_ldap_instance()
                ldap.get_configuration('')
        # We expect this for any code not in [200, 404]
        with self.assertRaises(AnsibleFailJson):
            with mock.patch(self.REQ_FUNC, return_value=(401, '')):
                ldap = self._make_ldap_instance()
                result = ldap.get_configuration('')
                # NOTE(review): this assertion is unreachable -- the 401
                # response triggers fail_json above, so execution never
                # reaches this line.
                self.assertIsNone(result)
    def test_make_configuration(self):
        """Validate the make_configuration method that translates Ansible params to the input body"""
        data = dict(log_path=None,
                    state='present',
                    username='myBindAcct',
                    password='myBindPass',
                    server='ldap://example.com:384',
                    search_base='OU=Users,DC=example,DC=com',
                    role_mappings={'.*': ['storage.monitor']},
                    )
        self._set_args(**data)
        ldap = Ldap()
        # Expected request body: names derived from the server hostname,
        # role_mappings flattened into roleMapCollection entries.
        expected = dict(id='default',
                        bindLookupUser=dict(user=data['username'],
                                            password=data['password'], ),
                        groupAttributes=['memberOf'],
                        ldapUrl=data['server'],
                        names=['example.com'],
                        searchBase=data['search_base'],
                        roleMapCollection=[{"groupRegex": ".*",
                                            "ignoreCase": True,
                                            "name": "storage.monitor"
                                            }
                                           ],
                        userAttribute='sAMAccountName'
                        )
        actual = ldap.make_configuration()
        self.maxDiff = None
        self.assertEqual(expected, actual)
    # NOTE(review): a large block of commented-out ASUP test code (copied
    # from the netapp_e_asup tests and referencing the unrelated Asup class)
    # used to live here; it was dead code and has been removed.

View file

@ -1,682 +0,0 @@
# coding=utf-8
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_mgmt_interface import MgmtInterface
from ansible_collections.community.general.tests.unit.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
__metaclass__ = type
import mock
from ansible_collections.community.general.tests.unit.compat.mock import PropertyMock
class MgmtInterfaceTest(ModuleTestCase):
REQUIRED_PARAMS = {
'api_username': 'rw',
'api_password': 'password',
'api_url': 'http://localhost',
'ssid': '1',
}
TEST_DATA = [
{
"controllerRef": "070000000000000000000001",
"controllerSlot": 1,
"interfaceName": "wan0",
"interfaceRef": "2800070000000000000000000001000000000000",
"channel": 1,
"alias": "creG1g-AP-a",
"ipv4Enabled": True,
"ipv4Address": "10.1.1.10",
"linkStatus": "up",
"ipv4SubnetMask": "255.255.255.0",
"ipv4AddressConfigMethod": "configStatic",
"ipv4GatewayAddress": "10.1.1.1",
"ipv6Enabled": False,
"physicalLocation": {
"slot": 0,
},
"dnsProperties": {
"acquisitionProperties": {
"dnsAcquisitionType": "stat",
"dnsServers": [
{
"addressType": "ipv4",
"ipv4Address": "10.1.0.250",
},
{
"addressType": "ipv4",
"ipv4Address": "10.10.0.20",
}
]
},
"dhcpAcquiredDnsServers": []
},
"ntpProperties": {
"acquisitionProperties": {
"ntpAcquisitionType": "disabled",
"ntpServers": None
},
"dhcpAcquiredNtpServers": []
},
},
{
"controllerRef": "070000000000000000000001",
"controllerSlot": 1,
"interfaceName": "wan1",
"interfaceRef": "2800070000000000000000000001000000000000",
"channel": 2,
"alias": "creG1g-AP-a",
"ipv4Enabled": True,
"ipv4Address": "0.0.0.0",
"ipv4SubnetMask": "0.0.0.0",
"ipv4AddressConfigMethod": "configDhcp",
"ipv4GatewayAddress": "10.1.1.1",
"ipv6Enabled": False,
"physicalLocation": {
"slot": 1,
},
"dnsProperties": {
"acquisitionProperties": {
"dnsAcquisitionType": "stat",
"dnsServers": [
{
"addressType": "ipv4",
"ipv4Address": "10.1.0.250",
"ipv6Address": None
},
{
"addressType": "ipv4",
"ipv4Address": "10.10.0.20",
"ipv6Address": None
}
]
},
"dhcpAcquiredDnsServers": []
},
"ntpProperties": {
"acquisitionProperties": {
"ntpAcquisitionType": "disabled",
"ntpServers": None
},
"dhcpAcquiredNtpServers": []
},
},
{
"controllerRef": "070000000000000000000002",
"controllerSlot": 2,
"interfaceName": "wan0",
"interfaceRef": "2800070000000000000000000001000000000000",
"channel": 1,
"alias": "creG1g-AP-b",
"ipv4Enabled": True,
"ipv4Address": "0.0.0.0",
"ipv4SubnetMask": "0.0.0.0",
"ipv4AddressConfigMethod": "configDhcp",
"ipv4GatewayAddress": "10.1.1.1",
"ipv6Enabled": False,
"physicalLocation": {
"slot": 0,
},
"dnsProperties": {
"acquisitionProperties": {
"dnsAcquisitionType": "stat",
"dnsServers": [
{
"addressType": "ipv4",
"ipv4Address": "10.1.0.250",
"ipv6Address": None
}
]
},
"dhcpAcquiredDnsServers": []
},
"ntpProperties": {
"acquisitionProperties": {
"ntpAcquisitionType": "stat",
"ntpServers": [
{
"addrType": "ipvx",
"domainName": None,
"ipvxAddress": {
"addressType": "ipv4",
"ipv4Address": "10.13.1.5",
"ipv6Address": None
}
},
{
"addrType": "ipvx",
"domainName": None,
"ipvxAddress": {
"addressType": "ipv4",
"ipv4Address": "10.15.1.8",
"ipv6Address": None
}
}
]
},
"dhcpAcquiredNtpServers": []
},
},
{
"controllerRef": "070000000000000000000002",
"controllerSlot": 2,
"interfaceName": "wan1",
"interfaceRef": "2801070000000000000000000001000000000000",
"channel": 2,
"alias": "creG1g-AP-b",
"ipv4Enabled": True,
"ipv4Address": "0.0.0.0",
"ipv4SubnetMask": "0.0.0.0",
"ipv4AddressConfigMethod": "configDhcp",
"ipv4GatewayAddress": "10.1.1.1",
"ipv6Enabled": False,
"physicalLocation": {
"slot": 1,
},
"dnsProperties": {
"acquisitionProperties": {
"dnsAcquisitionType": "stat",
"dnsServers": [
{
"addressType": "ipv4",
"ipv4Address": "10.19.1.2",
"ipv6Address": None
}
]
},
"dhcpAcquiredDnsServers": []
},
"ntpProperties": {
"acquisitionProperties": {
"ntpAcquisitionType": "stat",
"ntpServers": [
{
"addrType": "ipvx",
"domainName": None,
"ipvxAddress": {
"addressType": "ipv4",
"ipv4Address": "10.13.1.5",
"ipv6Address": None
}
},
{
"addrType": "ipvx",
"domainName": None,
"ipvxAddress": {
"addressType": "ipv4",
"ipv4Address": "10.15.1.18",
"ipv6Address": None
}
}
]
},
"dhcpAcquiredNtpServers": []
},
},
]
REQ_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_mgmt_interface.request'
def _set_args(self, args=None):
module_args = self.REQUIRED_PARAMS.copy()
if args is not None:
module_args.update(args)
set_module_args(module_args)
    def test_controller_property_pass(self):
        """Verify dictionary return from controller property."""
        initial = {
            "state": "enable",
            "controller": "A",
            "channel": "1",
            "address": "192.168.1.1",
            "subnet_mask": "255.255.255.1",
            "config_method": "static"}
        # API returns controllers out of slot order; slot 1 must map to the
        # label 'A' and slot 2 to 'B', with ssh taken from
        # networkSettings.remoteAccessEnabled.
        controller_request = [
            {"physicalLocation": {"slot": 2},
             "controllerRef": "070000000000000000000002",
             "networkSettings": {"remoteAccessEnabled": True}},
            {"physicalLocation": {"slot": 1},
             "controllerRef": "070000000000000000000001",
             "networkSettings": {"remoteAccessEnabled": False}}]
        expected = {
            'A': {'controllerRef': '070000000000000000000001',
                  'controllerSlot': 1, 'ssh': False},
            'B': {'controllerRef': '070000000000000000000002',
                  'controllerSlot': 2, 'ssh': True}}
        self._set_args(initial)
        mgmt_interface = MgmtInterface()
        with mock.patch(self.REQ_FUNC, return_value=(200, controller_request)):
            response = mgmt_interface.controllers
            self.assertTrue(response == expected)
def test_controller_property_fail(self):
"""Verify controllers endpoint request failure causes AnsibleFailJson exception."""
initial = {
"state": "enable",
"controller": "A",
"channel": "1",
"address": "192.168.1.1",
"subnet_mask": "255.255.255.1",
"config_method": "static"}
controller_request = [
{"physicalLocation": {"slot": 2},
"controllerRef": "070000000000000000000002",
"networkSettings": {"remoteAccessEnabled": True}},
{"physicalLocation": {"slot": 1},
"controllerRef": "070000000000000000000001",
"networkSettings": {"remoteAccessEnabled": False}}]
expected = {
'A': {'controllerRef': '070000000000000000000001',
'controllerSlot': 1, 'ssh': False},
'B': {'controllerRef': '070000000000000000000002',
'controllerSlot': 2, 'ssh': True}}
self._set_args(initial)
mgmt_interface = MgmtInterface()
with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve the controller settings."):
with mock.patch(self.REQ_FUNC, return_value=Exception):
response = mgmt_interface.controllers
    def test_interface_property_match_pass(self):
        """Verify return value from interface property."""
        initial = {
            "state": "enable",
            "controller": "A",
            "channel": "1",
            "address": "192.168.1.1",
            "subnet_mask": "255.255.255.0",
            "config_method": "static"}
        controller_request = [
            {"physicalLocation": {"slot": 2},
             "controllerRef": "070000000000000000000002",
             "networkSettings": {"remoteAccessEnabled": True}},
            {"physicalLocation": {"slot": 1},
             "controllerRef": "070000000000000000000001",
             "networkSettings": {"remoteAccessEnabled": False}}]
        # Flattened view of the first TEST_DATA entry (controller A, wan0,
        # channel 1) as the interface property is expected to return it.
        expected = {
            "dns_servers": [{"ipv4Address": "10.1.0.250", "addressType": "ipv4"},
                            {"ipv4Address": "10.10.0.20", "addressType": "ipv4"}],
            "subnet_mask": "255.255.255.0",
            "link_status": "up",
            "ntp_servers": None,
            "ntp_config_method": "disabled",
            "controllerRef": "070000000000000000000001",
            "config_method": "configStatic",
            "enabled": True,
            "gateway": "10.1.1.1",
            "alias": "creG1g-AP-a",
            "controllerSlot": 1,
            "dns_config_method": "stat",
            "id": "2800070000000000000000000001000000000000",
            "address": "10.1.1.10",
            "ipv6Enabled": False,
            "channel": 1}
        self._set_args(initial)
        mgmt_interface = MgmtInterface()
        # First request returns the interface list (TEST_DATA), the second
        # the controller list; the property must select controller A/channel 1.
        with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TEST_DATA), (200, controller_request)]):
            iface = mgmt_interface.interface
            self.assertTrue(iface == expected)
    def test_interface_property_request_exception_fail(self):
        """Verify ethernet-interfaces endpoint request failure results in AnsibleFailJson exception."""
        initial = {
            "state": "enable",
            "controller": "A",
            "channel": "1",
            "address": "192.168.1.1",
            "subnet_mask": "255.255.255.1",
            "config_method": "static"}
        controller_request = [
            {"physicalLocation": {"slot": 2},
             "controllerRef": "070000000000000000000002",
             "networkSettings": {"remoteAccessEnabled": True}},
            {"physicalLocation": {"slot": 1},
             "controllerRef": "070000000000000000000001",
             "networkSettings": {"remoteAccessEnabled": False}}]
        self._set_args(initial)
        mgmt_interface = MgmtInterface()
        # The first request (interface list) raises; the property must turn
        # that into a module failure even though the controller request would
        # have succeeded.
        with self.assertRaisesRegexp(AnsibleFailJson, r"Failed to retrieve defined management interfaces."):
            with mock.patch(self.REQ_FUNC, side_effect=[Exception, (200, controller_request)]):
                iface = mgmt_interface.interface
    def test_interface_property_no_match_fail(self):
        """Verify return value from interface property."""
        # 'name' does not match any interface in TEST_DATA, so the property
        # must fail with a "could not find an interface" error.
        initial = {
            "state": "enable",
            "controller": "A",
            "name": "wrong_name",
            "address": "192.168.1.1",
            "subnet_mask": "255.255.255.1",
            "config_method": "static"}
        controller_request = [
            {"physicalLocation": {"slot": 2},
             "controllerRef": "070000000000000000000002",
             "networkSettings": {"remoteAccessEnabled": True}},
            {"physicalLocation": {"slot": 1},
             "controllerRef": "070000000000000000000001",
             "networkSettings": {"remoteAccessEnabled": False}}]
        # NOTE(review): 'expected' is never used in this test -- it looks
        # like a leftover from the happy-path variant above.
        expected = {
            "dns_servers": [{"ipv4Address": "10.1.0.20", "addressType": "ipv4"},
                            {"ipv4Address": "10.1.0.50", "addressType": "ipv4"}],
            "subnet_mask": "255.255.255.0",
            "ntp_servers": None,
            "ntp_config_method": "disabled",
            "controllerRef": "070000000000000000000001",
            "config_method": "configStatic",
            "enabled": True,
            "gateway": "10.1.1.1",
            "alias": "creG1g-AP-a",
            "controllerSlot": 1,
            "dns_config_method": "stat",
            "id": "2800070000000000000000000001000000000000",
            "address": "10.1.1.111",
            "ipv6Enabled": False,
            "channel": 1}
        self._set_args(initial)
        mgmt_interface = MgmtInterface()
        with self.assertRaisesRegexp(AnsibleFailJson, r"We could not find an interface matching"):
            with mock.patch(self.REQ_FUNC, side_effect=[(200, self.TEST_DATA), (200, controller_request)]):
                iface = mgmt_interface.interface
def test_get_enable_interface_settings_enabled_pass(self):
"""Validate get_enable_interface_settings updates properly."""
initial = {
"state": "enable",
"controller": "A",
"name": "wrong_name",
"address": "192.168.1.1",
"subnet_mask": "255.255.255.1",
"config_method": "static"}
iface = {"enabled": False}
expected_iface = {}
self._set_args(initial)
mgmt_interface = MgmtInterface()
update, expected_iface, body = mgmt_interface.get_enable_interface_settings(iface, expected_iface, False, {})
self.assertTrue(update and expected_iface["enabled"] and body["ipv4Enabled"])
def test_get_enable_interface_settings_disabled_pass(self):
"""Validate get_enable_interface_settings updates properly."""
initial = {
"state": "disable",
"controller": "A",
"name": "wan0",
"address": "192.168.1.1",
"subnet_mask": "255.255.255.1",
"config_method": "static"}
iface = {"enabled": True}
expected_iface = {}
self._set_args(initial)
mgmt_interface = MgmtInterface()
update, expected_iface, body = mgmt_interface.get_enable_interface_settings(iface, expected_iface, False, {})
self.assertTrue(update and not expected_iface["enabled"] and not body["ipv4Enabled"])
    def test_update_array_interface_ssh_pass(self):
        """Verify get_interface_settings gives the right static configuration response."""
        # ssh=True requested while the controller currently has ssh=False,
        # so update_array must report a change.
        initial = {
            "state": "enable",
            "controller": "A",
            "name": "wan0",
            "address": "192.168.1.1",
            "subnet_mask": "255.255.255.1",
            "config_method": "static",
            "ssh": True}
        iface = {"dns_servers": [{"ipv4Address": "10.1.0.20", "addressType": "ipv4"},
                                 {"ipv4Address": "10.1.0.50", "addressType": "ipv4"}],
                 "subnet_mask": "255.255.255.0",
                 "link_status": "up",
                 "ntp_servers": None,
                 "ntp_config_method": "disabled",
                 "controllerRef": "070000000000000000000001",
                 "config_method": "configStatic",
                 "enabled": True,
                 "gateway": "10.1.1.1",
                 "alias": "creG1g-AP-a",
                 "controllerSlot": 1,
                 "dns_config_method": "stat",
                 "id": "2800070000000000000000000001000000000000",
                 "address": "10.1.1.111",
                 "ipv6Enabled": False,
                 "channel": 1}
        settings = {"controllerRef": "070000000000000000000001",
                    "ssh": False}
        self._set_args(initial)
        mgmt_interface = MgmtInterface()
        with mock.patch(self.REQ_FUNC, return_value=(200, None)):
            update = mgmt_interface.update_array(settings, iface)
            self.assertTrue(update)
    def test_update_array_dns_static_ntp_disable_pass(self):
        """Verify get_interface_settings gives the right static configuration response."""
        # Static DNS requested while the interface currently uses DHCP DNS,
        # so update_array must report a change.
        initial = {
            "controller": "A",
            "name": "wan0",
            "dns_config_method": "static",
            "dns_address": "192.168.1.1",
            "dns_address_backup": "192.168.1.100",
            "ntp_config_method": "disable"}
        iface = {"dns_servers": [{"ipv4Address": "10.1.0.20", "addressType": "ipv4"},
                                 {"ipv4Address": "10.1.0.50", "addressType": "ipv4"}],
                 "subnet_mask": "255.255.255.0",
                 "link_status": "up",
                 "ntp_servers": None,
                 "ntp_config_method": "disabled",
                 "controllerRef": "070000000000000000000001",
                 "config_method": "configStatic",
                 "enabled": True,
                 "gateway": "10.1.1.1",
                 "alias": "creG1g-AP-a",
                 "controllerSlot": 1,
                 "dns_config_method": "configDhcp",
                 "id": "2800070000000000000000000001000000000000",
                 "address": "10.1.1.111",
                 "ipv6Enabled": False,
                 "channel": 1}
        settings = {"controllerRef": "070000000000000000000001",
                    "ssh": False}
        self._set_args(initial)
        mgmt_interface = MgmtInterface()
        with mock.patch(self.REQ_FUNC, return_value=(200, None)):
            update = mgmt_interface.update_array(settings, iface)
            self.assertTrue(update)
    def test_update_array_dns_dhcp_ntp_static_pass(self):
        """Verify get_interface_settings gives the right static configuration response."""
        # Static NTP and DHCP DNS requested, while the interface currently
        # has NTP disabled and static DNS -- both differ, so update_array
        # must report a change.
        initial = {
            "controller": "A",
            "name": "wan0",
            "ntp_config_method": "static",
            "ntp_address": "192.168.1.1",
            "ntp_address_backup": "192.168.1.100",
            "dns_config_method": "dhcp"}
        iface = {"dns_servers": [{"ipv4Address": "10.1.0.20", "addressType": "ipv4"},
                                 {"ipv4Address": "10.1.0.50", "addressType": "ipv4"}],
                 "subnet_mask": "255.255.255.0",
                 "link_status": "up",
                 "ntp_servers": None,
                 "ntp_config_method": "disabled",
                 "controllerRef": "070000000000000000000001",
                 "config_method": "configStatic",
                 "enabled": True,
                 "gateway": "10.1.1.1",
                 "alias": "creG1g-AP-a",
                 "controllerSlot": 1,
                 "dns_config_method": "configStatic",
                 "id": "2800070000000000000000000001000000000000",
                 "address": "10.1.1.111",
                 "ipv6Enabled": False,
                 "channel": 1}
        settings = {"controllerRef": "070000000000000000000001",
                    "ssh": False}
        self._set_args(initial)
        mgmt_interface = MgmtInterface()
        with mock.patch(self.REQ_FUNC, return_value=(200, None)):
            update = mgmt_interface.update_array(settings, iface)
            self.assertTrue(update)
    def test_update_array_dns_dhcp_ntp_static_no_change_pass(self):
        """Verify get_interface_settings gives the right static configuration response."""
        # DHCP requested for both DNS and NTP, matching the interface's
        # current configuration -- update_array must report no change.
        initial = {
            "controller": "A",
            "name": "wan0",
            "ntp_config_method": "dhcp",
            "dns_config_method": "dhcp"}
        iface = {"dns_servers": [{"ipv4Address": "10.1.0.20", "addressType": "ipv4"},
                                 {"ipv4Address": "10.1.0.50", "addressType": "ipv4"}],
                 "subnet_mask": "255.255.255.0",
                 "ntp_servers": None,
                 "ntp_config_method": "dhcp",
                 "controllerRef": "070000000000000000000001",
                 "config_method": "static",
                 "enabled": True,
                 "gateway": "10.1.1.1",
                 "alias": "creG1g-AP-a",
                 "controllerSlot": 1,
                 "dns_config_method": "dhcp",
                 "id": "2800070000000000000000000001000000000000",
                 "address": "10.1.1.11",
                 "ipv6Enabled": False,
                 "channel": 1}
        settings = {"controllerRef": "070000000000000000000001",
                    "ssh": False}
        self._set_args(initial)
        mgmt_interface = MgmtInterface()
        with mock.patch(self.REQ_FUNC, return_value=(200, None)):
            update = mgmt_interface.update_array(settings, iface)
            self.assertFalse(update)
def test_update_array_ipv4_ipv6_disabled_fail(self):
    """Verify an exception is raised when both IPv4 and IPv6 would end up disabled."""
    module_args = {
        "state": "disable",
        "controller": "A",
        "name": "wan0",
        "address": "192.168.1.1",
        "subnet_mask": "255.255.255.1",
        "config_method": "static",
        "ssh": True}
    # Current interface state: IPv6 is already disabled on this interface.
    current_iface = {
        "dns_servers": [{"ipv4Address": "10.1.0.20", "addressType": "ipv4"},
                        {"ipv4Address": "10.1.0.50", "addressType": "ipv4"}],
        "subnet_mask": "255.255.255.0",
        "ntp_servers": None,
        "ntp_config_method": "disabled",
        "controllerRef": "070000000000000000000001",
        "config_method": "configStatic",
        "enabled": True,
        "gateway": "10.1.1.1",
        "alias": "creG1g-AP-a",
        "controllerSlot": 1,
        "dns_config_method": "stat",
        "id": "2800070000000000000000000001000000000000",
        "address": "10.1.1.11",
        "ipv6Enabled": False,
        "channel": 1}
    controller_settings = {
        "controllerRef": "070000000000000000000001",
        "ssh": False}
    self._set_args(module_args)
    interface = MgmtInterface()
    # The array rejects the request (HTTP 422) because disabling IPv4 as well
    # would leave the controller with no management connectivity.
    rejection = (422, dict(ipv4Enabled=False, retcode="4", errorMessage=""))
    with self.assertRaisesRegexp(AnsibleFailJson, r"This storage-system already has IPv6 connectivity disabled."):
        with mock.patch(self.REQ_FUNC, return_value=rejection):
            interface.update_array(controller_settings, current_iface)
def test_update_array_request_error_fail(self):
    """Verify an exception is raised when the configuration request errors out."""
    module_args = {
        "state": "disable",
        "controller": "A",
        "name": "wan0",
        "address": "192.168.1.1",
        "subnet_mask": "255.255.255.1",
        "config_method": "static",
        "ssh": True}
    current_iface = {
        "dns_servers": [{"ipv4Address": "10.1.0.20", "addressType": "ipv4"},
                        {"ipv4Address": "10.1.0.50", "addressType": "ipv4"}],
        "subnet_mask": "255.255.255.0",
        "ntp_servers": None,
        "ntp_config_method": "disabled",
        "controllerRef": "070000000000000000000001",
        "config_method": "configStatic",
        "enabled": True,
        "gateway": "10.1.1.1",
        "alias": "creG1g-AP-a",
        "controllerSlot": 1,
        "dns_config_method": "stat",
        "id": "2800070000000000000000000001000000000000",
        "address": "10.1.1.111",
        "ipv6Enabled": False,
        "channel": 1}
    controller_settings = {
        "controllerRef": "070000000000000000000001",
        "ssh": False}
    self._set_args(module_args)
    interface = MgmtInterface()
    # An unexpected HTTP 300 response must be surfaced as a module failure.
    bad_response = (300, dict(ipv4Enabled=False, retcode="4", errorMessage=""))
    with self.assertRaisesRegexp(AnsibleFailJson, r"We failed to configure the management interface."):
        with mock.patch(self.REQ_FUNC, return_value=bad_response):
            interface.update_array(controller_settings, current_iface)
def test_update_pass(self):
    """Validate that update() completes and exits with the expected success message."""
    module_args = {
        "state": "enable",
        "controller": "A",
        "channel": "1",
        "address": "192.168.1.1",
        "subnet_mask": "255.255.255.1",
        "config_method": "static",
        "ssh": "yes"}
    # Controller inventory returned by the mocked API; slot 1 maps to controller A.
    controller_request = [
        {"physicalLocation": {"slot": 2},
         "controllerRef": "070000000000000000000002",
         "networkSettings": {"remoteAccessEnabled": True}},
        {"physicalLocation": {"slot": 1},
         "controllerRef": "070000000000000000000001",
         "networkSettings": {"remoteAccessEnabled": False}}]
    # Interface state the module is expected to converge on (kept for reference).
    expected = {
        "dns_servers": [{"ipv4Address": "10.1.0.20", "addressType": "ipv4"},
                        {"ipv4Address": "10.1.0.50", "addressType": "ipv4"}],
        "subnet_mask": "255.255.255.0",
        "ntp_servers": None,
        "ntp_config_method": "disabled",
        "controllerRef": "070000000000000000000001",
        "config_method": "configStatic",
        "enabled": True,
        "gateway": "10.1.1.1",
        "alias": "creG1g-AP-a",
        "controllerSlot": 1,
        "dns_config_method": "stat",
        "id": "2800070000000000000000000001000000000000",
        "address": "10.1.1.111",
        "ipv6Enabled": False,
        "channel": 1}
    self._set_args(module_args)
    interface = MgmtInterface()
    # Successful runs exit via AnsibleExitJson; the mocked request serves the
    # update call followed by two rounds of controller/interface polling.
    responses = [(200, None), (200, controller_request), (200, self.TEST_DATA),
                 (200, controller_request), (200, self.TEST_DATA)]
    with self.assertRaisesRegexp(AnsibleExitJson, r"The interface settings have been updated."):
        with mock.patch(self.REQ_FUNC, side_effect=responses):
            interface.update()

View file

@ -1,724 +0,0 @@
# coding=utf-8
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible_collections.community.general.tests.unit.modules.utils import AnsibleExitJson, AnsibleFailJson, ModuleTestCase, set_module_args
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_storagepool import NetAppESeriesStoragePool
try:
from unittest.mock import patch, PropertyMock
except ImportError:
from mock import patch, PropertyMock
class StoragePoolTest(ModuleTestCase):
# Baseline connection arguments every test must supply to the module under test.
REQUIRED_PARAMS = {"api_username": "username",
                   "api_password": "password",
                   "api_url": "http://localhost/devmgr/v2",
                   "ssid": "1",
                   "validate_certs": "no"}
STORAGE_POOL_DATA = [{"raidLevel": "raidDiskPool", "volumeGroupRef": "04000000600A098000A4B28D000017805C7BD4D8",
"securityType": "capable",
"protectionInformationCapabilities": {"protectionInformationCapable": True,
"protectionType": "type2Protection"},
"volumeGroupData": {"diskPoolData": {"reconstructionReservedDriveCount": 2}},
"totalRaidedSpace": "2735894167552", "name": "pool",
"id": "04000000600A098000A4B28D000017805C7BD4D8", "driveMediaType": "hdd"}]
DRIVES_DATA = [{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
'driveMediaType': 'hdd', 'id': '010000005000C500551ED1FF0000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
'driveMediaType': 'hdd', 'id': '010000005000C500551EB1930000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
'driveMediaType': 'hdd', 'id': '010000005000C500551EAAE30000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
'driveMediaType': 'hdd', 'id': '010000005000C500551ECB1F0000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
'driveMediaType': 'hdd', 'id': '010000005000C500551EB2930000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
'driveMediaType': 'hdd', 'id': '010000005000C500551ECB0B0000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
'driveMediaType': 'hdd', 'id': '010000005000C500551EC6C70000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
'driveMediaType': 'hdd', 'id': '010000005000C500551E9BA70000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
'driveMediaType': 'hdd', 'id': '010000005000C500551ED7CF0000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
'driveMediaType': 'hdd', 'id': '010000005000C500551ECB0F0000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
'driveMediaType': 'hdd', 'id': '010000005000C500551E72870000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
'driveMediaType': 'hdd', 'id': '010000005000C500551E9DBB0000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
'driveMediaType': 'hdd', 'id': '010000005000C500551EAC230000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
'driveMediaType': 'hdd', 'id': '010000005000C500551EA0BB0000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': False, 'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8',
'driveMediaType': 'hdd', 'id': '010000005000C500551EAC4B0000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
'driveMediaType': 'hdd', 'id': '010000005000C500551E7F2B0000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
'driveMediaType': 'hdd', 'id': '010000005000C500551EC9270000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
'driveMediaType': 'hdd', 'id': '010000005000C500551EC97F0000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
'driveMediaType': 'hdd', 'id': '010000005000C500551ECBFF0000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
'driveMediaType': 'hdd', 'id': '010000005000C500551E9ED30000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
'driveMediaType': 'hdd', 'id': '010000005000C500551EA4CF0000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
'driveMediaType': 'hdd', 'id': '010000005000C500551EA29F0000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
'driveMediaType': 'hdd', 'id': '010000005000C500551ECDFB0000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
'driveMediaType': 'hdd', 'id': '010000005000C500551E99230000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
'driveMediaType': 'ssd', 'id': '010000005000C500551E9ED31000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
'driveMediaType': 'ssd', 'id': '010000005000C500551EA4CF2000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
'driveMediaType': 'ssd', 'id': '010000005000C500551EA29F3000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
'driveMediaType': 'ssd', 'id': '010000005000C500551ECDFB4000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sas', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'},
{'available': True, 'currentVolumeGroupRef': '0000000000000000000000000000000000000000',
'driveMediaType': 'ssd', 'id': '010000005000C500551E99235000000000000000', 'fdeCapable': True,
'hotSpare': False, 'invalidDriveData': False, 'nonRedundantAccess': False, 'pfa': False,
'phyDriveType': 'sata', 'protectionInformationCapabilities': {'protectionInformationCapable': True,
'protectionType': 'type2Protection'},
'rawCapacity': '300000000000', 'removed': False, 'status': 'optimal', 'uncertified': False,
'usableCapacity': '299463129088'}]
RAID6_CANDIDATE_DRIVES = {"volumeCandidate": [
{"raidLevel": "raid6", "trayLossProtection": False, "rawSize": "898389368832", "usableSize": "898388459520",
"driveCount": 5, "freeExtentRef": "0000000000000000000000000000000000000000", "driveRefList": {
"driveRef": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551EC9270000000000000000",
"010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
"010000005000C500551E9ED30000000000000000"]}, "candidateSelectionType": "count",
"spindleSpeedMatch": True, "spindleSpeed": 10000, "phyDriveType": "sas", "dssPreallocEnabled": False,
"securityType": "capable", "drawerLossProtection": False, "driveMediaType": "hdd",
"protectionInformationCapable": False,
"protectionInformationCapabilities": {"protectionInformationCapable": True,
"protectionType": "type2Protection"},
"volumeCandidateData": {"type": "traditional", "diskPoolVolumeCandidateData": None},
"driveBlockFormat": "allNative", "allocateReservedSpace": False, "securityLevel": "fde"},
{"raidLevel": "raid6", "trayLossProtection": False, "rawSize": "1197852491776", "usableSize": "1197851279360",
"driveCount": 6, "freeExtentRef": "0000000000000000000000000000000000000000", "driveRefList": {
"driveRef": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551EC9270000000000000000",
"010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
"010000005000C500551E9ED30000000000000000", "010000005000C500551EA4CF0000000000000000"]},
"candidateSelectionType": "count", "spindleSpeedMatch": True, "spindleSpeed": 10000, "phyDriveType": "sas",
"dssPreallocEnabled": False, "securityType": "capable", "drawerLossProtection": False, "driveMediaType": "hdd",
"protectionInformationCapable": False,
"protectionInformationCapabilities": {"protectionInformationCapable": True,
"protectionType": "type2Protection"},
"volumeCandidateData": {"type": "traditional", "diskPoolVolumeCandidateData": None},
"driveBlockFormat": "allNative", "allocateReservedSpace": False, "securityLevel": "fde"},
{"raidLevel": "raid6", "trayLossProtection": False, "rawSize": "1497315614720", "usableSize": "1497314099200",
"driveCount": 7, "freeExtentRef": "0000000000000000000000000000000000000000", "driveRefList": {
"driveRef": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551EC9270000000000000000",
"010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
"010000005000C500551E9ED30000000000000000", "010000005000C500551EA4CF0000000000000000",
"010000005000C500551ED1FF0000000000000000"]}, "candidateSelectionType": "count",
"spindleSpeedMatch": True, "spindleSpeed": 10000, "phyDriveType": "sas", "dssPreallocEnabled": False,
"securityType": "capable", "drawerLossProtection": False, "driveMediaType": "hdd",
"protectionInformationCapable": False,
"protectionInformationCapabilities": {"protectionInformationCapable": True,
"protectionType": "type2Protection"},
"volumeCandidateData": {"type": "traditional", "diskPoolVolumeCandidateData": None},
"driveBlockFormat": "allNative", "allocateReservedSpace": False, "securityLevel": "fde"},
{"raidLevel": "raid6", "trayLossProtection": False, "rawSize": "1796778737664", "usableSize": "1796776919040",
"driveCount": 8, "freeExtentRef": "0000000000000000000000000000000000000000", "driveRefList": {
"driveRef": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551EC9270000000000000000",
"010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
"010000005000C500551E9ED30000000000000000", "010000005000C500551EA4CF0000000000000000",
"010000005000C500551ED1FF0000000000000000", "010000005000C500551EA29F0000000000000000"]},
"candidateSelectionType": "count", "spindleSpeedMatch": True, "spindleSpeed": 10000, "phyDriveType": "sas",
"dssPreallocEnabled": False, "securityType": "capable", "drawerLossProtection": False, "driveMediaType": "hdd",
"protectionInformationCapable": False,
"protectionInformationCapabilities": {"protectionInformationCapable": True,
"protectionType": "type2Protection"},
"volumeCandidateData": {"type": "traditional", "diskPoolVolumeCandidateData": None},
"driveBlockFormat": "allNative", "allocateReservedSpace": False, "securityLevel": "fde"},
{"raidLevel": "raid6", "trayLossProtection": False, "rawSize": "2096241860608", "usableSize": "2096239738880",
"driveCount": 9, "freeExtentRef": "0000000000000000000000000000000000000000", "driveRefList": {
"driveRef": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551EC9270000000000000000",
"010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
"010000005000C500551E9ED30000000000000000", "010000005000C500551EA4CF0000000000000000",
"010000005000C500551ED1FF0000000000000000", "010000005000C500551EA29F0000000000000000",
"010000005000C500551ECDFB0000000000000000"]}, "candidateSelectionType": "count",
"spindleSpeedMatch": True, "spindleSpeed": 10000, "phyDriveType": "sas", "dssPreallocEnabled": False,
"securityType": "capable", "drawerLossProtection": False, "driveMediaType": "hdd",
"protectionInformationCapable": False,
"protectionInformationCapabilities": {"protectionInformationCapable": True,
"protectionType": "type2Protection"},
"volumeCandidateData": {"type": "traditional", "diskPoolVolumeCandidateData": None},
"driveBlockFormat": "allNative", "allocateReservedSpace": False, "securityLevel": "fde"},
{"raidLevel": "raid6", "trayLossProtection": False, "rawSize": "2395704983552", "usableSize": "2395702558720",
"driveCount": 10, "freeExtentRef": "0000000000000000000000000000000000000000", "driveRefList": {
"driveRef": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551EC9270000000000000000",
"010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
"010000005000C500551E9ED30000000000000000", "010000005000C500551EA4CF0000000000000000",
"010000005000C500551ED1FF0000000000000000", "010000005000C500551EA29F0000000000000000",
"010000005000C500551ECDFB0000000000000000", "010000005000C500551E99230000000000000000"]},
"candidateSelectionType": "count", "spindleSpeedMatch": True, "spindleSpeed": 10000, "phyDriveType": "sas",
"dssPreallocEnabled": False, "securityType": "capable", "drawerLossProtection": False, "driveMediaType": "hdd",
"protectionInformationCapable": False,
"protectionInformationCapabilities": {"protectionInformationCapable": True,
"protectionType": "type2Protection"},
"volumeCandidateData": {"type": "traditional", "diskPoolVolumeCandidateData": None},
"driveBlockFormat": "allNative", "allocateReservedSpace": False, "securityLevel": "fde"}], "returnCode": "ok"}
EXPANSION_DDP_DRIVES_LIST = ["010000005000C500551ED1FF0000000000000000", "010000005000C500551E7F2B0000000000000000",
"010000005000C500551EC9270000000000000000", "010000005000C500551EC97F0000000000000000",
"010000005000C500551ECBFF0000000000000000", "010000005000C500551E9ED30000000000000000",
"010000005000C500551EA4CF0000000000000000", "010000005000C500551EA29F0000000000000000",
"010000005000C500551ECDFB0000000000000000", "010000005000C500551E99230000000000000000",
"010000005000C500551E9ED31000000000000000", "010000005000C500551EA4CF2000000000000000",
"010000005000C500551EA29F3000000000000000", "010000005000C500551ECDFB4000000000000000",
"010000005000C500551E99235000000000000000"]
EXPANSION_DDP_DRIVE_DATA = {"returnCode": "ok", "candidates": [
{"drives": ["010000005000C500551E7F2B0000000000000000"], "trayLossProtection": False, "wastedCapacity": "0",
"spindleSpeedMatch": True, "drawerLossProtection": False, "usableCapacity": "299463129088",
"driveBlockFormat": "allNative"},
{"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000"],
"trayLossProtection": False, "wastedCapacity": "0", "spindleSpeedMatch": True, "drawerLossProtection": False,
"usableCapacity": "598926258176", "driveBlockFormat": "allNative"},
{"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
"010000005000C500551E9ED30000000000000000"], "trayLossProtection": False, "wastedCapacity": "0",
"spindleSpeedMatch": True, "drawerLossProtection": False, "usableCapacity": "898389387264",
"driveBlockFormat": "allNative"},
{"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
"010000005000C500551E9ED30000000000000000", "010000005000C500551EA29F0000000000000000"],
"trayLossProtection": False, "wastedCapacity": "0", "spindleSpeedMatch": True, "drawerLossProtection": False,
"usableCapacity": "1197852516352", "driveBlockFormat": "allNative"},
{"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
"010000005000C500551E9ED30000000000000000", "010000005000C500551EA29F0000000000000000",
"010000005000C500551EA4CF0000000000000000"], "trayLossProtection": False, "wastedCapacity": "0",
"spindleSpeedMatch": True, "drawerLossProtection": False, "usableCapacity": "1497315645440",
"driveBlockFormat": "allNative"},
{"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
"010000005000C500551E9ED30000000000000000", "010000005000C500551EA29F0000000000000000",
"010000005000C500551EA4CF0000000000000000", "010000005000C500551EC9270000000000000000"],
"trayLossProtection": False, "wastedCapacity": "0", "spindleSpeedMatch": True, "drawerLossProtection": False,
"usableCapacity": "1796778774528", "driveBlockFormat": "allNative"},
{"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
"010000005000C500551E9ED30000000000000000", "010000005000C500551EA29F0000000000000000",
"010000005000C500551EA4CF0000000000000000", "010000005000C500551EC9270000000000000000",
"010000005000C500551EC97F0000000000000000"], "trayLossProtection": False, "wastedCapacity": "0",
"spindleSpeedMatch": True, "drawerLossProtection": False, "usableCapacity": "2096241903616",
"driveBlockFormat": "allNative"},
{"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
"010000005000C500551E9ED30000000000000000", "010000005000C500551EA29F0000000000000000",
"010000005000C500551EA4CF0000000000000000", "010000005000C500551EC9270000000000000000",
"010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000"],
"trayLossProtection": False, "wastedCapacity": "0", "spindleSpeedMatch": True, "drawerLossProtection": False,
"usableCapacity": "2395705032704", "driveBlockFormat": "allNative"},
{"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
"010000005000C500551E9ED30000000000000000", "010000005000C500551EA29F0000000000000000",
"010000005000C500551EA4CF0000000000000000", "010000005000C500551EC9270000000000000000",
"010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
"010000005000C500551ECDFB0000000000000000"], "trayLossProtection": False, "wastedCapacity": "0",
"spindleSpeedMatch": True, "drawerLossProtection": False, "usableCapacity": "2695168161792",
"driveBlockFormat": "allNative"},
{"drives": ["010000005000C500551E7F2B0000000000000000", "010000005000C500551E99230000000000000000",
"010000005000C500551E9ED30000000000000000", "010000005000C500551EA29F0000000000000000",
"010000005000C500551EA4CF0000000000000000", "010000005000C500551EC9270000000000000000",
"010000005000C500551EC97F0000000000000000", "010000005000C500551ECBFF0000000000000000",
"010000005000C500551ECDFB0000000000000000", "010000005000C500551ED1FF0000000000000000"],
"trayLossProtection": False, "wastedCapacity": "0", "spindleSpeedMatch": True, "drawerLossProtection": False,
"usableCapacity": "2994631290880", "driveBlockFormat": "allNative"}]}
# Dotted patch targets used with mock.patch to stub out the module's HTTP layer
# and its computed properties during the tests below.
REQUEST_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_storagepool.request'
NETAPP_REQUEST_FUNC = 'ansible_collections.netapp.ontap.plugins.module_utils.netapp.NetAppESeriesModule.request'
VALIDATE_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_storagepool.NetAppESeriesModule.validate_instance'
DRIVES_PROPERTY = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_storagepool.NetAppESeriesStoragePool.drives'
STORAGE_POOL_PROPERTY = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_storagepool.NetAppESeriesStoragePool.storage_pool'
def _set_args(self, args=None):
    """Register module arguments: REQUIRED_PARAMS overlaid with ``args``.

    :param args: optional dict of extra/overriding module parameters.
    """
    merged = dict(self.REQUIRED_PARAMS)
    if args is not None:
        merged.update(args)
    set_module_args(merged)
def _initialize_dummy_instance(self, alt_args=None):
    """Build a minimal NetAppESeriesStoragePool instance for method-level tests.

    Defaults to ``state=absent``/``name=storage_pool``; ``alt_args`` may
    override or extend those parameters.
    """
    module_args = {"state": "absent", "name": "storage_pool"}
    if alt_args:
        module_args.update(alt_args)
    self._set_args(module_args)
    return NetAppESeriesStoragePool()
def test_drives_fail(self):
    """Verify AnsibleFailJson is raised when fetching the disk drives fails."""
    with patch(self.NETAPP_REQUEST_FUNC) as netapp_request:
        # Use side_effect so the mocked request actually raises.  Assigning an
        # Exception instance to return_value only returns the object, which
        # does not exercise the error path the docstring describes.
        netapp_request.side_effect = Exception()
        storagepool = self._initialize_dummy_instance()
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to fetch disk drives."):
            drives = storagepool.drives  # property access triggers the request
def test_available_drives(self):
    """Verify the available_drives property reports exactly the expected drive refs."""
    expected_refs = [
        '010000005000C500551ED1FF0000000000000000', '010000005000C500551E7F2B0000000000000000',
        '010000005000C500551EC9270000000000000000', '010000005000C500551EC97F0000000000000000',
        '010000005000C500551ECBFF0000000000000000', '010000005000C500551E9ED30000000000000000',
        '010000005000C500551EA4CF0000000000000000', '010000005000C500551EA29F0000000000000000',
        '010000005000C500551ECDFB0000000000000000', '010000005000C500551E99230000000000000000',
        '010000005000C500551E9ED31000000000000000', '010000005000C500551EA4CF2000000000000000',
        '010000005000C500551EA29F3000000000000000', '010000005000C500551ECDFB4000000000000000',
        '010000005000C500551E99235000000000000000',
    ]
    with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives_mock:
        drives_mock.return_value = self.DRIVES_DATA
        pool = self._initialize_dummy_instance()
        self.assertEqual(pool.available_drives, expected_refs)
def test_available_drive_types(self):
    """Verify drive media types are ordered most common first."""
    with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives_mock:
        drives_mock.return_value = self.DRIVES_DATA
        pool = self._initialize_dummy_instance()
        # hdd is the most common media type in the fixture, ssd second.
        self.assertEqual(pool.available_drive_types[:2], ["hdd", "ssd"])
def test_available_drive_interface_types(self):
    """Verify drive interface types are ordered most common first."""
    with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives_mock:
        drives_mock.return_value = self.DRIVES_DATA
        pool = self._initialize_dummy_instance()
        # sas is the most common interface type in the fixture, sata second.
        self.assertEqual(pool.available_drive_interface_types[:2], ["sas", "sata"])
def test_storage_pool_drives(self):
    """Verify storage pool drive collection.

    The drives property is mocked with the full drive inventory and
    pool_detail is pointed at the first storage-pool fixture; the
    storage_pool_drives property is then expected to return the member
    drives of that pool, reduced to the fields asserted below.
    """
    with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
        drives.return_value = self.DRIVES_DATA
        storagepool = self._initialize_dummy_instance(
            {"state": "present", "name": "pool", "criteria_drive_count": "12", "raid_level": "raidDiskPool"})
        storagepool.pool_detail = self.STORAGE_POOL_DATA[0]
        # Fourteen structurally identical hdd/sas member drives; only the
        # 'id' field differs between entries.
        self.assertEqual(storagepool.storage_pool_drives, [
            {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
             'protectionInformationCapabilities': {'protectionInformationCapable': True,
                                                   'protectionType': 'type2Protection'}, 'fdeCapable': True,
             'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
             'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
             'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
             'id': '010000005000C500551EB1930000000000000000'},
            {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
             'protectionInformationCapabilities': {'protectionInformationCapable': True,
                                                   'protectionType': 'type2Protection'}, 'fdeCapable': True,
             'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
             'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
             'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
             'id': '010000005000C500551EAAE30000000000000000'},
            {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
             'protectionInformationCapabilities': {'protectionInformationCapable': True,
                                                   'protectionType': 'type2Protection'}, 'fdeCapable': True,
             'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
             'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
             'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
             'id': '010000005000C500551ECB1F0000000000000000'},
            {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
             'protectionInformationCapabilities': {'protectionInformationCapable': True,
                                                   'protectionType': 'type2Protection'}, 'fdeCapable': True,
             'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
             'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
             'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
             'id': '010000005000C500551EB2930000000000000000'},
            {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
             'protectionInformationCapabilities': {'protectionInformationCapable': True,
                                                   'protectionType': 'type2Protection'}, 'fdeCapable': True,
             'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
             'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
             'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
             'id': '010000005000C500551ECB0B0000000000000000'},
            {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
             'protectionInformationCapabilities': {'protectionInformationCapable': True,
                                                   'protectionType': 'type2Protection'}, 'fdeCapable': True,
             'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
             'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
             'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
             'id': '010000005000C500551EC6C70000000000000000'},
            {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
             'protectionInformationCapabilities': {'protectionInformationCapable': True,
                                                   'protectionType': 'type2Protection'}, 'fdeCapable': True,
             'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
             'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
             'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
             'id': '010000005000C500551E9BA70000000000000000'},
            {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
             'protectionInformationCapabilities': {'protectionInformationCapable': True,
                                                   'protectionType': 'type2Protection'}, 'fdeCapable': True,
             'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
             'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
             'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
             'id': '010000005000C500551ED7CF0000000000000000'},
            {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
             'protectionInformationCapabilities': {'protectionInformationCapable': True,
                                                   'protectionType': 'type2Protection'}, 'fdeCapable': True,
             'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
             'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
             'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
             'id': '010000005000C500551ECB0F0000000000000000'},
            {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
             'protectionInformationCapabilities': {'protectionInformationCapable': True,
                                                   'protectionType': 'type2Protection'}, 'fdeCapable': True,
             'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
             'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
             'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
             'id': '010000005000C500551E72870000000000000000'},
            {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
             'protectionInformationCapabilities': {'protectionInformationCapable': True,
                                                   'protectionType': 'type2Protection'}, 'fdeCapable': True,
             'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
             'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
             'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
             'id': '010000005000C500551E9DBB0000000000000000'},
            {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
             'protectionInformationCapabilities': {'protectionInformationCapable': True,
                                                   'protectionType': 'type2Protection'}, 'fdeCapable': True,
             'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
             'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
             'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
             'id': '010000005000C500551EAC230000000000000000'},
            {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
             'protectionInformationCapabilities': {'protectionInformationCapable': True,
                                                   'protectionType': 'type2Protection'}, 'fdeCapable': True,
             'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
             'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
             'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
             'id': '010000005000C500551EA0BB0000000000000000'},
            {'available': False, 'pfa': False, 'driveMediaType': 'hdd', 'uncertified': False,
             'protectionInformationCapabilities': {'protectionInformationCapable': True,
                                                   'protectionType': 'type2Protection'}, 'fdeCapable': True,
             'currentVolumeGroupRef': '04000000600A098000A4B28D000017805C7BD4D8', 'invalidDriveData': False,
             'nonRedundantAccess': False, 'hotSpare': False, 'status': 'optimal', 'rawCapacity': '300000000000',
             'usableCapacity': '299463129088', 'phyDriveType': 'sas', 'removed': False,
             'id': '010000005000C500551EAC4B0000000000000000'}])
def test_get_ddp_capacity(self):
    """Evaluate the capacity returned by the get_ddp_capacity method."""
    with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives_mock:
        drives_mock.return_value = self.DRIVES_DATA
        pool = self._initialize_dummy_instance(
            {"state": "present", "name": "pool", "criteria_drive_count": "12",
             "raid_level": "raidDiskPool"})
        pool.pool_detail = self.STORAGE_POOL_DATA[0]
        capacity = pool.get_ddp_capacity(self.EXPANSION_DDP_DRIVES_LIST)
        # places=-2 tolerates python version/architecture computational differences.
        self.assertAlmostEqual(capacity, 6038680353645, places=-2)
def test_get_candidate_drives(self):
    """Verify correct candidate list is returned.

    The controller candidate request is mocked with the
    RAID6_CANDIDATE_DRIVES fixture; get_candidate_drives is expected to
    return the six-drive raid6 candidate structure asserted below.
    """
    with patch(self.NETAPP_REQUEST_FUNC) as netapp_request:
        netapp_request.return_value = (200, self.RAID6_CANDIDATE_DRIVES)
        with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
            drives.return_value = self.DRIVES_DATA
            storagepool = self._initialize_dummy_instance(
                {"state": "present", "name": "raid6_vg", "criteria_drive_count": "6", "raid_level": "raid6"})
            # Expected candidate exactly as reported by the mocked API response.
            self.assertEqual(storagepool.get_candidate_drives(),
                             {'candidateSelectionType': 'count', 'driveMediaType': 'hdd',
                              'protectionInformationCapabilities': {'protectionInformationCapable': True,
                                                                    'protectionType': 'type2Protection'},
                              'dssPreallocEnabled': False, 'phyDriveType': 'sas', 'allocateReservedSpace': False,
                              'trayLossProtection': False, 'raidLevel': 'raid6', 'spindleSpeed': 10000,
                              'securityType': 'capable', 'securityLevel': 'fde', 'spindleSpeedMatch': True,
                              'driveBlockFormat': 'allNative', 'protectionInformationCapable': False,
                              'freeExtentRef': '0000000000000000000000000000000000000000', 'driveCount': 6,
                              'driveRefList': {'driveRef': ['010000005000C500551E7F2B0000000000000000',
                                                            '010000005000C500551EC9270000000000000000',
                                                            '010000005000C500551EC97F0000000000000000',
                                                            '010000005000C500551ECBFF0000000000000000',
                                                            '010000005000C500551E9ED30000000000000000',
                                                            '010000005000C500551EA4CF0000000000000000']},
                              'rawSize': '1197852491776', 'usableSize': '1197851279360',
                              'drawerLossProtection': False,
                              'volumeCandidateData': {'type': 'traditional', 'diskPoolVolumeCandidateData': None}})
def test_get_expansion_candidate_drives(self):
    """Verify the correct expansion candidate drive list is returned."""
    expected = [{'drawerLossProtection': False,
                 'trayLossProtection': False,
                 'drives': ['010000005000C500551E7F2B0000000000000000',
                            '010000005000C500551E99230000000000000000',
                            '010000005000C500551E9ED30000000000000000',
                            '010000005000C500551EA29F0000000000000000',
                            '010000005000C500551EA4CF0000000000000000',
                            '010000005000C500551EC9270000000000000000'],
                 'spindleSpeedMatch': True,
                 'driveBlockFormat': 'allNative',
                 'usableCapacity': '1796778774528',
                 'wastedCapacity': '0'}]
    with patch(self.NETAPP_REQUEST_FUNC) as request_mock:
        request_mock.return_value = (200, self.EXPANSION_DDP_DRIVE_DATA)
        with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives_mock:
            drives_mock.return_value = self.DRIVES_DATA
            pool = self._initialize_dummy_instance(
                {"state": "present", "name": "pool", "criteria_drive_count": "20",
                 "raid_level": "raidDiskPool"})
            pool.pool_detail = self.STORAGE_POOL_DATA[0]
            self.assertEqual(pool.get_expansion_candidate_drives(), expected)
def test_get_maximum_reserve_drive_count(self):
    """Ensure the maximum reserve drive count is accurately calculated."""
    with patch(self.NETAPP_REQUEST_FUNC) as request_mock:
        request_mock.return_value = (200, self.EXPANSION_DDP_DRIVE_DATA)
        with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives_mock:
            drives_mock.return_value = self.DRIVES_DATA
            pool = self._initialize_dummy_instance(
                {"state": "present", "name": "pool", "criteria_drive_count": "20",
                 "raid_level": "raidDiskPool"})
            pool.pool_detail = self.STORAGE_POOL_DATA[0]
            self.assertEqual(pool.get_maximum_reserve_drive_count(), 5)
def test_apply_check_mode_unchange(self):
    """Verify that the changes are appropriately determined.

    Both scenarios below run apply() in check mode and expect
    changed=False: an already-absent pool requested absent, and a present
    pool whose requested configuration matches the fixture.
    """
    # Absent storage pool required to be absent
    with self.assertRaisesRegexp(AnsibleExitJson, "'changed': False"):
        with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
            drives.return_value = self.DRIVES_DATA
            with patch(self.STORAGE_POOL_PROPERTY, new_callable=PropertyMock) as storage_pool:
                storage_pool.return_value = {}
                storagepool = self._initialize_dummy_instance(
                    {"state": "absent", "name": "not-a-pool", "erase_secured_drives": False,
                     "criteria_drive_count": "14", "raid_level": "raidDiskPool"})
                storagepool.module.check_mode = True
                # Bypass drive-count validation; it is not under test here.
                storagepool.is_drive_count_valid = lambda x: True
                storagepool.apply()
    # Present storage pool with no changes
    with self.assertRaisesRegexp(AnsibleExitJson, "'changed': False"):
        with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
            drives.return_value = self.DRIVES_DATA
            with patch(self.STORAGE_POOL_PROPERTY, new_callable=PropertyMock) as storage_pool:
                storage_pool.return_value = self.STORAGE_POOL_DATA[0]
                storagepool = self._initialize_dummy_instance(
                    {"state": "present", "name": "pool", "erase_secured_drives": False,
                     "criteria_drive_count": "14", "raid_level": "raidDiskPool"})
                storagepool.module.check_mode = True
                storagepool.is_drive_count_valid = lambda x: True
                storagepool.apply()
def test_apply_check_mode_change(self):
    """Verify that the changes are appropriately determined.

    Each scenario below runs apply() in check mode with one mutating
    operation stubbed to report work pending, and expects changed=True.
    The lambda stubs replace the instance methods so only the change
    aggregation logic in apply() is exercised.
    """
    # Remove absent storage pool
    with self.assertRaisesRegexp(AnsibleExitJson, "'changed': True"):
        with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
            drives.return_value = self.DRIVES_DATA
            with patch(self.STORAGE_POOL_PROPERTY, new_callable=PropertyMock) as storage_pool:
                storage_pool.return_value = self.STORAGE_POOL_DATA[0]
                storagepool = self._initialize_dummy_instance(
                    {"state": "absent", "name": "pool", "erase_secured_drives": False, "criteria_drive_count": "14",
                     "raid_level": "raidDiskPool"})
                storagepool.module.check_mode = True
                storagepool.is_drive_count_valid = lambda x: True
                storagepool.apply()
    # Expand present storage pool
    with self.assertRaisesRegexp(AnsibleExitJson, "'changed': True"):
        with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
            drives.return_value = self.DRIVES_DATA
            with patch(self.STORAGE_POOL_PROPERTY, new_callable=PropertyMock) as storage_pool:
                storage_pool.return_value = self.STORAGE_POOL_DATA[0]
                storagepool = self._initialize_dummy_instance(
                    {"state": "present", "name": "pool", "erase_secured_drives": False,
                     "criteria_drive_count": "15", "raid_level": "raidDiskPool"})
                storagepool.module.check_mode = True
                storagepool.is_drive_count_valid = lambda x: True
                # Only expansion reports pending work (True, 100).
                storagepool.expand_storage_pool = lambda check_mode: (True, 100)
                storagepool.migrate_raid_level = lambda check_mode: False
                storagepool.secure_storage_pool = lambda check_mode: False
                storagepool.set_reserve_drive_count = lambda check_mode: False
                storagepool.apply()
    # Migrate present storage pool raid level
    with self.assertRaisesRegexp(AnsibleExitJson, "'changed': True"):
        with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
            drives.return_value = self.DRIVES_DATA
            with patch(self.STORAGE_POOL_PROPERTY, new_callable=PropertyMock) as storage_pool:
                storage_pool.return_value = self.STORAGE_POOL_DATA[0]
                storagepool = self._initialize_dummy_instance(
                    {"state": "present", "name": "pool", "erase_secured_drives": False,
                     "criteria_drive_count": "15", "raid_level": "raidDiskPool"})
                storagepool.module.check_mode = True
                storagepool.is_drive_count_valid = lambda x: True
                storagepool.expand_storage_pool = lambda check_mode: (False, 0)
                # Only the raid-level migration reports pending work.
                storagepool.migrate_raid_level = lambda check_mode: True
                storagepool.secure_storage_pool = lambda check_mode: False
                storagepool.set_reserve_drive_count = lambda check_mode: False
                storagepool.apply()
    # Secure present storage pool
    with self.assertRaisesRegexp(AnsibleExitJson, "'changed': True"):
        with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
            drives.return_value = self.DRIVES_DATA
            with patch(self.STORAGE_POOL_PROPERTY, new_callable=PropertyMock) as storage_pool:
                storage_pool.return_value = self.STORAGE_POOL_DATA[0]
                storagepool = self._initialize_dummy_instance(
                    {"state": "present", "name": "pool", "erase_secured_drives": False,
                     "criteria_drive_count": "15", "raid_level": "raidDiskPool"})
                storagepool.module.check_mode = True
                storagepool.is_drive_count_valid = lambda x: True
                storagepool.expand_storage_pool = lambda check_mode: (False, 0)
                storagepool.migrate_raid_level = lambda check_mode: False
                # Only securing the pool reports pending work.
                storagepool.secure_storage_pool = lambda check_mode: True
                storagepool.set_reserve_drive_count = lambda check_mode: False
                storagepool.apply()
    # Change present storage pool reserve drive count
    with self.assertRaisesRegexp(AnsibleExitJson, "'changed': True"):
        with patch(self.DRIVES_PROPERTY, new_callable=PropertyMock) as drives:
            drives.return_value = self.DRIVES_DATA
            with patch(self.STORAGE_POOL_PROPERTY, new_callable=PropertyMock) as storage_pool:
                storage_pool.return_value = self.STORAGE_POOL_DATA[0]
                storagepool = self._initialize_dummy_instance(
                    {"state": "present", "name": "pool", "erase_secured_drives": False,
                     "criteria_drive_count": "15", "raid_level": "raidDiskPool"})
                storagepool.module.check_mode = True
                storagepool.is_drive_count_valid = lambda x: True
                storagepool.expand_storage_pool = lambda check_mode: (False, 0)
                storagepool.migrate_raid_level = lambda check_mode: False
                storagepool.secure_storage_pool = lambda check_mode: False
                # Only the reserve-drive-count change reports pending work.
                storagepool.set_reserve_drive_count = lambda check_mode: True
                storagepool.apply()

View file

@ -1,123 +0,0 @@
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_syslog import Syslog
from ansible_collections.community.general.tests.unit.modules.utils import AnsibleFailJson, ModuleTestCase, set_module_args
__metaclass__ = type
from ansible_collections.community.general.tests.unit.compat import mock
class AsupTest(ModuleTestCase):
    """Unit tests for the netapp_e_syslog module's Syslog class.

    NOTE(review): the class name "AsupTest" looks like a copy/paste from the
    ASUP test module -- these tests exercise syslog configuration.  The name
    is kept unchanged so existing test selection by name keeps working.
    """

    # Minimum connection parameters every test invocation needs.
    REQUIRED_PARAMS = {
        "api_username": "rw",
        "api_password": "password",
        "api_url": "http://localhost",
    }
    # Dotted path of the request helper patched throughout these tests.
    REQ_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_syslog.request'

    def _set_args(self, args=None):
        """Merge REQUIRED_PARAMS with ``args`` and register them as module args."""
        module_args = self.REQUIRED_PARAMS.copy()
        if args is not None:
            module_args.update(args)
        set_module_args(module_args)

    def test_test_configuration_fail(self):
        """Validate test_configuration fails when request exception is thrown."""
        initial = {"state": "present",
                   "ssid": "1",
                   "address": "192.168.1.1",
                   "port": "514",
                   "protocol": "udp",
                   "components": ["auditLog"]}
        self._set_args(initial)
        syslog = Syslog()
        with self.assertRaisesRegexp(AnsibleFailJson, r"We failed to send test message!"):
            with mock.patch(self.REQ_FUNC, side_effect=Exception()):
                with mock.patch("time.sleep", return_value=None):  # mocking sleep is not working
                    syslog.test_configuration(self.REQUIRED_PARAMS)

    def test_update_configuration_record_match_pass(self):
        """Verify existing syslog server record match does not issue update request."""
        initial = {"state": "present",
                   "ssid": "1",
                   "address": "192.168.1.1",
                   "port": "514",
                   "protocol": "udp",
                   "components": ["auditLog"]}
        # Existing record matches the requested configuration exactly.
        expected = [{"id": "123456",
                     "serverAddress": "192.168.1.1",
                     "port": 514,
                     "protocol": "udp",
                     "components": [{"type": "auditLog"}]}]
        self._set_args(initial)
        syslog = Syslog()
        with mock.patch(self.REQ_FUNC, side_effect=[(200, expected), (200, None)]):
            updated = syslog.update_configuration()
            self.assertFalse(updated)

    def test_update_configuration_record_partial_match_pass(self):
        """Verify existing syslog server record partial match results in an update request."""
        initial = {"state": "present",
                   "ssid": "1",
                   "address": "192.168.1.1",
                   "port": "514",
                   "protocol": "tcp",
                   "components": ["auditLog"]}
        # Existing record differs only in protocol (udp vs requested tcp).
        expected = [{"id": "123456",
                     "serverAddress": "192.168.1.1",
                     "port": 514,
                     "protocol": "udp",
                     "components": [{"type": "auditLog"}]}]
        self._set_args(initial)
        syslog = Syslog()
        with mock.patch(self.REQ_FUNC, side_effect=[(200, expected), (200, None)]):
            updated = syslog.update_configuration()
            self.assertTrue(updated)

    def test_update_configuration_record_no_match_pass(self):
        """Verify that no matching syslog record results in a change being reported."""
        initial = {"state": "present",
                   "ssid": "1",
                   "address": "192.168.1.1",
                   "port": "514",
                   "protocol": "tcp",
                   "components": ["auditLog"]}
        # Existing record has a different server address, so nothing matches.
        expected = [{"id": "123456",
                     "serverAddress": "192.168.1.100",
                     "port": 514,
                     "protocol": "udp",
                     "components": [{"type": "auditLog"}]}]
        self._set_args(initial)
        syslog = Syslog()
        with mock.patch(self.REQ_FUNC, side_effect=[(200, expected), (200, dict(id=1234))]):
            updated = syslog.update_configuration()
            self.assertTrue(updated)

    def test_update_configuration_record_no_match_defaults_pass(self):
        """Verify that no matching syslog record results in a change being reported.

        NOTE(review): this body is currently byte-identical to
        test_update_configuration_record_no_match_pass; presumably it was
        intended to omit optional parameters and rely on module defaults --
        confirm intent before relying on its coverage.
        """
        initial = {"state": "present",
                   "ssid": "1",
                   "address": "192.168.1.1",
                   "port": "514",
                   "protocol": "tcp",
                   "components": ["auditLog"]}
        expected = [{"id": "123456",
                     "serverAddress": "192.168.1.100",
                     "port": 514,
                     "protocol": "udp",
                     "components": [{"type": "auditLog"}]}]
        self._set_args(initial)
        syslog = Syslog()
        with mock.patch(self.REQ_FUNC, side_effect=[(200, expected), (200, dict(id=1234))]):
            updated = syslog.update_configuration()
            self.assertTrue(updated)

View file

@ -1,870 +0,0 @@
# coding=utf-8
# (c) 2018, NetApp Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
try:
from unittest import mock
except ImportError:
import mock
from ansible_collections.netapp.ontap.plugins.module_utils.netapp import NetAppESeriesModule
from ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_volume import NetAppESeriesVolume
from ansible_collections.community.general.tests.unit.modules.utils import AnsibleFailJson, ModuleTestCase, set_module_args
class NetAppESeriesVolumeTest(ModuleTestCase):
REQUIRED_PARAMS = {"api_username": "username",
"api_password": "password",
"api_url": "http://localhost/devmgr/v2",
"ssid": "1",
"validate_certs": "no"}
THIN_VOLUME_RESPONSE = [{"capacity": "1288490188800",
"volumeRef": "3A000000600A098000A4B28D000010475C405428",
"status": "optimal",
"protectionType": "type1Protection",
"maxVirtualCapacity": "281474976710656",
"initialProvisionedCapacity": "4294967296",
"currentProvisionedCapacity": "4294967296",
"provisionedCapacityQuota": "1305670057984",
"growthAlertThreshold": 85,
"expansionPolicy": "automatic",
"flashCached": False,
"metadata": [{"key": "workloadId", "value": "4200000001000000000000000000000000000000"},
{"key": "volumeTypeId", "value": "volume"}],
"dataAssurance": True,
"segmentSize": 131072,
"diskPool": True,
"listOfMappings": [],
"mapped": False,
"currentControllerId": "070000000000000000000001",
"cacheSettings": {"readCacheEnable": True, "writeCacheEnable": True,
"readAheadMultiplier": 0},
"name": "thin_volume",
"id": "3A000000600A098000A4B28D000010475C405428"}]
VOLUME_GET_RESPONSE = [{"offline": False,
"raidLevel": "raid6",
"capacity": "214748364800",
"reconPriority": 1,
"segmentSize": 131072,
"volumeRef": "02000000600A098000A4B9D100000F095C2F7F31",
"status": "optimal",
"protectionInformationCapable": False,
"protectionType": "type0Protection",
"diskPool": True,
"flashCached": False,
"metadata": [{"key": "workloadId", "value": "4200000002000000000000000000000000000000"},
{"key": "volumeTypeId", "value": "Clare"}],
"dataAssurance": False,
"currentControllerId": "070000000000000000000002",
"cacheSettings": {"readCacheEnable": True, "writeCacheEnable": False,
"readAheadMultiplier": 0},
"thinProvisioned": False,
"totalSizeInBytes": "214748364800",
"name": "Matthew",
"id": "02000000600A098000A4B9D100000F095C2F7F31"},
{"offline": False,
"raidLevel": "raid6",
"capacity": "107374182400",
"reconPriority": 1,
"segmentSize": 131072,
"volumeRef": "02000000600A098000A4B28D00000FBE5C2F7F26",
"status": "optimal",
"protectionInformationCapable": False,
"protectionType": "type0Protection",
"diskPool": True,
"flashCached": False,
"metadata": [{"key": "workloadId", "value": "4200000002000000000000000000000000000000"},
{"key": "volumeTypeId", "value": "Samantha"}],
"dataAssurance": False,
"currentControllerId": "070000000000000000000001",
"cacheSettings": {"readCacheEnable": True, "writeCacheEnable": False,
"readAheadMultiplier": 0},
"thinProvisioned": False,
"totalSizeInBytes": "107374182400",
"name": "Samantha",
"id": "02000000600A098000A4B28D00000FBE5C2F7F26"},
{"offline": False,
"raidLevel": "raid6",
"capacity": "107374182400",
"segmentSize": 131072,
"volumeRef": "02000000600A098000A4B9D100000F0B5C2F7F40",
"status": "optimal",
"protectionInformationCapable": False,
"protectionType": "type0Protection",
"volumeGroupRef": "04000000600A098000A4B9D100000F085C2F7F26",
"diskPool": True,
"flashCached": False,
"metadata": [{"key": "workloadId", "value": "4200000002000000000000000000000000000000"},
{"key": "volumeTypeId", "value": "Micah"}],
"dataAssurance": False,
"currentControllerId": "070000000000000000000002",
"cacheSettings": {"readCacheEnable": True, "writeCacheEnable": False,
"readAheadMultiplier": 0},
"thinProvisioned": False,
"totalSizeInBytes": "107374182400",
"name": "Micah",
"id": "02000000600A098000A4B9D100000F0B5C2F7F40"}]
STORAGE_POOL_GET_RESPONSE = [{"offline": False,
"raidLevel": "raidDiskPool",
"volumeGroupRef": "04000000600A",
"securityType": "capable",
"protectionInformationCapable": False,
"protectionInformationCapabilities": {"protectionInformationCapable": True,
"protectionType": "type2Protection"},
"volumeGroupData": {"type": "diskPool",
"diskPoolData": {"reconstructionReservedDriveCount": 1,
"reconstructionReservedAmt": "296889614336",
"reconstructionReservedDriveCountCurrent": 1,
"poolUtilizationWarningThreshold": 0,
"poolUtilizationCriticalThreshold": 85,
"poolUtilizationState": "utilizationOptimal",
"unusableCapacity": "0",
"degradedReconstructPriority": "high",
"criticalReconstructPriority": "highest",
"backgroundOperationPriority": "low",
"allocGranularity": "4294967296"}},
"reservedSpaceAllocated": False,
"securityLevel": "fde",
"usedSpace": "863288426496",
"totalRaidedSpace": "2276332666880",
"raidStatus": "optimal",
"freeSpace": "1413044240384",
"drivePhysicalType": "sas",
"driveMediaType": "hdd",
"diskPool": True,
"id": "04000000600A098000A4B9D100000F085C2F7F26",
"name": "employee_data_storage_pool"},
{"offline": False,
"raidLevel": "raid1",
"volumeGroupRef": "04000000600A098000A4B28D00000FBD5C2F7F19",
"state": "complete",
"securityType": "capable",
"drawerLossProtection": False,
"protectionInformationCapable": False,
"protectionInformationCapabilities": {"protectionInformationCapable": True,
"protectionType": "type2Protection"},
"volumeGroupData": {"type": "unknown", "diskPoolData": None},
"reservedSpaceAllocated": False,
"securityLevel": "fde",
"usedSpace": "322122547200",
"totalRaidedSpace": "598926258176",
"raidStatus": "optimal",
"freeSpace": "276803710976",
"drivePhysicalType": "sas",
"driveMediaType": "hdd",
"diskPool": False,
"id": "04000000600A098000A4B28D00000FBD5C2F7F19",
"name": "database_storage_pool"}]
GET_LONG_LIVED_OPERATION_RESPONSE = [
{"returnCode": "ok",
"longLivedOpsProgress": [
{"volAction": "initializing", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
"init": {"volumeRef": "02000000600A098000A4B9D1000037315D494C6F", "pending": False, "percentComplete": 1, "timeToCompletion": 20},
"format": None, "volCreation": None, "volDeletion": None},
{"volAction": "initializing", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
"init": {"volumeRef": "02000000600A098000A4B28D00003D2C5D494C87", "pending": False, "percentComplete": 0, "timeToCompletion": 18},
"volCreation": None, "volDeletion": None}]},
{"returnCode": "ok",
"longLivedOpsProgress": [
{"volAction": "complete", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
"init": {"volumeRef": "02000000600A098000A4B9D1000037315D494C6F", "pending": False, "percentComplete": 1, "timeToCompletion": 20},
"format": None, "volCreation": None, "volDeletion": None},
{"volAction": "initializing", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
"init": {"volumeRef": "02000000600A098000A4B28D00003D2C5D494C87", "pending": False, "percentComplete": 0, "timeToCompletion": 18},
"volCreation": None, "volDeletion": None}]},
{"returnCode": "ok",
"longLivedOpsProgress": [
{"volAction": "initializing", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
"init": {"volumeRef": "02000000600A098000A4B9D1000037315D494C6F", "pending": False, "percentComplete": 1, "timeToCompletion": 20},
"format": None, "volCreation": None, "volDeletion": None},
{"volAction": "complete", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
"init": {"volumeRef": "02000000600A098000A4B28D00003D2C5D494C87", "pending": False, "percentComplete": 0, "timeToCompletion": 18},
"volCreation": None, "volDeletion": None}]},
{"returnCode": "ok",
"longLivedOpsProgress": [
{"volAction": "complete", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
"init": {"volumeRef": "02000000600A098000A4B9D1000037315D494C6F", "pending": False, "percentComplete": 1, "timeToCompletion": 20},
"format": None, "volCreation": None, "volDeletion": None},
{"volAction": "complete", "reconstruct": None, "volExpansion": None, "volAndCapExpansion": None,
"init": {"volumeRef": "02000000600A098000A4B28D00003D2C5D494C87", "pending": False, "percentComplete": 0, "timeToCompletion": 18},
"volCreation": None, "volDeletion": None}]}]
WORKLOAD_GET_RESPONSE = [{"id": "4200000001000000000000000000000000000000", "name": "general_workload_1",
"workloadAttributes": [{"key": "profileId", "value": "Other_1"}]},
{"id": "4200000002000000000000000000000000000000", "name": "employee_data",
"workloadAttributes": [{"key": "use", "value": "EmployeeData"},
{"key": "location", "value": "ICT"},
{"key": "private", "value": "public"},
{"key": "profileId", "value": "ansible_workload_1"}]},
{"id": "4200000003000000000000000000000000000000", "name": "customer_database",
"workloadAttributes": [{"key": "use", "value": "customer_information"},
{"key": "location", "value": "global"},
{"key": "profileId", "value": "ansible_workload_2"}]},
{"id": "4200000004000000000000000000000000000000", "name": "product_database",
"workloadAttributes": [{"key": "use", "value": "production_information"},
{"key": "security", "value": "private"},
{"key": "location", "value": "global"},
{"key": "profileId", "value": "ansible_workload_4"}]}]
# Dotted import paths patched by the volume tests below.
REQUEST_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_volume.NetAppESeriesVolume.request'
GET_VOLUME_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_volume.NetAppESeriesVolume.get_volume'
# sleep as imported into the module's namespace, so waits can be suppressed.
SLEEP_FUNC = 'ansible_collections.community.general.plugins.modules.storage.netapp.netapp_e_volume.sleep'
def _set_args(self, args=None):
module_args = self.REQUIRED_PARAMS.copy()
if args is not None:
module_args.update(args)
set_module_args(module_args)
    def test_module_arguments_pass(self):
        """Ensure valid arguments successfully create a class instance.

        Covers size normalization across several size units and thin-volume
        repository bounds, plus conversion of the metadata dict into a
        key/value entry list.
        """
        arg_sets = [{"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 100, "size_unit": "tb",
                     "thin_provision": True, "thin_volume_repo_size": 64, "thin_volume_max_repo_size": 1000,
                     "thin_volume_growth_alert_threshold": 10},
                    {"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 100, "size_unit": "gb",
                     "thin_provision": True, "thin_volume_repo_size": 64, "thin_volume_max_repo_size": 1024,
                     "thin_volume_growth_alert_threshold": 99},
                    {"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 100, "size_unit": "gb",
                     "thin_provision": True, "thin_volume_repo_size": 64},
                    {"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 100, "size_unit": "kb",
                     "thin_provision": True, "thin_volume_repo_size": 64, "thin_volume_max_repo_size": 67108864}]

        # validate size normalization
        for arg_set in arg_sets:
            self._set_args(arg_set)
            volume_object = NetAppESeriesVolume()

            self.assertEqual(volume_object.size_b, volume_object.convert_to_aligned_bytes(arg_set["size"]))
            self.assertEqual(volume_object.thin_volume_repo_size_b, volume_object.convert_to_aligned_bytes(arg_set["thin_volume_repo_size"]))
            self.assertEqual(volume_object.thin_volume_expansion_policy, "automatic")
            # when no explicit maximum repository size is supplied, it is
            # expected to default to the requested volume size
            if "thin_volume_max_repo_size" not in arg_set.keys():
                self.assertEqual(volume_object.thin_volume_max_repo_size_b, volume_object.convert_to_aligned_bytes(arg_set["size"]))
            else:
                self.assertEqual(volume_object.thin_volume_max_repo_size_b,
                                 volume_object.convert_to_aligned_bytes(arg_set["thin_volume_max_repo_size"]))

        # validate metadata form: the metadata dict must be flattened into
        # {"key": ..., "value": ...} entries (order not guaranteed)
        self._set_args(
            {"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 10, "workload_name": "workload1",
             "metadata": {"availability": "public", "security": "low"}})
        volume_object = NetAppESeriesVolume()
        for entry in volume_object.metadata:
            self.assertTrue(entry in [{'value': 'low', 'key': 'security'}, {'value': 'public', 'key': 'availability'}])
    def test_module_arguments_fail(self):
        """Ensure invalid arguments values do not create a class instance.

        Each argument set violates a thin-volume constraint (repository size
        bounds or growth-alert threshold range) and must raise AnsibleFailJson
        during construction.
        """
        arg_sets = [{"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 100, "size_unit": "tb",
                     "thin_provision": True, "thin_volume_repo_size": 260},
                    {"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 10000, "size_unit": "tb",
                     "thin_provision": True, "thin_volume_repo_size": 64, "thin_volume_max_repo_size": 10},
                    {"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 10000, "size_unit": "gb",
                     "thin_provision": True, "thin_volume_repo_size": 64, "thin_volume_max_repo_size": 1000,
                     "thin_volume_growth_alert_threshold": 9},
                    {"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 10000, "size_unit": "gb",
                     "thin_provision": True, "thin_volume_repo_size": 64, "thin_volume_max_repo_size": 1000,
                     "thin_volume_growth_alert_threshold": 100}]

        for arg_set in arg_sets:
            with self.assertRaises(AnsibleFailJson):
                self._set_args(arg_set)
                # printed so a failing set is identifiable in the test output
                print(arg_set)
                volume_object = NetAppESeriesVolume()
    def test_get_volume_pass(self):
        """Evaluate the get_volume method.

        The request mock serves the thick-volume list first and the
        thin-volume list second; an existing name must return its entry,
        an unknown name an empty dict.
        """
        with mock.patch(self.REQUEST_FUNC,
                        side_effect=[(200, self.VOLUME_GET_RESPONSE), (200, self.THIN_VOLUME_RESPONSE)]):
            self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100})
            volume_object = NetAppESeriesVolume()
            self.assertEqual(volume_object.get_volume(),
                             [entry for entry in self.VOLUME_GET_RESPONSE if entry["name"] == "Matthew"][0])

        with mock.patch(self.REQUEST_FUNC,
                        side_effect=[(200, self.VOLUME_GET_RESPONSE), (200, self.THIN_VOLUME_RESPONSE)]):
            self._set_args({"state": "present", "name": "NotAVolume", "storage_pool_name": "pool", "size": 100})
            volume_object = NetAppESeriesVolume()
            self.assertEqual(volume_object.get_volume(), {})
    def test_get_volume_fail(self):
        """Evaluate the get_volume exception paths.

        A failing thick-volume request and a failing thin-volume request
        (after a successful thick listing) must each surface as
        AnsibleFailJson with the matching message.
        """
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to obtain list of thick volumes."):
            # return_value=Exception() makes the request helper misbehave,
            # driving the module's error path
            with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
                self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100})
                volume_object = NetAppESeriesVolume()
                volume_object.get_volume()

        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to obtain list of thin volumes."):
            with mock.patch(self.REQUEST_FUNC, side_effect=[(200, self.VOLUME_GET_RESPONSE), Exception()]):
                self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100})
                volume_object = NetAppESeriesVolume()
                volume_object.get_volume()
    def tests_wait_for_volume_availability_pass(self):
        """Ensure wait_for_volume_availability completes as expected.

        get_volume is mocked to report the volume missing twice and present
        on the third poll; sleep is stubbed out so polling is instantaneous.
        """
        self._set_args({"state": "present", "name": "NewVolume", "storage_pool_name": "employee_data_storage_pool", "size": 100,
                        "wait_for_initialization": True})
        volume_object = NetAppESeriesVolume()
        with mock.patch(self.SLEEP_FUNC, return_value=None):
            with mock.patch(self.GET_VOLUME_FUNC, side_effect=[False, False, True]):
                volume_object.wait_for_volume_availability()
    def tests_wait_for_volume_availability_fail(self):
        """Ensure wait_for_volume_availability throws the expected exceptions.

        With get_volume pinned to always report the volume missing, polling
        must eventually give up with a timeout failure.
        """
        self._set_args({"state": "present", "name": "NewVolume", "storage_pool_name": "employee_data_storage_pool", "size": 100,
                        "wait_for_initialization": True})
        volume_object = NetAppESeriesVolume()
        # volume never appears
        volume_object.get_volume = lambda: False
        with self.assertRaisesRegexp(AnsibleFailJson, "Timed out waiting for the volume"):
            with mock.patch(self.SLEEP_FUNC, return_value=None):
                volume_object.wait_for_volume_availability()
    def tests_wait_for_volume_action_pass(self):
        """Ensure wait_for_volume_action completes as expected.

        The long-lived-operation endpoint is mocked with a progression of
        responses; the wait must finish whether the tracked operation is
        matched by the volume's id or by its storageVolumeRef.
        """
        self._set_args({"state": "present", "name": "NewVolume", "storage_pool_name": "employee_data_storage_pool", "size": 100,
                        "wait_for_initialization": True})
        volume_object = NetAppESeriesVolume()
        # operation matched via "id"
        volume_object.volume_detail = {"id": "02000000600A098000A4B9D1000037315D494C6F",
                                       "storageVolumeRef": "02000000600A098000A4B9D1000037315DXXXXXX"}
        with mock.patch(self.SLEEP_FUNC, return_value=None):
            with mock.patch(self.REQUEST_FUNC, side_effect=[(200, self.GET_LONG_LIVED_OPERATION_RESPONSE[0]),
                                                            (200, self.GET_LONG_LIVED_OPERATION_RESPONSE[1]),
                                                            (200, self.GET_LONG_LIVED_OPERATION_RESPONSE[2]),
                                                            (200, self.GET_LONG_LIVED_OPERATION_RESPONSE[3])]):
                volume_object.wait_for_volume_action()

        self._set_args({"state": "present", "name": "NewVolume", "storage_pool_name": "employee_data_storage_pool", "size": 100,
                        "wait_for_initialization": True})
        volume_object = NetAppESeriesVolume()
        # operation matched via "storageVolumeRef"
        volume_object.volume_detail = {"id": "02000000600A098000A4B9D1000037315DXXXXXX",
                                       "storageVolumeRef": "02000000600A098000A4B9D1000037315D494C6F"}
        with mock.patch(self.SLEEP_FUNC, return_value=None):
            with mock.patch(self.REQUEST_FUNC, side_effect=[(200, self.GET_LONG_LIVED_OPERATION_RESPONSE[0]),
                                                            (200, self.GET_LONG_LIVED_OPERATION_RESPONSE[1]),
                                                            (200, self.GET_LONG_LIVED_OPERATION_RESPONSE[2]),
                                                            (200, self.GET_LONG_LIVED_OPERATION_RESPONSE[3])]):
                volume_object.wait_for_volume_action()
    def tests_wait_for_volume_action_fail(self):
        """Ensure wait_for_volume_action throws the expected exceptions.

        Covers a failing progress request and an operation that never
        completes before the supplied timeout.
        """
        self._set_args({"state": "present", "name": "NewVolume", "storage_pool_name": "employee_data_storage_pool", "size": 100,
                        "wait_for_initialization": True})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"id": "02000000600A098000A4B9D1000037315DXXXXXX",
                                       "storageVolumeRef": "02000000600A098000A4B9D1000037315D494C6F"}
        with mock.patch(self.SLEEP_FUNC, return_value=None):
            with self.assertRaisesRegexp(AnsibleFailJson, "Failed to get volume expansion progress."):
                with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
                    volume_object.wait_for_volume_action()

            with self.assertRaisesRegexp(AnsibleFailJson, "Expansion action failed to complete."):
                # every poll returns the same still-in-progress response
                with mock.patch(self.REQUEST_FUNC, return_value=(200, self.GET_LONG_LIVED_OPERATION_RESPONSE[0])):
                    volume_object.wait_for_volume_action(timeout=300)
    def test_get_storage_pool_pass(self):
        """Evaluate the get_storage_pool method.

        A known pool name must return the matching entry from the mocked
        pool listing; an unknown name must return an empty dict.
        """
        with mock.patch(self.REQUEST_FUNC, return_value=(200, self.STORAGE_POOL_GET_RESPONSE)):
            self._set_args({"state": "present", "name": "NewVolume", "storage_pool_name": "employee_data_storage_pool",
                            "size": 100})
            volume_object = NetAppESeriesVolume()
            self.assertEqual(volume_object.get_storage_pool(), [entry for entry in self.STORAGE_POOL_GET_RESPONSE if
                                                                entry["name"] == "employee_data_storage_pool"][0])

            self._set_args(
                {"state": "present", "name": "NewVolume", "storage_pool_name": "NotAStoragePool", "size": 100})
            volume_object = NetAppESeriesVolume()
            self.assertEqual(volume_object.get_storage_pool(), {})
def test_get_storage_pool_fail(self):
"""Evaluate the get_storage_pool exception paths."""
with self.assertRaisesRegexp(AnsibleFailJson, "Failed to obtain list of storage pools."):
with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100})
volume_object = NetAppESeriesVolume()
volume_object.get_storage_pool()
    def test_check_storage_pool_sufficiency_pass(self):
        """Ensure passing logic.

        With a pool detail taken straight from the fixture listing,
        check_storage_pool_sufficiency must not raise.
        """
        self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100})
        volume_object = NetAppESeriesVolume()
        volume_object.pool_detail = [entry for entry in self.STORAGE_POOL_GET_RESPONSE
                                     if entry["name"] == "employee_data_storage_pool"][0]
        volume_object.check_storage_pool_sufficiency()
    def test_check_storage_pool_sufficiency_fail(self):
        """Validate exceptions are thrown for insufficient storage pool resources."""
        self._set_args({"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 100, "size_unit": "tb",
                        "thin_provision": True, "thin_volume_repo_size": 64, "thin_volume_max_repo_size": 1000,
                        "thin_volume_growth_alert_threshold": 10})
        volume_object = NetAppESeriesVolume()

        # pool detail never populated -> requested pool not found
        with self.assertRaisesRegexp(AnsibleFailJson, "Requested storage pool"):
            volume_object.check_storage_pool_sufficiency()

        # thin volumes require a raid disk pool
        with self.assertRaisesRegexp(AnsibleFailJson,
                                     "Thin provisioned volumes can only be created on raid disk pools."):
            volume_object.pool_detail = [entry for entry in self.STORAGE_POOL_GET_RESPONSE
                                         if entry["name"] == "database_storage_pool"][0]
            volume_object.volume_detail = {}
            volume_object.check_storage_pool_sufficiency()

        # data assurance requested but pool is not DA-capable
        with self.assertRaisesRegexp(AnsibleFailJson, "requires the storage pool to be DA-compatible."):
            volume_object.pool_detail = {"diskPool": True,
                                         "protectionInformationCapabilities": {"protectionType": "type0Protection",
                                                                               "protectionInformationCapable": False}}
            volume_object.volume_detail = {}
            volume_object.data_assurance_enabled = True
            volume_object.check_storage_pool_sufficiency()

        # a DA-capable pool must pass the same check
        volume_object.pool_detail = {"diskPool": True,
                                     "protectionInformationCapabilities": {"protectionType": "type2Protection",
                                                                           "protectionInformationCapable": True}}
        volume_object.check_storage_pool_sufficiency()

        self._set_args({"state": "present", "name": "vol", "storage_pool_name": "pool", "size": 100, "size_unit": "tb",
                        "thin_provision": False})
        volume_object = NetAppESeriesVolume()
        # thick volume request larger than the pool's remaining free space
        with self.assertRaisesRegexp(AnsibleFailJson,
                                     "Not enough storage pool free space available for the volume's needs."):
            volume_object.pool_detail = {"freeSpace": 10, "diskPool": True,
                                         "protectionInformationCapabilities": {"protectionType": "type2Protection",
                                                                               "protectionInformationCapable": True}}
            volume_object.volume_detail = {"totalSizeInBytes": 100}
            volume_object.data_assurance_enabled = True
            volume_object.size_b = 1
            volume_object.check_storage_pool_sufficiency()
    def test_update_workload_tags_pass(self):
        """Validate updating workload tags.

        Each test set pairs module arguments with the expected changed flag:
        existing workloads with matching metadata report no change, while
        differing metadata or a new workload name report a change.
        """
        test_sets = [[{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100}, False],
                     [{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
                       "workload_name": "employee_data"}, False],
                     [{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
                       "workload_name": "customer_database",
                       "metadata": {"use": "customer_information", "location": "global"}}, False],
                     [{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
                       "workload_name": "customer_database",
                       "metadata": {"use": "customer_information"}}, True],
                     [{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
                       "workload_name": "customer_database",
                       "metadata": {"use": "customer_information", "location": "local"}}, True],
                     [{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
                       "workload_name": "customer_database",
                       "metadata": {"use": "customer_information", "location": "global", "importance": "no"}}, True],
                     [{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
                       "workload_name": "newWorkload",
                       "metadata": {"for_testing": "yes"}}, True],
                     [{"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
                       "workload_name": "newWorkload"}, True]]
        for test in test_sets:
            self._set_args(test[0])
            volume_object = NetAppESeriesVolume()

            # first request serves the existing workload tags, second mocks
            # the creation/update response
            with mock.patch(self.REQUEST_FUNC, side_effect=[(200, self.WORKLOAD_GET_RESPONSE), (200, {"id": 1})]):
                self.assertEqual(volume_object.update_workload_tags(), test[1])
    def test_update_workload_tags_fail(self):
        """Validate updating workload tags fails appropriately.

        Covers a failing workload-tag retrieval and two failing
        creation/update requests after a successful retrieval.
        """
        self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
                        "workload_name": "employee_data"})
        volume_object = NetAppESeriesVolume()
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to retrieve storage array workload tags."):
            with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
                volume_object.update_workload_tags()

        self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
                        "workload_name": "employee_data", "metadata": {"key": "not-use", "value": "EmployeeData"}})
        volume_object = NetAppESeriesVolume()
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to create new workload tag."):
            with mock.patch(self.REQUEST_FUNC, side_effect=[(200, self.WORKLOAD_GET_RESPONSE), Exception()]):
                volume_object.update_workload_tags()

        self._set_args({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
                        "workload_name": "employee_data2", "metadata": {"key": "use", "value": "EmployeeData"}})
        volume_object = NetAppESeriesVolume()
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to create new workload tag."):
            with mock.patch(self.REQUEST_FUNC, side_effect=[(200, self.WORKLOAD_GET_RESPONSE), Exception()]):
                volume_object.update_workload_tags()
    def test_get_volume_property_changes_pass(self):
        """Verify correct dictionary is returned.

        When the existing volume detail already matches the requested
        options, get_volume_property_changes must return an empty dict;
        otherwise it must return only the properties that need updating.
        """
        # no property changes
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
             "read_cache_enable": True, "write_cache_enable": True,
             "read_ahead_enable": True, "thin_provision": False})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"metadata": [],
                                       "cacheSettings": {"cwob": False, "readCacheEnable": True, "writeCacheEnable": True,
                                                         "readAheadMultiplier": 1}, "flashCached": True,
                                       "segmentSize": str(128 * 1024)}
        self.assertEqual(volume_object.get_volume_property_changes(), dict())

        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
             "read_cache_enable": True, "write_cache_enable": True,
             "read_ahead_enable": True, "thin_provision": True, "thin_volume_repo_size": 64,
             "thin_volume_max_repo_size": 1000, "thin_volume_growth_alert_threshold": 90})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"metadata": [],
                                       "cacheSettings": {"cwob": False, "readCacheEnable": True, "writeCacheEnable": True,
                                                         "readAheadMultiplier": 1},
                                       "flashCached": True, "growthAlertThreshold": "90",
                                       "expansionPolicy": "automatic", "segmentSize": str(128 * 1024)}
        self.assertEqual(volume_object.get_volume_property_changes(), dict())

        # property changes
        # existing volume has read cache disabled -> change expected
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
             "read_cache_enable": True, "write_cache_enable": True,
             "read_ahead_enable": True, "thin_provision": False})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"metadata": [],
                                       "cacheSettings": {"cwob": False, "readCacheEnable": False, "writeCacheEnable": True,
                                                         "readAheadMultiplier": 1}, "flashCached": True,
                                       "segmentSize": str(128 * 1024)}
        self.assertEqual(volume_object.get_volume_property_changes(),
                         {"metaTags": [], 'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True},
                          'flashCache': True})

        # existing volume has write cache disabled -> change expected
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
             "read_cache_enable": True, "write_cache_enable": True, "cache_without_batteries": False,
             "read_ahead_enable": True, "thin_provision": False})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"metadata": [],
                                       "cacheSettings": {"cwob": False, "readCacheEnable": True, "writeCacheEnable": False,
                                                         "readAheadMultiplier": 1}, "flashCached": True,
                                       "segmentSize": str(128 * 1024)}
        self.assertEqual(volume_object.get_volume_property_changes(),
                         {"metaTags": [], 'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True},
                          'flashCache': True})

        # cache-without-batteries requested and ssd cache not yet enabled
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
             "read_cache_enable": True, "write_cache_enable": True, "cache_without_batteries": True,
             "read_ahead_enable": True, "thin_provision": False})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"metadata": [],
                                       "cacheSettings": {"cwob": False, "readCacheEnable": True, "writeCacheEnable": True,
                                                         "readAheadMultiplier": 1}, "flashCached": False,
                                       "segmentSize": str(128 * 1024)}
        self.assertEqual(volume_object.get_volume_property_changes(),
                         {"metaTags": [], 'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True, "cacheWithoutBatteries": True},
                          'flashCache': True})

        # read-ahead disabled in the request but enabled on the volume
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
             "read_cache_enable": True, "write_cache_enable": True, "cache_without_batteries": True,
             "read_ahead_enable": False, "thin_provision": False})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"metadata": [],
                                       "cacheSettings": {"cwob": False, "readCacheEnable": True, "writeCacheEnable": True,
                                                         "readAheadMultiplier": 1}, "flashCached": False,
                                       "segmentSize": str(128 * 1024)}
        self.assertEqual(volume_object.get_volume_property_changes(), {"metaTags": [],
                                                                       'cacheSettings': {'readCacheEnable': True,
                                                                                         'writeCacheEnable': True,
                                                                                         'readAheadEnable': False,
                                                                                         "cacheWithoutBatteries": True},
                                                                       'flashCache': True})

        # thin volume growth-alert threshold differs (95 on volume vs 90 requested)
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
             "read_cache_enable": True, "write_cache_enable": True,
             "read_ahead_enable": True, "thin_provision": True, "thin_volume_repo_size": 64,
             "thin_volume_max_repo_size": 1000, "thin_volume_growth_alert_threshold": 90})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"metadata": [],
                                       "cacheSettings": {"cwob": True, "readCacheEnable": True, "writeCacheEnable": True,
                                                         "readAheadMultiplier": 1},
                                       "flashCached": True, "growthAlertThreshold": "95",
                                       "expansionPolicy": "automatic", "segmentSize": str(128 * 1024)}
        self.assertEqual(volume_object.get_volume_property_changes(),
                         {"metaTags": [], 'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True},
                          'growthAlertThreshold': 90, 'flashCache': True})
    def test_get_volume_property_changes_fail(self):
        """Verify correct exception is thrown.

        A segment size on the existing volume (512k) differing from the
        requested configuration must raise AnsibleFailJson, since segment
        size cannot be changed in place.
        """
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "ssd_cache_enabled": True,
             "read_cache_enable": True, "write_cache_enable": True, "read_ahead_enable": True, "thin_provision": False})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {
            "cacheSettings": {"cwob": False, "readCacheEnable": True, "writeCacheEnable": True, "readAheadMultiplier": 1},
            "flashCached": True, "segmentSize": str(512 * 1024)}
        with self.assertRaisesRegexp(AnsibleFailJson, "Existing volume segment size is"):
            volume_object.get_volume_property_changes()
    def test_get_expand_volume_changes_pass(self):
        """Verify expansion changes.

        Checks the request body produced for thick-volume expansion and for
        thin volumes under both automatic and manual expansion policies.
        """
        # thick volumes: 50 GiB existing -> 100 GiB requested
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"capacity": str(50 * 1024 * 1024 * 1024), "thinProvisioned": False}
        self.assertEqual(volume_object.get_expand_volume_changes(),
                         {"sizeUnit": "bytes", "expansionSize": 100 * 1024 * 1024 * 1024})

        # thin volumes
        # automatic policy, virtual size grows
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
             "thin_volume_expansion_policy": "automatic", "thin_volume_repo_size": 64,
             "thin_volume_max_repo_size": 1000, "thin_volume_growth_alert_threshold": 90})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"capacity": str(50 * 1024 * 1024 * 1024), "thinProvisioned": True,
                                       "expansionPolicy": "automatic",
                                       "provisionedCapacityQuota": str(1000 * 1024 * 1024 * 1024)}
        self.assertEqual(volume_object.get_expand_volume_changes(),
                         {"sizeUnit": "bytes", "newVirtualSize": 100 * 1024 * 1024 * 1024})

        # automatic policy, repository quota grows
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
             "thin_volume_expansion_policy": "automatic", "thin_volume_repo_size": 64,
             "thin_volume_max_repo_size": 1000, "thin_volume_growth_alert_threshold": 90})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"capacity": str(100 * 1024 * 1024 * 1024), "thinProvisioned": True,
                                       "expansionPolicy": "automatic",
                                       "provisionedCapacityQuota": str(500 * 1024 * 1024 * 1024)}
        self.assertEqual(volume_object.get_expand_volume_changes(),
                         {"sizeUnit": "bytes", "newRepositorySize": 1000 * 1024 * 1024 * 1024})

        # manual policy, repository grows to the requested size
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
             "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 504, "thin_volume_max_repo_size": 1000,
             "thin_volume_growth_alert_threshold": 90})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"capacity": str(100 * 1024 * 1024 * 1024), "thinProvisioned": True,
                                       "expansionPolicy": "manual",
                                       "currentProvisionedCapacity": str(500 * 1024 * 1024 * 1024)}
        self.assertEqual(volume_object.get_expand_volume_changes(),
                         {"sizeUnit": "bytes", "newRepositorySize": 504 * 1024 * 1024 * 1024})

        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
             "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 756, "thin_volume_max_repo_size": 1000,
             "thin_volume_growth_alert_threshold": 90})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"capacity": str(100 * 1024 * 1024 * 1024), "thinProvisioned": True,
                                       "expansionPolicy": "manual",
                                       "currentProvisionedCapacity": str(500 * 1024 * 1024 * 1024)}
        self.assertEqual(volume_object.get_expand_volume_changes(),
                         {"sizeUnit": "bytes", "newRepositorySize": 756 * 1024 * 1024 * 1024})
    def test_get_expand_volume_changes_fail(self):
        """Verify exceptions are thrown.

        Covers shrinking a volume (not permitted) and manual thin-volume
        repository increases that fall outside the allowed bounds.
        """
        # existing capacity (1000 GiB) exceeds the requested size
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"capacity": str(1000 * 1024 * 1024 * 1024)}
        with self.assertRaisesRegexp(AnsibleFailJson, "Reducing the size of volumes is not permitted."):
            volume_object.get_expand_volume_changes()

        # manual repository increase below the allowed bound
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
             "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 502, "thin_volume_max_repo_size": 1000,
             "thin_volume_growth_alert_threshold": 90})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"capacity": str(100 * 1024 * 1024 * 1024), "thinProvisioned": True,
                                       "expansionPolicy": "manual",
                                       "currentProvisionedCapacity": str(500 * 1024 * 1024 * 1024)}
        with self.assertRaisesRegexp(AnsibleFailJson, "The thin volume repository increase must be between or equal"):
            volume_object.get_expand_volume_changes()

        # manual repository increase above the allowed bound
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
             "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 760, "thin_volume_max_repo_size": 1000,
             "thin_volume_growth_alert_threshold": 90})
        volume_object = NetAppESeriesVolume()
        volume_object.volume_detail = {"capacity": str(100 * 1024 * 1024 * 1024), "thinProvisioned": True,
                                       "expansionPolicy": "manual",
                                       "currentProvisionedCapacity": str(500 * 1024 * 1024 * 1024)}
        with self.assertRaisesRegexp(AnsibleFailJson, "The thin volume repository increase must be between or equal"):
            volume_object.get_expand_volume_changes()
    def test_create_volume_pass(self):
        """Verify volume creation.

        With the request helper mocked to succeed, create_volume must
        complete for both a thick and a thin volume configuration.
        """
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
        volume_object = NetAppESeriesVolume()
        volume_object.pool_detail = {"id": "12345"}
        with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
            volume_object.create_volume()

        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
             "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 760, "thin_volume_max_repo_size": 1000,
             "thin_volume_growth_alert_threshold": 90})
        volume_object = NetAppESeriesVolume()
        volume_object.pool_detail = {"id": "12345"}
        with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
            volume_object.create_volume()
    def test_create_volume_fail(self):
        """Verify exceptions thrown.

        A misbehaving request helper must produce the thick- and
        thin-volume-specific creation failure messages.
        """
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
        volume_object = NetAppESeriesVolume()
        volume_object.pool_detail = {"id": "12345"}
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to create volume."):
            with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
                volume_object.create_volume()

        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
             "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 760, "thin_volume_max_repo_size": 1000,
             "thin_volume_growth_alert_threshold": 90})
        volume_object = NetAppESeriesVolume()
        volume_object.pool_detail = {"id": "12345"}
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to create thin volume."):
            with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
                volume_object.create_volume()
    def test_update_volume_properties_pass(self):
        """verify property update.

        update_volume_properties must return True when property changes are
        pending (thick and thin cases) and False when there is nothing to do.
        """
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
        volume_object = NetAppESeriesVolume()
        volume_object.pool_detail = {"id": "12345"}
        # stub out the collaborators so only the update call itself is exercised
        volume_object.wait_for_volume_availability = lambda: None
        volume_object.get_volume = lambda: {"id": "12345'"}
        volume_object.get_volume_property_changes = lambda: {
            'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True}, 'growthAlertThreshold': 90,
            'flashCached': True}
        volume_object.workload_id = "4200000001000000000000000000000000000000"
        with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
            self.assertTrue(volume_object.update_volume_properties())

        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
             "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 760, "thin_volume_max_repo_size": 1000,
             "thin_volume_growth_alert_threshold": 90})
        volume_object = NetAppESeriesVolume()
        volume_object.pool_detail = {"id": "12345"}
        volume_object.wait_for_volume_availability = lambda: None
        volume_object.get_volume = lambda: {"id": "12345'"}
        volume_object.get_volume_property_changes = lambda: {
            'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True}, 'growthAlertThreshold': 90,
            'flashCached': True}
        volume_object.workload_id = "4200000001000000000000000000000000000000"
        with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
            self.assertTrue(volume_object.update_volume_properties())

        # no pending property changes -> no request made, returns False
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
        volume_object = NetAppESeriesVolume()
        volume_object.pool_detail = {"metadata": [{"key": "workloadId", "value": "12345"}]}
        volume_object.wait_for_volume_availability = lambda: None
        volume_object.get_volume = lambda: {"id": "12345'"}
        volume_object.get_volume_property_changes = lambda: {}
        volume_object.workload_id = "4200000001000000000000000000000000000000"
        self.assertFalse(volume_object.update_volume_properties())
    def test_update_volume_properties_fail(self):
        """Verify exceptions are thrown.

        A failing update request must yield the thick- and
        thin-volume-specific property-update failure messages.
        """
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
        volume_object = NetAppESeriesVolume()
        volume_object.pool_detail = {"id": "12345"}
        volume_object.wait_for_volume_availability = lambda: None
        volume_object.get_volume = lambda: {"id": "12345'"}
        volume_object.get_volume_property_changes = lambda: {
            'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True}, 'growthAlertThreshold': 90,
            'flashCached': True}
        volume_object.workload_id = "4200000001000000000000000000000000000000"
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to update volume properties."):
            with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
                self.assertTrue(volume_object.update_volume_properties())

        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
             "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 760, "thin_volume_max_repo_size": 1000,
             "thin_volume_growth_alert_threshold": 90})
        volume_object = NetAppESeriesVolume()
        volume_object.pool_detail = {"id": "12345"}
        volume_object.wait_for_volume_availability = lambda: None
        volume_object.get_volume = lambda: {"id": "12345'"}
        volume_object.get_volume_property_changes = lambda: {
            'cacheSettings': {'readCacheEnable': True, 'writeCacheEnable': True}, 'growthAlertThreshold': 90,
            'flashCached': True}
        volume_object.workload_id = "4200000001000000000000000000000000000000"
        with self.assertRaisesRegexp(AnsibleFailJson, "Failed to update thin volume properties."):
            with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
                self.assertTrue(volume_object.update_volume_properties())
    def test_expand_volume_pass(self):
        """Verify volume expansion.

        With get_expand_volume_changes stubbed and the request mocked to
        succeed, expand_volume must complete for thick and thin arguments.
        """
        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False})
        volume_object = NetAppESeriesVolume()
        volume_object.get_expand_volume_changes = lambda: {"sizeUnit": "bytes",
                                                           "expansionSize": 100 * 1024 * 1024 * 1024}
        volume_object.volume_detail = {"id": "12345", "thinProvisioned": True}
        with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
            volume_object.expand_volume()

        self._set_args(
            {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
             "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 760, "thin_volume_max_repo_size": 1000,
             "thin_volume_growth_alert_threshold": 90})
        volume_object = NetAppESeriesVolume()
        volume_object.get_expand_volume_changes = lambda: {"sizeUnit": "bytes",
                                                           "expansionSize": 100 * 1024 * 1024 * 1024}
        volume_object.volume_detail = {"id": "12345", "thinProvisioned": True}
        with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
            volume_object.expand_volume()
def test_expand_volume_fail(self):
    """Verify exceptions are thrown."""
    # Each case: (module args, thinProvisioned flag on the volume, expected failure message).
    cases = [
        ({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
          "thin_provision": False},
         False, "Failed to expand volume."),
        ({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
          "thin_provision": True},
         True, "Failed to expand thin volume."),
    ]
    for args, thin_flag, expected_error in cases:
        self._set_args(args)
        volume = NetAppESeriesVolume()
        volume.get_expand_volume_changes = lambda: {"sizeUnit": "bytes",
                                                    "expansionSize": 100 * 1024 * 1024 * 1024}
        volume.volume_detail = {"id": "12345", "thinProvisioned": thin_flag}
        # The mocked request returns an Exception instance (not a (rc, data) tuple),
        # which the module reports as an expansion failure.
        with self.assertRaisesRegexp(AnsibleFailJson, expected_error):
            with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
                volume.expand_volume()
def test_delete_volume_pass(self):
    """Verify volume deletion."""
    # Exercise deletion for both a thick and a thin volume configuration.
    arg_sets = [
        {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": False},
        {"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100, "thin_provision": True,
         "thin_volume_expansion_policy": "manual", "thin_volume_repo_size": 760, "thin_volume_max_repo_size": 1000,
         "thin_volume_growth_alert_threshold": 90},
    ]
    for args in arg_sets:
        self._set_args(args)
        volume = NetAppESeriesVolume()
        volume.volume_detail = {"id": "12345"}
        # A (200, {}) response from the mocked request means the deletion succeeded.
        with mock.patch(self.REQUEST_FUNC, return_value=(200, {})):
            volume.delete_volume()
def test_delete_volume_fail(self):
    """Verify exceptions are thrown."""
    # Each case: (module args, expected failure message).
    cases = [
        ({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
          "thin_provision": False},
         "Failed to delete volume."),
        ({"state": "present", "name": "Matthew", "storage_pool_name": "pool", "size": 100,
          "thin_provision": True},
         "Failed to delete thin volume."),
    ]
    for args, expected_error in cases:
        self._set_args(args)
        volume = NetAppESeriesVolume()
        # The mocked request returns an Exception instance (not a (rc, data) tuple),
        # which the module reports as a deletion failure.
        with self.assertRaisesRegexp(AnsibleFailJson, expected_error):
            with mock.patch(self.REQUEST_FUNC, return_value=Exception()):
                volume.delete_volume()

View file

@ -67,7 +67,6 @@ retry ansible-galaxy -vvv collection install f5networks.f5_modules
retry ansible-galaxy -vvv collection install fortinet.fortios
retry ansible-galaxy -vvv collection install cisco.aci
retry ansible-galaxy -vvv collection install google.cloud
retry ansible-galaxy -vvv collection install netapp.ontap
# unit tests
retry ansible-galaxy -vvv collection install cisco.meraki