Netbox site module (#53219)
* Created netbox_interface module and updated netbox_utils
* Fixed PEP issues and documentation issue for ASN type
* tenant API endpoint doesn't support slug currently, changed to name, fixed user issue
* Updated documentation to include types
* Updated argument_spec to include required sub options of data
* Fixed formatting of argument spec - missing parenthesis in data options
* Fixed syntax error
* refactored to use shared functions create()/delete()/update()
* Fixed PEP issues
This commit is contained in:
parent eb7735000e
commit 0b1470a3c9

2 changed files with 616 additions and 77 deletions
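The "refactored to use shared functions create()/delete()/update()" bullet above refers to the new helpers added to netbox_utils.py in the first diff below. As orientation, here is a minimal sketch (not part of the commit; the function names ensure_present/ensure_absent and the variable existing are illustrative) of the call pattern those helpers are designed for, distilled from ensure_site_present()/ensure_site_absent() in the new netbox_site module; nb_endpoint stands for any pynetbox endpoint and data for the module's normalized data dict:

# Illustrative sketch, not part of this commit.
from ansible.module_utils.net_tools.netbox.netbox_utils import (
    create_netbox_object,
    delete_netbox_object,
    update_netbox_object,
)


def ensure_present(nb_endpoint, data, check_mode):
    # nb_endpoint is a pynetbox endpoint (e.g. nb.dcim.sites), data the module's data dict
    existing = nb_endpoint.get(slug=data["slug"])
    if not existing:
        # returns (serialized_object, diff); skips the API call in check mode
        return create_netbox_object(nb_endpoint, data, check_mode)
    # diff is None when the existing object already matches data
    return update_netbox_object(existing, data, check_mode)


def ensure_absent(nb_endpoint, data, check_mode):
    existing = nb_endpoint.get(slug=data["slug"])
    if existing:
        return delete_netbox_object(existing, check_mode)
    return None, None

Each helper returns a (serialized_object, diff) pair so the calling module can feed Ansible's diff mode directly.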
lib/ansible/module_utils/net_tools/netbox/netbox_utils.py (changed)
@@ -7,16 +7,27 @@ __metaclass__ = type

 API_APPS_ENDPOINTS = dict(
     circuits=[],
-    dcim=["device_roles", "device_types", "devices", "interfaces", "platforms", "racks", "sites"],
+    dcim=[
+        "devices",
+        "device_roles",
+        "device_types",
+        "devices",
+        "interfaces",
+        "platforms",
+        "racks",
+        "regions",
+        "sites",
+    ],
     extras=[],
-    ipam=["ip_addresses", "prefixes", "vrfs"],
+    ipam=["ip_addresses", "prefixes", "roles", "vlans", "vlan_groups", "vrfs"],
     secrets=[],
     tenancy=["tenants", "tenant_groups"],
-    virtualization=["clusters"]
+    virtualization=["clusters"],
 )

 QUERY_TYPES = dict(
     cluster="name",
+    devices="name",
     device_role="slug",
     device_type="slug",
     manufacturer="slug",
@@ -28,17 +39,23 @@ QUERY_TYPES = dict(
     primary_ip6="address",
     rack="slug",
     region="slug",
+    role="slug",
     site="slug",
-    tenant="slug",
+    tenant="name",
     tenant_group="slug",
-    vrf="name"
+    time_zone="timezone",
+    vlan="name",
+    vlan_group="slug",
+    vrf="name",
 )

 CONVERT_TO_ID = dict(
     cluster="clusters",
+    device="devices",
     device_role="device_roles",
     device_type="device_types",
     interface="interfaces",
+    lag="interfaces",
     nat_inside="ip_addresses",
     nat_outside="ip_addresses",
     platform="platforms",
@@ -46,65 +63,193 @@ CONVERT_TO_ID = dict(
     primary_ip4="ip_addresses",
     primary_ip6="ip_addresses",
     rack="racks",
+    region="regions",
+    role="roles",
     site="sites",
+    tagged_vlans="vlans",
     tenant="tenants",
     tenant_group="tenant_groups",
-    vrf="vrfs"
+    untagged_vlan="vlans",
+    vlan="vlans",
+    vlan_group="vlan_groups",
+    vrf="vrfs",
 )

-FACE_ID = dict(
-    front=0,
-    rear=1
-)
+FACE_ID = dict(front=0, rear=1)

-NO_DEFAULT_ID = set([
-    "primary_ip",
-    "primary_ip4",
-    "primary_ip6",
-    "vrf",
-    "nat_inside",
-    "nat_outside"
-])
+NO_DEFAULT_ID = set(
+    [
+        "device",
+        "lag",
+        "primary_ip",
+        "primary_ip4",
+        "primary_ip6",
+        "role",
+        "vlan",
+        "vrf",
+        "nat_inside",
+        "nat_outside",
+        "region",
+        "untagged_vlan",
+        "tagged_vlans",
+        "tenant",
+    ]
+)

-DEVICE_STATUS = dict(
-    offline=0,
-    active=1,
-    planned=2,
-    staged=3,
-    failed=4,
-    inventory=5
-)
+DEVICE_STATUS = dict(offline=0, active=1, planned=2, staged=3, failed=4, inventory=5)

-IP_ADDRESS_STATUS = dict(
-    active=1,
-    reserved=2,
-    deprecated=3,
-    dhcp=5
-)
+IP_ADDRESS_STATUS = dict(active=1, reserved=2, deprecated=3, dhcp=5)

 IP_ADDRESS_ROLE = dict(
-    loopback=10,
-    secondary=20,
-    anycast=30,
-    vip=40,
-    vrrp=41,
-    hsrp=42,
-    glbp=43,
-    carp=44
+    loopback=10, secondary=20, anycast=30, vip=40, vrrp=41, hsrp=42, glbp=43, carp=44
 )

-PREFIX_STATUS = dict(
-    container=0,
-    active=1,
-    reserved=2,
-    deprecated=3
-)
+PREFIX_STATUS = dict(container=0, active=1, reserved=2, deprecated=3)

-VLAN_STATUS = dict(
-    active=1,
-    reserved=2,
-    deprecated=3
-)
+VLAN_STATUS = dict(active=1, reserved=2, deprecated=3)
+
+SITE_STATUS = dict(active=1, planned=2, retired=4)
+
+INTF_FORM_FACTOR = {
+    "virtual": 0,
+    "link aggregation group (lag)": 200,
+    "100base-tx (10/100me)": 800,
+    "1000base-t (1ge)": 1000,
+    "10gbase-t (10ge)": 1150,
+    "10gbase-cx4 (10ge)": 1170,
+    "gbic (1ge)": 1050,
+    "sfp (1ge)": 1100,
+    "sfp+ (10ge)": 1200,
+    "xfp (10ge)": 1300,
+    "xenpak (10ge)": 1310,
+    "x2 (10ge)": 1320,
+    "sfp28 (25ge)": 1350,
+    "qsfp+ (40ge)": 1400,
+    "cfp (100ge)": 1500,
+    "cfp2 (100ge)": 1510,
+    "cfp2 (200ge)": 1650,
+    "cfp4 (100ge)": 1520,
+    "cisco cpak (100ge)": 1550,
+    "qsfp28 (100ge)": 1600,
+    "qsfp56 (200ge)": 1700,
+    "qsfp-dd (400ge)": 1750,
+    "ieee 802.11a": 2600,
+    "ieee 802.11b/g": 2610,
+    "ieee 802.11n": 2620,
+    "ieee 802.11ac": 2630,
+    "ieee 802.11ad": 2640,
+    "gsm": 2810,
+    "cdma": 2820,
+    "lte": 2830,
+    "oc-3/stm-1": 6100,
+    "oc-12/stm-4": 6200,
+    "oc-48/stm-16": 6300,
+    "oc-192/stm-64": 6400,
+    "oc-768/stm-256": 6500,
+    "oc-1920/stm-640": 6600,
+    "oc-3840/stm-1234": 6700,
+    "sfp (1gfc)": 3010,
+    "sfp (2gfc)": 3020,
+    "sfp (4gfc)": 3040,
+    "sfp+ (8gfc)": 3080,
+    "sfp+ (16gfc)": 3160,
+    "sfp28 (32gfc)": 3320,
+    "qsfp28 (128gfc)": 3400,
+    "t1 (1.544 mbps)": 4000,
+    "e1 (2.048 mbps)": 4010,
+    "t3 (45 mbps)": 4040,
+    "e3 (34 mbps)": 4050,
+    "cisco stackwise": 5000,
+    "cisco stackwise plus": 5050,
+    "cisco flexstack": 5100,
+    "cisco flexstack plus": 5150,
+    "juniper vcp": 5200,
+    "extreme summitstack": 5300,
+    "extreme summitstack-128": 5310,
+    "extreme summitstack-256": 5320,
+    "extreme summitstack-512": 5330,
+    "other": 32767,
+}
+
+INTF_MODE = {"access": 100, "tagged": 200, "tagged all": 300}
+
+ALLOWED_QUERY_PARAMS = {
+    "interface": set(["name", "device"]),
+    "lag": set(["name"]),
+    "nat_inside": set(["vrf", "address"]),
+    "vlan": set(["name", "site", "vlan_group", "tenant"]),
+    "untagged_vlan": set(["name", "site", "vlan_group", "tenant"]),
+    "tagged_vlans": set(["name", "site", "vlan_group", "tenant"]),
+}
+
+QUERY_PARAMS_IDS = set(["vrf", "site", "vlan_group", "tenant"])
+
+
+def _build_diff(before=None, after=None):
+    return {"before": before, "after": after}
+
+
+def create_netbox_object(nb_endpoint, data, check_mode):
+    """Create a Netbox object.
+    :returns tuple(serialized_nb_obj, diff): tuple of the serialized created
+    Netbox object and the Ansible diff.
+    """
+    if check_mode:
+        serialized_nb_obj = data
+    else:
+        serialized_nb_obj = nb_endpoint.create(data).serialize()
+
+    diff = _build_diff(before={"state": "absent"}, after={"state": "present"})
+    return serialized_nb_obj, diff
+
+
+def delete_netbox_object(nb_obj, check_mode):
+    """Delete a Netbox object.
+    :returns tuple(serialized_nb_obj, diff): tuple of the serialized deleted
+    Netbox object and the Ansible diff.
+    """
+    if not check_mode:
+        nb_obj.delete()
+
+    diff = _build_diff(before={"state": "present"}, after={"state": "absent"})
+    return nb_obj.serialize(), diff
+
+
+def update_netbox_object(nb_obj, data, check_mode):
+    """Update a Netbox object.
+    :returns tuple(serialized_nb_obj, diff): tuple of the serialized updated
+    Netbox object and the Ansible diff.
+    """
+    serialized_nb_obj = nb_obj.serialize()
+    updated_obj = serialized_nb_obj.copy()
+    updated_obj.update(data)
+    if serialized_nb_obj == updated_obj:
+        return serialized_nb_obj, None
+    else:
+        data_before, data_after = {}, {}
+        for key in data:
+            if serialized_nb_obj[key] != updated_obj[key]:
+                data_before[key] = serialized_nb_obj[key]
+                data_after[key] = updated_obj[key]
+
+        if not check_mode:
+            nb_obj.update(data)
+            updated_obj = nb_obj.serialize()
+
+        diff = _build_diff(before=data_before, after=data_after)
+        return updated_obj, diff
+
+
+def _get_query_param_id(nb, match, child):
+    endpoint = CONVERT_TO_ID[match]
+    app = find_app(endpoint)
+    nb_app = getattr(nb, app)
+    nb_endpoint = getattr(nb_app, endpoint)
+    result = nb_endpoint.get(**{QUERY_TYPES.get(match): child[match]})
+    if result:
+        return result.id
+    else:
+        return child


 def find_app(endpoint):
@@ -114,6 +259,28 @@ def find_app(endpoint):
     return nb_app


+def build_query_params(nb, parent, module_data, child):
+    query_dict = dict()
+    query_params = ALLOWED_QUERY_PARAMS.get(parent)
+    matches = query_params.intersection(set(child.keys()))
+    for match in matches:
+        if match in QUERY_PARAMS_IDS:
+            value = _get_query_param_id(nb, match, child)
+            query_dict.update({match + "_id": value})
+        else:
+            value = child.get(match)
+            query_dict.update({match: value})
+
+    if parent == "lag":
+        query_dict.update({"form_factor": 200})
+        if isinstance(module_data["device"], int):
+            query_dict.update({"device_id": module_data["device"]})
+        else:
+            query_dict.update({"device": module_data["device"]})
+
+    return query_dict
+
+
 def find_ids(nb, data):
     for k, v in data.items():
         if k in CONVERT_TO_ID:
@@ -123,38 +290,62 @@ def find_ids(nb, data):
             nb_app = getattr(nb, app)
             nb_endpoint = getattr(nb_app, endpoint)

-            if k == "interface":
-                query_id = nb_endpoint.get(**{"name": v["name"], "device": v["device"]})
-            elif k == "nat_inside":
-                if v.get("vrf"):
-                    vrf_id = nb.ipam.vrfs.get(**{"name": v["vrf"]})
-                    query_id = nb_endpoint.get(**{"address": v["address"], "vrf_id": vrf_id.id})
-                else:
-                    try:
-                        query_id = nb_endpoint.get(**{"address": v["address"]})
-                    except ValueError:
-                        return {"failed": "Multiple results found while searching for %s: %s - Specify a VRF within %s" % (k, v["address"], k)}
-            else:
-                query_id = nb_endpoint.get(**{QUERY_TYPES.get(k, "q"): search})
-
-            if query_id:
-                data[k] = query_id.id
-            elif k in NO_DEFAULT_ID:
-                pass
-            else:
-                data[k] = 1
+            if isinstance(v, dict):
+                query_params = build_query_params(nb, k, data, v)
+                query_id = nb_endpoint.get(**query_params)
+
+            elif isinstance(v, list):
+                id_list = list()
+                for index in v:
+                    norm_data = normalize_data(index)
+                    temp_dict = build_query_params(nb, k, data, norm_data)
+                    query_id = nb_endpoint.get(**temp_dict)
+                    if query_id:
+                        id_list.append(query_id.id)
+                    else:
+                        return ValueError("%s not found" % (index))
+
+            else:
+                try:
+                    query_id = nb_endpoint.get(**{QUERY_TYPES.get(k, "q"): search})
+                except ValueError:
+                    return ValueError(
+                        "Multiple results found while searching for key: %s" % (k)
+                    )
+
+            if isinstance(v, list):
+                data[k] = id_list
+            elif query_id:
+                data[k] = query_id.id
+            else:
+                raise ValueError("Could not resolve id of %s: %s" % (k, v))

     return data


 def normalize_data(data):
     for k, v in data.items():
-        data_type = QUERY_TYPES.get(k, "q")
-        if data_type == "slug":
-            if "-" in v:
-                data[k] = v.replace(" ", "").lower()
-            elif " " in v:
-                data[k] = v.replace(" ", "-").lower()
-            else:
-                data[k] = v.lower()
+        if isinstance(v, dict):
+            for subk, subv in v.items():
+                sub_data_type = QUERY_TYPES.get(subk, "q")
+                if sub_data_type == "slug":
+                    if "-" in subv:
+                        data[k][subk] = subv.replace(" ", "").lower()
+                    elif " " in subv:
+                        data[k][subk] = subv.replace(" ", "-").lower()
+                    else:
+                        data[k][subk] = subv.lower()
+        else:
+            data_type = QUERY_TYPES.get(k, "q")
+            if data_type == "slug":
+                if "-" in v:
+                    data[k] = v.replace(" ", "").lower()
+                elif " " in v:
+                    data[k] = v.replace(" ", "-").lower()
+                else:
+                    data[k] = v.lower()
+            elif data_type == "timezone":
+                if " " in v:
+                    data[k] = v.replace(" ", "_")
+
     return data
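To make the QUERY_TYPES changes above concrete, here is a small illustrative example (not part of the commit; it assumes Ansible 2.8 with this patch applied so the helper is importable) of what the reworked normalize_data() does with a typical netbox_site payload, using values from the EXAMPLES section of the new module below:

# Illustrative only, not part of this commit.
from ansible.module_utils.net_tools.netbox.netbox_utils import normalize_data

data = {
    "name": "Test - California",         # no entry in QUERY_TYPES -> left untouched
    "region": "Test Region",             # "slug" type -> spaces become hyphens, lowercased
    "time_zone": "America/Los Angeles",  # "timezone" type -> spaces become underscores
}
print(normalize_data(data))
# {'name': 'Test - California', 'region': 'test-region', 'time_zone': 'America/Los_Angeles'}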
lib/ansible/modules/net_tools/netbox/netbox_site.py (new file, 348 lines added)
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Copyright: (c) 2018, Mikhail Yohman (@fragmentedpacket) <mikhail.yohman@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

ANSIBLE_METADATA = {"metadata_version": "1.1",
                    "status": ["preview"],
                    "supported_by": "community"}

DOCUMENTATION = r"""
---
module: netbox_site
short_description: Creates or removes sites from Netbox
description:
  - Creates or removes sites from Netbox
notes:
  - Tags should be defined as a YAML list
  - This should be run with connection C(local) and hosts C(localhost)
author:
  - Mikhail Yohman (@FragmentedPacket)
requirements:
  - pynetbox
version_added: "2.8"
options:
  netbox_url:
    description:
      - URL of the Netbox instance resolvable by Ansible control host
    required: true
    type: str
  netbox_token:
    description:
      - The token created within Netbox to authorize API access
    required: true
    type: str
  data:
    description:
      - Defines the site configuration
    suboptions:
      name:
        description:
          - Name of the site to be created
        required: true
        type: str
      status:
        description:
          - Status of the site
        choices:
          - Active
          - Planned
          - Retired
        type: str
      region:
        description:
          - The region that the site should be associated with
        type: str
      tenant:
        description:
          - The tenant the site will be assigned to
        type: str
      facility:
        description:
          - Data center provider or facility, ex. Equinix NY7
        type: str
      asn:
        description:
          - The ASN associated with the site
        type: int
      time_zone:
        description:
          - Timezone associated with the site, ex. America/Denver
        type: str
      description:
        description:
          - The description of the site
        type: str
      physical_address:
        description:
          - Physical address of site
        type: str
      shipping_address:
        description:
          - Shipping address of site
        type: str
      latitude:
        description:
          - Latitude in decimal format
        type: int
      longitude:
        description:
          - Longitude in decimal format
        type: int
      contact_name:
        description:
          - Name of contact for site
        type: str
      contact_phone:
        description:
          - Contact phone number for site
        type: str
      contact_email:
        description:
          - Contact email for site
        type: str
      comments:
        description:
          - Comments for the site. This can be markdown syntax
        type: str
      tags:
        description:
          - Any tags that the site may need to be associated with
        type: list
      custom_fields:
        description:
          - Must exist in Netbox
        type: dict
    required: true
  state:
    description:
      - Use C(present) or C(absent) for adding or removing.
    choices: [ absent, present ]
    default: present
    type: str
  validate_certs:
    description:
      - |
        If C(no), SSL certificates will not be validated.
        This should only be used on personally controlled sites using self-signed certificates.
    default: "yes"
    type: bool
"""

EXAMPLES = r"""
- name: "Test Netbox site module"
  connection: local
  hosts: localhost
  gather_facts: False
  tasks:
    - name: Create site within Netbox with only required information
      netbox_site:
        netbox_url: http://netbox.local
        netbox_token: thisIsMyToken
        data:
          name: Test - Colorado
        state: present

    - name: Delete site within netbox
      netbox_site:
        netbox_url: http://netbox.local
        netbox_token: thisIsMyToken
        data:
          name: Test - Colorado
        state: absent

    - name: Create site with all parameters
      netbox_site:
        netbox_url: http://netbox.local
        netbox_token: thisIsMyToken
        data:
          name: Test - California
          status: Planned
          region: Test Region
          tenant: Test Tenant
          facility: EquinoxCA7
          asn: 65001
          time_zone: America/Los Angeles
          description: This is a test description
          physical_address: Hollywood, CA, 90210
          shipping_address: Hollywood, CA, 90210
          latitude: 10.100000
          longitude: 12.200000
          contact_name: Jenny
          contact_phone: 867-5309
          contact_email: jenny@changednumber.com
          comments: ### Placeholder
        state: present
"""

RETURN = r"""
site:
  description: Serialized object as created or already existent within Netbox
  returned: on creation
  type: dict
msg:
  description: Message indicating failure or info about what has been achieved
  returned: always
  type: str
"""

import json
import traceback

from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.net_tools.netbox.netbox_utils import (
    find_ids,
    normalize_data,
    create_netbox_object,
    delete_netbox_object,
    update_netbox_object,
    SITE_STATUS,
)
from ansible.module_utils.compat import ipaddress
from ansible.module_utils._text import to_text


PYNETBOX_IMP_ERR = None
try:
    import pynetbox
    HAS_PYNETBOX = True
except ImportError:
    PYNETBOX_IMP_ERR = traceback.format_exc()
    HAS_PYNETBOX = False


def main():
    """
    Main entry point for module execution
    """
    argument_spec = dict(
        netbox_url=dict(type="str", required=True),
        netbox_token=dict(type="str", required=True, no_log=True),
        data=dict(type="dict", required=True),
        state=dict(required=False, default="present", choices=["present", "absent"]),
        validate_certs=dict(type="bool", default=True)
    )

    global module
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)

    # Fail module if pynetbox is not installed
    if not HAS_PYNETBOX:
        module.fail_json(msg=missing_required_lib('pynetbox'), exception=PYNETBOX_IMP_ERR)
    # Assign variables to be used with module
    app = "dcim"
    endpoint = "sites"
    url = module.params["netbox_url"]
    token = module.params["netbox_token"]
    data = module.params["data"]
    state = module.params["state"]
    validate_certs = module.params["validate_certs"]
    # Attempt to create Netbox API object
    try:
        nb = pynetbox.api(url, token=token, ssl_verify=validate_certs)
    except Exception:
        module.fail_json(msg="Failed to establish connection to Netbox API")
    try:
        nb_app = getattr(nb, app)
    except AttributeError:
        module.fail_json(msg="Incorrect application specified: %s" % (app))
    nb_endpoint = getattr(nb_app, endpoint)
    norm_data = normalize_data(data)
    try:
        norm_data = _check_and_adapt_data(nb, norm_data)

        if "present" in state:
            return module.exit_json(
                **ensure_site_present(nb, nb_endpoint, norm_data)
            )
        else:
            return module.exit_json(
                **ensure_site_absent(nb, nb_endpoint, norm_data)
            )
    except pynetbox.RequestError as e:
        return module.fail_json(msg=json.loads(e.error))
    except ValueError as e:
        return module.fail_json(msg=str(e))
    except AttributeError as e:
        return module.fail_json(msg=str(e))


def _check_and_adapt_data(nb, data):
    data = find_ids(nb, data)

    if data.get("status"):
        data["status"] = SITE_STATUS.get(data["status"].lower())

    if "-" in data["name"]:
        site_slug = data["name"].replace(" ", "").lower()
    elif " " in data["name"]:
        site_slug = data["name"].replace(" ", "-").lower()
    else:
        site_slug = data["name"].lower()

    data["slug"] = site_slug

    return data


def ensure_site_present(nb, nb_endpoint, data):
    """
    :returns dict(site, msg, changed): dictionary resulting of the request,
    where 'site' is the serialized site fetched or newly created in Netbox
    """

    if not isinstance(data, dict):
        changed = False
        return {"msg": data, "changed": changed}

    nb_site = nb_endpoint.get(slug=data["slug"])
    result = dict()
    if not nb_site:
        site, diff = create_netbox_object(nb_endpoint, data, module.check_mode)
        changed = True
        msg = "Site %s created" % (data["name"])
        result["diff"] = diff
    else:
        site, diff = update_netbox_object(nb_site, data, module.check_mode)
        if site is False:
            module.fail_json(
                msg="Request failed, couldn't update site: %s" % (data["name"])
            )
        if diff:
            msg = "Site %s updated" % (data["name"])
            changed = True
            result["diff"] = diff
        else:
            msg = "Site %s already exists" % (data["name"])
            changed = False

    result.update({"site": site, "msg": msg, "changed": changed})
    return result


def ensure_site_absent(nb, nb_endpoint, data):
    """
    :returns dict(msg, changed)
    """
    nb_site = nb_endpoint.get(slug=data["slug"])
    result = dict()
    if nb_site:
        dummy, diff = delete_netbox_object(nb_site, module.check_mode)
        changed = True
        msg = "Site %s deleted" % (data["name"])
        result["diff"] = diff
    else:
        msg = "Site %s already absent" % (data["name"])
        changed = False

    result.update({"msg": msg, "changed": changed})
    return result


if __name__ == "__main__":
    main()
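As a closing illustration (not part of the commit; the variables name, slug and status are only for demonstration), this is what _check_and_adapt_data() derives for the "Create site with all parameters" example above, using the SITE_STATUS table added to netbox_utils.py:

# Illustrative only, not part of this commit.
name = "Test - California"
# The name contains "-", so the slug logic strips spaces before lowercasing:
slug = name.replace(" ", "").lower()        # -> "test-california"
# SITE_STATUS (added to netbox_utils.py above) maps the lowercased choice to
# the integer value NetBox expects:
SITE_STATUS = dict(active=1, planned=2, retired=4)
status = SITE_STATUS["Planned".lower()]     # -> 2

Because supports_check_mode is enabled and the shared helpers return a before/after diff, the resulting task can be dry-run with the standard --check and --diff flags.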