nornir-collection 0.0.19__tar.gz → 0.0.20__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/PKG-INFO +1 -1
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/netbox/update_prefixes_ip_addresses.py +101 -63
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection.egg-info/PKG-INFO +1 -1
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/setup.py +1 -1
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/LICENSE +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/README.md +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/__init__.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/batfish/__init__.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/batfish/assert_config.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/batfish/utils.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/__init__.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/__init__.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/cli/__init__.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/cli/config_tasks.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/cli/config_workflow.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/cli/show_tasks.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/netconf/__init__.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/netconf/config_tasks.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/netconf/config_workflow.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/netconf/nr_cfg_iosxe_netconf.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/netconf/ops_tasks.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/processor.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/pyats.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/restconf/__init__.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/restconf/cisco_rpc.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/restconf/config_workflow.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/restconf/tasks.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/utils.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/software_upgrade/__init__.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/software_upgrade/cisco_software_upgrade.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/software_upgrade/utils.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/support_api/__init__.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/support_api/api_calls.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/support_api/cisco_maintenance_report.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/support_api/cisco_support.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/support_api/reports.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/cisco/support_api/utils.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/fortinet/__init__.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/fortinet/utils.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/git.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/netbox/__init__.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/netbox/custom_script.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/netbox/inventory.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/netbox/set_device_status.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/netbox/sync_datasource.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/netbox/update_cisco_inventory_data.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/netbox/update_cisco_support_plugin_data.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/netbox/update_fortinet_inventory_data.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/netbox/update_purestorage_inventory_data.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/netbox/utils.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/netbox/verify_device_primary_ip.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/nornir_plugins/__init__.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/nornir_plugins/inventory/__init__.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/nornir_plugins/inventory/netbox.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/nornir_plugins/inventory/staggered_yaml.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/nornir_plugins/inventory/utils.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/purestorage/__init__.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/purestorage/utils.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/utils.py +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection.egg-info/SOURCES.txt +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection.egg-info/dependency_links.txt +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection.egg-info/requires.txt +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection.egg-info/top_level.txt +0 -0
- {nornir_collection-0.0.19 → nornir_collection-0.0.20}/setup.cfg +0 -0
{nornir_collection-0.0.19 → nornir_collection-0.0.20}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: nornir-collection
-Version: 0.0.19
+Version: 0.0.20
 Summary: Nornir-Collection contains network automation functions and complete IaC workflows with Nornir and other python libraries. It contains Nornir tasks and general functions in Nornir style.
 Author: Willi Kubny
 Author-email: willi.kubny@gmail.ch
{nornir_collection-0.0.19 → nornir_collection-0.0.20}/nornir_collection/netbox/update_prefixes_ip_addresses.py

@@ -156,27 +156,27 @@ def create_nb_response_result(
                 ports = (
                     ", ".join([str(p) for p in ip["ports"]]) if "ports" in ip and ip["ports"] else "None"
                 )
-                updated_fields.append(f"- {address} (DNS: {dns_name}, Ports: {ports})")
+                updated_fields.append(f" - {address} (DNS: {dns_name}, Ports: {ports})")
             else:
-                updated_fields.append(f"- {address} (DNS: {dns_name})")
+                updated_fields.append(f" - {address} (DNS: {dns_name})")
     # If data is a dictionary containing the prefix or information vlan associated with a prefix
     elif nb_type in ("ip", "vlan") and isinstance(data, dict):
         # If the response json contains the key 'vid' the response is from a vlan and has no VRF
         if "vid" in resp.json():
-            updated_fields.append(f"- Status: {data['status']['label']}")
+            updated_fields.append(f" - Status: {data['status']['label']}")
         # It's the response from a ip-address and ha a VRF
         else:
-            updated_fields.append(f"- VRF: {data['vrf'] if data['vrf'] else 'Global'}")
+            updated_fields.append(f" - VRF: {data['vrf'] if data['vrf'] else 'Global'}")
             updated_fields.extend(
                 [
-                    f"- Tenant: {data['tenant']['name'] if data['tenant'] else 'None'}",
-                    f"- Tags: {(', '.join([i['name'] for i in data['tags']])) or 'None'}",
-                    f"- Location: {', '.join(list(data['custom_fields']['ipam_location'])).upper() or 'None'}",  # noqa: E501
+                    f" - Tenant: {data['tenant']['name'] if data['tenant'] else 'None'}",
+                    f" - Tags: {(', '.join([i['name'] for i in data['tags']])) or 'None'}",
+                    f" - Location: {', '.join(list(data['custom_fields']['ipam_location'])).upper() or 'None'}",  # noqa: E501
                 ]
             )
     # If the type is 'vlan' and data is empty
     elif nb_type == "vlan" and not data:
-        updated_fields.append("- Tags: L2 Only")
+        updated_fields.append(" - Tags: L2 Only")
     else:
         result = [
             f"{task_result(text=task_text, changed=True, level_name='ERROR')}\n"
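
The only functional change in this hunk is a leading space inside each `updated_fields` entry. These entries are presumably joined and printed beneath a parent line of the task output, so the extra space indents the details one level deeper; a minimal illustration of that assumption (the surrounding output format is not shown in this diff):

```python
# Illustrative only: the leading space nests the detail lines under a parent
# line once the collected fields are joined for printing.
updated_fields = [" - Status: Active", " - VRF: Global", " - Tenant: None"]
print("The following ip-addresses had been updated:")
print("\n".join(updated_fields))
# The following ip-addresses had been updated:
#  - Status: Active
#  - VRF: Global
#  - Tenant: None
```
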
@@ -266,23 +266,33 @@ def create_nb_ip_payload(
     return payload


-def nmap_scan_prefix(prefix: dict, result: list) -> tuple:
+def nmap_scan_host_ip_or_subnet(hosts: str) -> list:
     """
     TBD
     """
+    # Create an ip-interface object from the hosts argument.
+    # Can be a host ip-address with CIDR netmask or a network prefix with CIDR netmask
+    ip_iface = ipaddress.ip_interface(hosts)
     # Get the prefix length from the prefix
-    prefixlen =
-    # Get the network and broadcast address of the prefix to exclude them from the nmap scan
-    network_addr = ipaddress.ip_network(prefix["prefix"]).network_address
-    broadcast_addr = ipaddress.ip_network(prefix["prefix"]).broadcast_address
+    prefixlen = ip_iface.network.prefixlen
     # Set the nmap scan arguments
-    arguments =
+    arguments = "-PE -PP -PA21 -PS80,443,3389 -PU161,40125 --source-port 53"
+
+    # If the ip-address is the network address of the prefix, then it's a whole prefix to scan
+    if ip_iface.ip == ip_iface.network.network_address:
+        # Add the the nmap arguments that the network and broadcast address should be excluded from the scan
+        arguments += f" --exclude {ip_iface.network.network_address},{ip_iface.network.broadcast_address}"
+        # Scan the whole prefix
+        scan_target = str(ip_iface.network)
+    else:
+        # Scan only the host ip-address
+        scan_target = str(ip_iface.ip)

-    # Nmap ARP scan
+    # Nmap ARP scan and add a list of active ip-addresses and other details to the list
     nm = nmap.PortScanner()
-    nm.scan(hosts=
+    nm.scan(hosts=scan_target, arguments=arguments, sudo=True)
     if nm.all_hosts():
-
+        nmap_scan_result = [
             {
                 "address": f"{nm[host]['addresses']['ipv4']}/{prefixlen}",
                 "dns_name": nm[host]["hostnames"][0]["name"],
@@ -291,23 +301,19 @@ def nmap_scan_prefix(prefix: dict, result: list) -> tuple:
             for host in nm.all_hosts()
         ]
     else:
-
+        nmap_scan_result = []

-
-    text = f"Nmap Scan Prefix {prefix['prefix']} for active IP-Addresses"
-    result.append(
-        f"{task_result(text=text, changed=False, level_name='INFO')}\n"
-        + f"'{text}' -> NetBoxResponse <Success: True>\n"
-        + f"-> Nmap prefix scan ip-address count: {len(prefix['datasource_ips'])}"
-    )
+    return nmap_scan_result

-    return result, prefix

-
-def get_ipfabric_data_for_prefix(ipf: IPFClient, prefix: dict, result: list) -> tuple:
+def get_ipfabric_data_for_prefix(prefix: dict) -> tuple:
     """
     TBD
     """
+    # Connect to IP-Fabric
+    ipf = IPFClient(
+        base_url=os.environ["IPF_URL"], auth=os.environ["IPF_TOKEN"], snapshot_id="$last", verify=False
+    )
     # Get the prefix length from the prefix
     prefixlen = ipaddress.ip_network(prefix["prefix"]).prefixlen
     # Get all ip-addresses of the prefix from the IP-Fabric technology arp table
@@ -335,7 +341,7 @@ def get_ipfabric_data_for_prefix(ipf: IPFClient, prefix: dict, result: list) ->
     # Combine all ip-addresses lists and remove duplicates
     all_ips = list(set(arp_ip_list + managed_ipv4_ip_list + interface_ip_list + host_ip_list))
     # Add a list of all ip-addresses and other details to prefix
-
+    ipf_ip_address_result = [
         {
             "address": f"{ip}/{prefixlen}",
             "dns_name": host_ip_dict[ip] if ip in host_ip_dict.keys() else "",
@@ -344,15 +350,7 @@ def get_ipfabric_data_for_prefix(ipf: IPFClient, prefix: dict, result: list) ->
         for ip in all_ips
     ]

-
-    text = f"IP-Fabric Get Data for Prefix {prefix['prefix']} IP-Addresses"
-    result.append(
-        f"{task_result(text=text, changed=False, level_name='INFO')}\n"
-        + f"'{text}' -> IPFResponse <Success: True>\n"
-        + f"-> IP-Fabric prefix ip-address count: {len(prefix['datasource_ips'])}"
-    )
-
-    return result, prefix
+    return ipf_ip_address_result


 def get_nb_ips_and_external_datasource(nb_url: str, prefix: dict, ds: Literal["nmap", "ip-fabric"]) -> tuple:
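
The hunks above replace the prefix-only helper `nmap_scan_prefix(prefix, result)` with `nmap_scan_host_ip_or_subnet(hosts)`, which accepts either a whole prefix (for example `10.0.0.0/24`) or a single host with a CIDR mask (for example `10.0.0.5/24`) and picks the scan target by comparing the address against its network address. A condensed, standalone sketch of that dispatch, assuming python-nmap and sudo rights are available; only the two result keys visible in the diff are reproduced, everything else is illustrative:

```python
# Standalone sketch of the host-vs-subnet dispatch used by the new helper.
import ipaddress
import nmap


def scan(hosts: str) -> list:
    ip_iface = ipaddress.ip_interface(hosts)
    arguments = "-PE -PP -PA21 -PS80,443,3389 -PU161,40125 --source-port 53"
    if ip_iface.ip == ip_iface.network.network_address:
        # "10.0.0.0/24" -> scan the whole prefix, excluding network/broadcast address
        arguments += f" --exclude {ip_iface.network.network_address},{ip_iface.network.broadcast_address}"
        scan_target = str(ip_iface.network)
    else:
        # "10.0.0.5/24" -> scan only that single host
        scan_target = str(ip_iface.ip)

    nm = nmap.PortScanner()
    nm.scan(hosts=scan_target, arguments=arguments, sudo=True)
    return [
        {
            "address": f"{nm[host]['addresses']['ipv4']}/{ip_iface.network.prefixlen}",
            "dns_name": nm[host]["hostnames"][0]["name"],
        }
        for host in nm.all_hosts()
    ]
```

Returning a plain list, instead of mutating `prefix` and `result`, is what lets the same helper be reused later in this diff for the per-address double checks.
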
@@ -375,14 +373,26 @@ def get_nb_ips_and_external_datasource(nb_url: str, prefix: dict, ds: Literal["n
     # Get the ip-addresses of the prefix from the datasource
     if ds == "nmap":
         # Scan the prefix with nmap
-
-
-        #
-
-
+        prefix["datasource_ips"] = nmap_scan_host_ip_or_subnet(hosts=prefix["prefix"])
+
+        # Print the task result
+        text = f"Nmap Scan Prefix {prefix['prefix']} for active IP-Addresses"
+        result.append(
+            f"{task_result(text=text, changed=False, level_name='INFO')}\n"
+            + f"'{text}' -> NetBoxResponse <Success: True>\n"
+            + f"-> Nmap prefix scan ip-address count: {len(prefix['datasource_ips'])}"
         )
+    elif ds == "ip-fabric":
         # Get the ip-addresses from the IP-Fabric
-
+        prefix["datasource_ips"] = get_ipfabric_data_for_prefix(prefix=prefix)
+
+        # Print the task result
+        text = f"IP-Fabric Get Data for Prefix {prefix['prefix']} IP-Addresses"
+        result.append(
+            f"{task_result(text=text, changed=False, level_name='INFO')}\n"
+            + f"'{text}' -> IPFResponse <Success: True>\n"
+            + f"-> IP-Fabric prefix ip-address count: {len(prefix['datasource_ips'])}"
+        )
     else:
         # Invalid datasource
         failed = True
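
With the result printing moved into this dispatcher, the scan and IP-Fabric helpers themselves only return data. The `ip-fabric` branch relies on the client that an earlier hunk now builds inside `get_ipfabric_data_for_prefix()` from environment variables; in isolation that setup looks roughly like the sketch below (the environment variable names and constructor arguments come from the diff, the comments are assumptions):

```python
import os

from ipfabric import IPFClient

# Connection details are read from the environment rather than passed in,
# so the function no longer needs an IPFClient argument.
ipf = IPFClient(
    base_url=os.environ["IPF_URL"],    # e.g. https://ipfabric.example.com (placeholder)
    auth=os.environ["IPF_TOKEN"],      # API token
    snapshot_id="$last",               # always query the latest snapshot
    verify=False,                      # TLS verification disabled
)
```
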
@@ -513,7 +523,7 @@ def update_discovered_ip_addresses(nb_url: str, prefix: dict, ds: Literal["nmap"
     result.extend(sub_result)
     failed = True if sub_failed else failed

-    # Update the ip-addresses with the status 'auto_discovered' that are part of the
+    # Update the ip-addresses with the status 'auto_discovered' that are part of the datasource list
     update_ips = create_ip_list(
         loop_list=prefix["discovered_ips"], check_list=prefix["datasource_ips"], is_in_both=True
     )
@@ -557,10 +567,19 @@ def delete_inactive_auto_discovered_ip_addresses(
     failed = False
     task_text = "Delete Auto-Discovered IP-Addresses"

-    # Delete the ip-addresses with the status 'auto_discovered' that are not in the
-
+    # Delete the ip-addresses with the status 'auto_discovered' that are not in the datasource list
+    maybe_delete_ips = create_ip_list(
         loop_list=prefix["discovered_ips"], check_list=prefix["datasource_ips"], is_in_both=False
     )
+    delete_ips = []
+    if maybe_delete_ips:
+        # Nmap scan ip-addresses of the maybe_delete_ips list
+        for ip in maybe_delete_ips:
+            # Scan the prefix with nmap
+            scan_result = nmap_scan_host_ip_or_subnet(hosts=ip["address"])
+            # Add the nmal scan result to the inactive_ips list
+            if scan_result:
+                delete_ips.extend(scan_result)

     # If ip-addresses have been found
     if delete_ips:
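
This hunk introduces the per-address double check: auto-discovered addresses that dropped out of the datasource are re-scanned one by one with the new helper before the bulk delete is built. The same loop shape is added again for the inactive and active status updates further down; a hypothetical helper that captures the shared pattern (not part of the package; `nmap_scan_host_ip_or_subnet` is the function introduced above):

```python
def rescan_candidates(candidates: list[dict]) -> list[dict]:
    """Re-scan each candidate ip-address individually and return the entries
    nmap still reports, mirroring the loops added in this diff."""
    confirmed: list[dict] = []
    for ip in candidates:
        # ip["address"] carries a CIDR mask, so the helper scans a single host
        scan_result = nmap_scan_host_ip_or_subnet(hosts=ip["address"])
        if scan_result:
            confirmed.extend(scan_result)
    return confirmed
```
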
@@ -594,7 +613,7 @@ def update_reserved_ip_addresses(nb_url: str, prefix: dict, ds: Literal["nmap",
     failed = False
     task_text = "Update Reserved IP-Addresses Status"

-    # Update the ip-addresses with the status 'reserved' that are part of the
+    # Update the ip-addresses with the status 'reserved' that are part of the datacenter list
     update_ips = create_ip_list(
         loop_list=prefix["reserved_ips"], check_list=prefix["datasource_ips"], is_in_both=True
     )
@@ -632,17 +651,27 @@ def update_inactive_ip_addresses(nb_url: str, prefix: dict, ds: Literal["nmap",
     failed = False
     task_text = "Update Inactive IP-Addresses Status"

-    # Update the ip-addresses with the status 'inactive' that are part of the
-
+    # Update the ip-addresses with the status 'inactive' that are part of the datasource list
+    active_ips = create_ip_list(
         loop_list=prefix["inactive_ips"], check_list=prefix["datasource_ips"], is_in_both=True
     )
+    # Double-check ip-addresses with the status 'inactive' that are not part of the datasource list
+    maybe_active_ips = create_ip_list(
+        loop_list=prefix["inactive_ips"], check_list=prefix["datasource_ips"], is_in_both=False
+    )
+    if maybe_active_ips:
+        # Nmap scan ip-addresses of the maybe_inactive_ips list
+        for ip in maybe_active_ips:
+            # Scan the prefix with nmap
+            scan_result = nmap_scan_host_ip_or_subnet(hosts=ip["address"])
+            # Add the nmal scan result to the inactive_ips list
+            if scan_result:
+                active_ips.extend(scan_result)

     # If ip-addresses have been found
-    if
+    if active_ips:
         # Create the payload to update the ip-addresses
-        payload = create_nb_ip_payload(
-            parent_prefix=prefix, data=inactive_ips, ds=ds, desired_status="active"
-        )
+        payload = create_nb_ip_payload(parent_prefix=prefix, data=active_ips, ds=ds, desired_status="active")
         # PATCH request to update the ip-addresses
         resp = patch_nb_resources(url=f"{nb_url}/api/ipam/ip-addresses/", payload=payload)

@@ -650,7 +679,7 @@ def update_inactive_ip_addresses(nb_url: str, prefix: dict, ds: Literal["nmap",
         text = "The following 'Inactive' ip-addresses had been set to status 'Active':"
         # The function returns the result list and True if the response is successful else False
         result, failed = create_nb_response_result(
-            resp=resp, nb_type="ip", data=
+            resp=resp, nb_type="ip", data=active_ips, ds=ds, task_text=task_text, text=text
         )

     return result, failed
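
The actual status change is a single bulk PATCH through the package's `patch_nb_resources()` wrapper against `/api/ipam/ip-addresses/`. For context, a plain-`requests` equivalent of such a NetBox bulk update might look like the sketch below; the URL, token, ids, and timeout are placeholders and not taken from the package:

```python
import os

import requests

nb_url = os.environ["NB_URL"]  # placeholder, e.g. https://netbox.example.com
headers = {
    "Authorization": f"Token {os.environ['NB_TOKEN']}",  # NetBox API token
    "Content-Type": "application/json",
}
# NetBox accepts a list of objects for a bulk PATCH; each object needs its "id".
payload = [
    {"id": 101, "status": "active"},
    {"id": 102, "status": "active"},
]
resp = requests.patch(f"{nb_url}/api/ipam/ip-addresses/", json=payload, headers=headers, timeout=30)
resp.raise_for_status()
```
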
@@ -674,7 +703,7 @@ def update_active_ip_addresses(
     failed = False
     task_text = "Update Active IP-Addresses Status"

-    # Update the ip-addresses with the status 'active' that are part of the
+    # Update the ip-addresses with the status 'active' that are part of the datacenter list
     active_ips = create_ip_list(
         loop_list=prefix["active_ips"], check_list=prefix["datasource_ips"], is_in_both=True
     )
@@ -695,10 +724,19 @@ def update_active_ip_addresses(
     result.extend(sub_result)
     failed = True if sub_failed else failed

-    # Update the ip-addresses with the status 'active' that are not part of the
-
+    # Update the ip-addresses with the status 'active' that are not part of the datacenter list
+    maybe_inactive_ips = create_ip_list(
         loop_list=prefix["active_ips"], check_list=prefix["datasource_ips"], is_in_both=False
     )
+    inactive_ips = []
+    if maybe_inactive_ips:
+        # Nmap scan ip-addresses of the maybe_inactive_ips list
+        for ip in maybe_inactive_ips:
+            # Scan the prefix with nmap
+            scan_result = nmap_scan_host_ip_or_subnet(hosts=ip["address"])
+            # Add the nmal scan result to the inactive_ips list
+            if scan_result:
+                inactive_ips.extend(scan_result)
     # Create a new list to exclude the overwrite_active ip-addresses
     inactive_ips = [ip for ip in inactive_ips if ip["address"] not in overwrite_active]

@@ -785,7 +823,7 @@ def update_netbox_prefix_ip_addresses(prefix: list, *args) -> tuple:
     result, sub_failed = update_discovered_ip_addresses(nb_url=nb_url, prefix=prefix, ds=ds)
     results, changed, failed = set_results_changed_failed(results, result, changed, sub_failed, failed)

-    # Delete inactive 'auto-discovered' ip-addresses
+    # Delete inactive 'auto-discovered' ip-addresses (double check with partial nmap scan)
     result, sub_failed = delete_inactive_auto_discovered_ip_addresses(nb_url=nb_url, prefix=prefix, ds=ds)
     results, changed, failed = set_results_changed_failed(results, result, changed, sub_failed, failed)

@@ -793,11 +831,11 @@ def update_netbox_prefix_ip_addresses(prefix: list, *args) -> tuple:
     result, sub_failed = update_reserved_ip_addresses(nb_url=nb_url, prefix=prefix, ds=ds)
     results, changed, failed = set_results_changed_failed(results, result, changed, sub_failed, failed)

-    # Update 'inactive' ip-addresses -> set status to 'active'
+    # Update 'inactive' ip-addresses -> set status to 'active' (double check with partial nmap scan)
     result, sub_failed = update_inactive_ip_addresses(nb_url=nb_url, prefix=prefix, ds=ds)
     results, changed, failed = set_results_changed_failed(results, result, changed, sub_failed, failed)

-    # Update 'active' ip-addresses -> set status to 'active' or 'inactive'
+    # Update 'active' ip-addresses -> set status to 'active' or 'inactive' (check with partial nmap scan)
     result, sub_failed = update_active_ip_addresses(
         nb_url=nb_url, prefix=prefix, overwrite_active=overwrite_active, ds=ds
     )
@@ -1038,7 +1076,7 @@ def main(nr_config: str, nmap_scan: bool = False, overwrite_active: list[str] =

     # Load Active and Inventory NetBox Prefixes from all Tenants (except marked utilized)
     # These prefixes will be updated with input from IP-Fabric
-
+    nb_active_inventory_all_prefixes = load_netbox_data(
         task_text="Load Active OOB/T1/T2/T3/T4 NetBox Prefixes",
         nb_api_url=f"{nb_url}/api/ipam/prefixes/",
         query={
@@ -1093,7 +1131,7 @@ def main(nr_config: str, nmap_scan: bool = False, overwrite_active: list[str] =
     thread_result = run_thread_pool(
         title=title,
         task=update_netbox_prefix_ip_addresses,
-        thread_list=
+        thread_list=nb_active_inventory_all_prefixes,
         max_workers=5,
         args=("ip-fabric", overwrite_active),
     )
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.2
|
2
2
|
Name: nornir-collection
|
3
|
-
Version: 0.0.
|
3
|
+
Version: 0.0.20
|
4
4
|
Summary: Nornir-Collection contains network automation functions and complete IaC workflows with Nornir and other python libraries. It contains Nornir tasks and general functions in Nornir style.
|
5
5
|
Author: Willi Kubny
|
6
6
|
Author-email: willi.kubny@gmail.ch
|
{nornir_collection-0.0.19 → nornir_collection-0.0.20}/setup.py

@@ -11,7 +11,7 @@ with open("requirements.txt", encoding="utf-8") as f:

 setuptools.setup(
     name="nornir-collection",
-    version="0.0.19",
+    version="0.0.20",
     author="Willi Kubny",
     author_email="willi.kubny@gmail.ch",
     description="Nornir-Collection contains network automation functions and complete IaC workflows with \