nornir-collection 0.0.18__tar.gz → 0.0.20__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64) hide show
  1. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/PKG-INFO +1 -1
  2. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/netbox/update_prefixes_ip_addresses.py +102 -63
  3. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection.egg-info/PKG-INFO +1 -1
  4. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/setup.py +1 -1
  5. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/LICENSE +0 -0
  6. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/README.md +0 -0
  7. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/__init__.py +0 -0
  8. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/batfish/__init__.py +0 -0
  9. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/batfish/assert_config.py +0 -0
  10. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/batfish/utils.py +0 -0
  11. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/__init__.py +0 -0
  12. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/__init__.py +0 -0
  13. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/cli/__init__.py +0 -0
  14. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/cli/config_tasks.py +0 -0
  15. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/cli/config_workflow.py +0 -0
  16. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/cli/show_tasks.py +0 -0
  17. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/netconf/__init__.py +0 -0
  18. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/netconf/config_tasks.py +0 -0
  19. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/netconf/config_workflow.py +0 -0
  20. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/netconf/nr_cfg_iosxe_netconf.py +0 -0
  21. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/netconf/ops_tasks.py +0 -0
  22. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/processor.py +0 -0
  23. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/pyats.py +0 -0
  24. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/restconf/__init__.py +0 -0
  25. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/restconf/cisco_rpc.py +0 -0
  26. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/restconf/config_workflow.py +0 -0
  27. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/restconf/tasks.py +0 -0
  28. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/configuration_management/utils.py +0 -0
  29. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/software_upgrade/__init__.py +0 -0
  30. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/software_upgrade/cisco_software_upgrade.py +0 -0
  31. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/software_upgrade/utils.py +0 -0
  32. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/support_api/__init__.py +0 -0
  33. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/support_api/api_calls.py +0 -0
  34. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/support_api/cisco_maintenance_report.py +0 -0
  35. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/support_api/cisco_support.py +0 -0
  36. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/support_api/reports.py +0 -0
  37. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/cisco/support_api/utils.py +0 -0
  38. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/fortinet/__init__.py +0 -0
  39. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/fortinet/utils.py +0 -0
  40. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/git.py +0 -0
  41. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/netbox/__init__.py +0 -0
  42. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/netbox/custom_script.py +0 -0
  43. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/netbox/inventory.py +0 -0
  44. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/netbox/set_device_status.py +0 -0
  45. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/netbox/sync_datasource.py +0 -0
  46. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/netbox/update_cisco_inventory_data.py +0 -0
  47. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/netbox/update_cisco_support_plugin_data.py +0 -0
  48. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/netbox/update_fortinet_inventory_data.py +0 -0
  49. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/netbox/update_purestorage_inventory_data.py +0 -0
  50. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/netbox/utils.py +0 -0
  51. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/netbox/verify_device_primary_ip.py +0 -0
  52. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/nornir_plugins/__init__.py +0 -0
  53. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/nornir_plugins/inventory/__init__.py +0 -0
  54. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/nornir_plugins/inventory/netbox.py +0 -0
  55. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/nornir_plugins/inventory/staggered_yaml.py +0 -0
  56. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/nornir_plugins/inventory/utils.py +0 -0
  57. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/purestorage/__init__.py +0 -0
  58. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/purestorage/utils.py +0 -0
  59. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection/utils.py +0 -0
  60. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection.egg-info/SOURCES.txt +0 -0
  61. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection.egg-info/dependency_links.txt +0 -0
  62. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection.egg-info/requires.txt +0 -0
  63. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/nornir_collection.egg-info/top_level.txt +0 -0
  64. {nornir_collection-0.0.18 → nornir_collection-0.0.20}/setup.cfg +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: nornir-collection
3
- Version: 0.0.18
3
+ Version: 0.0.20
4
4
  Summary: Nornir-Collection contains network automation functions and complete IaC workflows with Nornir and other python libraries. It contains Nornir tasks and general functions in Nornir style.
5
5
  Author: Willi Kubny
6
6
  Author-email: willi.kubny@gmail.ch
@@ -156,27 +156,27 @@ def create_nb_response_result(
156
156
  ports = (
157
157
  ", ".join([str(p) for p in ip["ports"]]) if "ports" in ip and ip["ports"] else "None"
158
158
  )
159
- updated_fields.append(f"- {address} (DNS: {dns_name}, Ports: {ports})")
159
+ updated_fields.append(f" - {address} (DNS: {dns_name}, Ports: {ports})")
160
160
  else:
161
- updated_fields.append(f"- {address} (DNS: {dns_name})")
161
+ updated_fields.append(f" - {address} (DNS: {dns_name})")
162
162
  # If data is a dictionary containing the prefix or information vlan associated with a prefix
163
163
  elif nb_type in ("ip", "vlan") and isinstance(data, dict):
164
164
  # If the response json contains the key 'vid' the response is from a vlan and has no VRF
165
165
  if "vid" in resp.json():
166
- updated_fields.append(f"- Status: {data['status']['label']}")
166
+ updated_fields.append(f" - Status: {data['status']['label']}")
167
167
  # It's the response from an ip-address and has a VRF
168
168
  else:
169
- updated_fields.append(f"- VRF: {data['vrf'] if data['vrf'] else 'Global'}")
169
+ updated_fields.append(f" - VRF: {data['vrf'] if data['vrf'] else 'Global'}")
170
170
  updated_fields.extend(
171
171
  [
172
- f"- Tenant: {data['tenant']['name'] if data['tenant'] else 'None'}",
173
- f"- Tags: {(', '.join([i['name'] for i in data['tags']])) or 'None'}",
174
- f"- Location: {', '.join(list(data['custom_fields']['ipam_location'])).upper() or 'None'}", # noqa: E501
172
+ f" - Tenant: {data['tenant']['name'] if data['tenant'] else 'None'}",
173
+ f" - Tags: {(', '.join([i['name'] for i in data['tags']])) or 'None'}",
174
+ f" - Location: {', '.join(list(data['custom_fields']['ipam_location'])).upper() or 'None'}", # noqa: E501
175
175
  ]
176
176
  )
177
177
  # If the type is 'vlan' and data is empty
178
178
  elif nb_type == "vlan" and not data:
179
- updated_fields.append("- Tags: L2 Only")
179
+ updated_fields.append(" - Tags: L2 Only")
180
180
  else:
181
181
  result = [
182
182
  f"{task_result(text=task_text, changed=True, level_name='ERROR')}\n"
@@ -266,23 +266,33 @@ def create_nb_ip_payload(
266
266
  return payload
267
267
 
268
268
 
269
- def nmap_scan_prefix(prefix: dict, result: list) -> tuple:
269
+ def nmap_scan_host_ip_or_subnet(hosts: str) -> list:
270
270
  """
271
271
  TBD
272
272
  """
273
+ # Create an ip-interface object from the hosts argument.
274
+ # Can be a host ip-address with CIDR netmask or a network prefix with CIDR netmask
275
+ ip_iface = ipaddress.ip_interface(hosts)
273
276
  # Get the prefix length from the prefix
274
- prefixlen = ipaddress.ip_network(prefix["prefix"]).prefixlen
275
- # Get the network and broadcast address of the prefix to exclude them from the nmap scan
276
- network_addr = ipaddress.ip_network(prefix["prefix"]).network_address
277
- broadcast_addr = ipaddress.ip_network(prefix["prefix"]).broadcast_address
277
+ prefixlen = ip_iface.network.prefixlen
278
278
  # Set the nmap scan arguments
279
- arguments = f"-PE -PP -PA21 -PS80,443,3389 -PU161,40125 --source-port 53 --exclude {network_addr},{broadcast_addr}" # noqa E501
279
+ arguments = "-PE -PP -PA21 -PS80,443,3389 -PU161,40125 --source-port 53"
280
+
281
+ # If the ip-address is the network address of the prefix, then it's a whole prefix to scan
282
+ if ip_iface.ip == ip_iface.network.network_address:
283
+ # Add nmap arguments so that the network and broadcast addresses are excluded from the scan
284
+ arguments += f" --exclude {ip_iface.network.network_address},{ip_iface.network.broadcast_address}"
285
+ # Scan the whole prefix
286
+ scan_target = str(ip_iface.network)
287
+ else:
288
+ # Scan only the host ip-address
289
+ scan_target = str(ip_iface.ip)
280
290
 
281
- # Nmap ARP scan for the prefix and add a list of active ip-addresses and other details to prefix
291
+ # Nmap ARP scan and add a list of active ip-addresses and other details to the list
282
292
  nm = nmap.PortScanner()
283
- nm.scan(hosts=prefix["prefix"], arguments=arguments, sudo=True)
293
+ nm.scan(hosts=scan_target, arguments=arguments, sudo=True)
284
294
  if nm.all_hosts():
285
- prefix["datasource_ips"] = [
295
+ nmap_scan_result = [
286
296
  {
287
297
  "address": f"{nm[host]['addresses']['ipv4']}/{prefixlen}",
288
298
  "dns_name": nm[host]["hostnames"][0]["name"],
@@ -291,23 +301,19 @@ def nmap_scan_prefix(prefix: dict, result: list) -> tuple:
291
301
  for host in nm.all_hosts()
292
302
  ]
293
303
  else:
294
- prefix["datasource_ips"] = []
304
+ nmap_scan_result = []
295
305
 
296
- # Print the task result
297
- text = f"Nmap Scan Prefix {prefix['prefix']} for active IP-Addresses"
298
- result.append(
299
- f"{task_result(text=text, changed=False, level_name='INFO')}\n"
300
- + f"'{text}' -> NetBoxResponse <Success: True>\n"
301
- + f"-> Nmap prefix scan ip-address count: {len(prefix['datasource_ips'])}"
302
- )
306
+ return nmap_scan_result
303
307
 
304
- return result, prefix
305
308
 
306
-
307
- def get_ipfabric_data_for_prefix(ipf: IPFClient, prefix: dict, result: list) -> tuple:
309
+ def get_ipfabric_data_for_prefix(prefix: dict) -> tuple:
308
310
  """
309
311
  TBD
310
312
  """
313
+ # Connect to IP-Fabric
314
+ ipf = IPFClient(
315
+ base_url=os.environ["IPF_URL"], auth=os.environ["IPF_TOKEN"], snapshot_id="$last", verify=False
316
+ )
311
317
  # Get the prefix length from the prefix
312
318
  prefixlen = ipaddress.ip_network(prefix["prefix"]).prefixlen
313
319
  # Get all ip-addresses of the prefix from the IP-Fabric technology arp table
@@ -315,6 +321,7 @@ def get_ipfabric_data_for_prefix(ipf: IPFClient, prefix: dict, result: list) ->
315
321
  filters = {"ip": ["cidr", prefix["prefix"]], "proxy": ["eq", False]}
316
322
  arp_ip_list = list(set([x["ip"] for x in ipf.technology.addressing.arp_table.all(filters=filters)]))
317
323
  # Get all ip-addresses of the prefix from the IP-Fabric inventory managed ipv4 table
324
+ filters = {"ip": ["cidr", prefix["prefix"]]}
318
325
  managed_ipv4_ip_list = list(
319
326
  set([x["ip"] for x in ipf.technology.addressing.managed_ip_ipv4.all(filters=filters, columns=["ip"])])
320
327
  )
@@ -334,7 +341,7 @@ def get_ipfabric_data_for_prefix(ipf: IPFClient, prefix: dict, result: list) ->
334
341
  # Combine all ip-addresses lists and remove duplicates
335
342
  all_ips = list(set(arp_ip_list + managed_ipv4_ip_list + interface_ip_list + host_ip_list))
336
343
  # Add a list of all ip-addresses and other details to prefix
337
- prefix["datasource_ips"] = [
344
+ ipf_ip_address_result = [
338
345
  {
339
346
  "address": f"{ip}/{prefixlen}",
340
347
  "dns_name": host_ip_dict[ip] if ip in host_ip_dict.keys() else "",
@@ -343,15 +350,7 @@ def get_ipfabric_data_for_prefix(ipf: IPFClient, prefix: dict, result: list) ->
343
350
  for ip in all_ips
344
351
  ]
345
352
 
346
- # Print the task result
347
- text = f"IP-Fabric Get Data for Prefix {prefix['prefix']} IP-Addresses"
348
- result.append(
349
- f"{task_result(text=text, changed=False, level_name='INFO')}\n"
350
- + f"'{text}' -> IPFResponse <Success: True>\n"
351
- + f"-> IP-Fabric prefix ip-address count: {len(prefix['datasource_ips'])}"
352
- )
353
-
354
- return result, prefix
353
+ return ipf_ip_address_result
355
354
 
356
355
 
357
356
  def get_nb_ips_and_external_datasource(nb_url: str, prefix: dict, ds: Literal["nmap", "ip-fabric"]) -> tuple:
@@ -374,14 +373,26 @@ def get_nb_ips_and_external_datasource(nb_url: str, prefix: dict, ds: Literal["n
374
373
  # Get the ip-addresses of the prefix from the datasource
375
374
  if ds == "nmap":
376
375
  # Scan the prefix with nmap
377
- result, prefix = nmap_scan_prefix(prefix=prefix, result=result)
378
- elif ds == "ip-fabric":
379
- # Connect to IP-Fabric
380
- ipf = IPFClient(
381
- base_url=os.environ["IPF_URL"], auth=os.environ["IPF_TOKEN"], snapshot_id="$last", verify=False
376
+ prefix["datasource_ips"] = nmap_scan_host_ip_or_subnet(hosts=prefix["prefix"])
377
+
378
+ # Print the task result
379
+ text = f"Nmap Scan Prefix {prefix['prefix']} for active IP-Addresses"
380
+ result.append(
381
+ f"{task_result(text=text, changed=False, level_name='INFO')}\n"
382
+ + f"'{text}' -> NetBoxResponse <Success: True>\n"
383
+ + f"-> Nmap prefix scan ip-address count: {len(prefix['datasource_ips'])}"
382
384
  )
385
+ elif ds == "ip-fabric":
383
386
  # Get the ip-addresses from the IP-Fabric
384
- result, prefix = get_ipfabric_data_for_prefix(ipf=ipf, prefix=prefix, result=result)
387
+ prefix["datasource_ips"] = get_ipfabric_data_for_prefix(prefix=prefix)
388
+
389
+ # Print the task result
390
+ text = f"IP-Fabric Get Data for Prefix {prefix['prefix']} IP-Addresses"
391
+ result.append(
392
+ f"{task_result(text=text, changed=False, level_name='INFO')}\n"
393
+ + f"'{text}' -> IPFResponse <Success: True>\n"
394
+ + f"-> IP-Fabric prefix ip-address count: {len(prefix['datasource_ips'])}"
395
+ )
385
396
  else:
386
397
  # Invalid datasource
387
398
  failed = True
@@ -512,7 +523,7 @@ def update_discovered_ip_addresses(nb_url: str, prefix: dict, ds: Literal["nmap"
512
523
  result.extend(sub_result)
513
524
  failed = True if sub_failed else failed
514
525
 
515
- # Update the ip-addresses with the status 'auto_discovered' that are part of the nmap scan list
526
+ # Update the ip-addresses with the status 'auto_discovered' that are part of the datasource list
516
527
  update_ips = create_ip_list(
517
528
  loop_list=prefix["discovered_ips"], check_list=prefix["datasource_ips"], is_in_both=True
518
529
  )
@@ -556,10 +567,19 @@ def delete_inactive_auto_discovered_ip_addresses(
556
567
  failed = False
557
568
  task_text = "Delete Auto-Discovered IP-Addresses"
558
569
 
559
- # Delete the ip-addresses with the status 'auto_discovered' that are not in the nmap scan list
560
- delete_ips = create_ip_list(
570
+ # Delete the ip-addresses with the status 'auto_discovered' that are not in the datasource list
571
+ maybe_delete_ips = create_ip_list(
561
572
  loop_list=prefix["discovered_ips"], check_list=prefix["datasource_ips"], is_in_both=False
562
573
  )
574
+ delete_ips = []
575
+ if maybe_delete_ips:
576
+ # Nmap scan ip-addresses of the maybe_delete_ips list
577
+ for ip in maybe_delete_ips:
578
+ # Scan the prefix with nmap
579
+ scan_result = nmap_scan_host_ip_or_subnet(hosts=ip["address"])
580
+ # Add the nmap scan result to the delete_ips list
581
+ if scan_result:
582
+ delete_ips.extend(scan_result)
563
583
 
564
584
  # If ip-addresses have been found
565
585
  if delete_ips:
@@ -593,7 +613,7 @@ def update_reserved_ip_addresses(nb_url: str, prefix: dict, ds: Literal["nmap",
593
613
  failed = False
594
614
  task_text = "Update Reserved IP-Addresses Status"
595
615
 
596
- # Update the ip-addresses with the status 'reserved' that are part of the nmap scan list
616
+ # Update the ip-addresses with the status 'reserved' that are part of the datasource list
597
617
  update_ips = create_ip_list(
598
618
  loop_list=prefix["reserved_ips"], check_list=prefix["datasource_ips"], is_in_both=True
599
619
  )
@@ -631,17 +651,27 @@ def update_inactive_ip_addresses(nb_url: str, prefix: dict, ds: Literal["nmap",
631
651
  failed = False
632
652
  task_text = "Update Inactive IP-Addresses Status"
633
653
 
634
- # Update the ip-addresses with the status 'inactive' that are part of the nmap scan list
635
- inactive_ips = create_ip_list(
654
+ # Update the ip-addresses with the status 'inactive' that are part of the datasource list
655
+ active_ips = create_ip_list(
636
656
  loop_list=prefix["inactive_ips"], check_list=prefix["datasource_ips"], is_in_both=True
637
657
  )
658
+ # Double-check ip-addresses with the status 'inactive' that are not part of the datasource list
659
+ maybe_active_ips = create_ip_list(
660
+ loop_list=prefix["inactive_ips"], check_list=prefix["datasource_ips"], is_in_both=False
661
+ )
662
+ if maybe_active_ips:
663
+ # Nmap scan ip-addresses of the maybe_active_ips list
664
+ for ip in maybe_active_ips:
665
+ # Scan the prefix with nmap
666
+ scan_result = nmap_scan_host_ip_or_subnet(hosts=ip["address"])
667
+ # Add the nmap scan result to the active_ips list
668
+ if scan_result:
669
+ active_ips.extend(scan_result)
638
670
 
639
671
  # If ip-addresses have been found
640
- if inactive_ips:
672
+ if active_ips:
641
673
  # Create the payload to update the ip-addresses
642
- payload = create_nb_ip_payload(
643
- parent_prefix=prefix, data=inactive_ips, ds=ds, desired_status="active"
644
- )
674
+ payload = create_nb_ip_payload(parent_prefix=prefix, data=active_ips, ds=ds, desired_status="active")
645
675
  # PATCH request to update the ip-addresses
646
676
  resp = patch_nb_resources(url=f"{nb_url}/api/ipam/ip-addresses/", payload=payload)
647
677
 
@@ -649,7 +679,7 @@ def update_inactive_ip_addresses(nb_url: str, prefix: dict, ds: Literal["nmap",
649
679
  text = "The following 'Inactive' ip-addresses had been set to status 'Active':"
650
680
  # The function returns the result list and True if the response is successful else False
651
681
  result, failed = create_nb_response_result(
652
- resp=resp, nb_type="ip", data=inactive_ips, ds=ds, task_text=task_text, text=text
682
+ resp=resp, nb_type="ip", data=active_ips, ds=ds, task_text=task_text, text=text
653
683
  )
654
684
 
655
685
  return result, failed
@@ -673,7 +703,7 @@ def update_active_ip_addresses(
673
703
  failed = False
674
704
  task_text = "Update Active IP-Addresses Status"
675
705
 
676
- # Update the ip-addresses with the status 'active' that are part of the nmap scan list
706
+ # Update the ip-addresses with the status 'active' that are part of the datasource list
677
707
  active_ips = create_ip_list(
678
708
  loop_list=prefix["active_ips"], check_list=prefix["datasource_ips"], is_in_both=True
679
709
  )
@@ -694,10 +724,19 @@ def update_active_ip_addresses(
694
724
  result.extend(sub_result)
695
725
  failed = True if sub_failed else failed
696
726
 
697
- # Update the ip-addresses with the status 'active' that are not part of the nmap scan list
698
- inactive_ips = create_ip_list(
727
+ # Update the ip-addresses with the status 'active' that are not part of the datacenter list
728
+ maybe_inactive_ips = create_ip_list(
699
729
  loop_list=prefix["active_ips"], check_list=prefix["datasource_ips"], is_in_both=False
700
730
  )
731
+ inactive_ips = []
732
+ if maybe_inactive_ips:
733
+ # Nmap scan ip-addresses of the maybe_inactive_ips list
734
+ for ip in maybe_inactive_ips:
735
+ # Scan the prefix with nmap
736
+ scan_result = nmap_scan_host_ip_or_subnet(hosts=ip["address"])
737
+ # Add the nmap scan result to the inactive_ips list
738
+ if scan_result:
739
+ inactive_ips.extend(scan_result)
701
740
  # Create a new list to exclude the overwrite_active ip-addresses
702
741
  inactive_ips = [ip for ip in inactive_ips if ip["address"] not in overwrite_active]
703
742
 
@@ -784,7 +823,7 @@ def update_netbox_prefix_ip_addresses(prefix: list, *args) -> tuple:
784
823
  result, sub_failed = update_discovered_ip_addresses(nb_url=nb_url, prefix=prefix, ds=ds)
785
824
  results, changed, failed = set_results_changed_failed(results, result, changed, sub_failed, failed)
786
825
 
787
- # Delete inactive 'auto-discovered' ip-addresses
826
+ # Delete inactive 'auto-discovered' ip-addresses (double check with partial nmap scan)
788
827
  result, sub_failed = delete_inactive_auto_discovered_ip_addresses(nb_url=nb_url, prefix=prefix, ds=ds)
789
828
  results, changed, failed = set_results_changed_failed(results, result, changed, sub_failed, failed)
790
829
 
@@ -792,11 +831,11 @@ def update_netbox_prefix_ip_addresses(prefix: list, *args) -> tuple:
792
831
  result, sub_failed = update_reserved_ip_addresses(nb_url=nb_url, prefix=prefix, ds=ds)
793
832
  results, changed, failed = set_results_changed_failed(results, result, changed, sub_failed, failed)
794
833
 
795
- # Update 'inactive' ip-addresses -> set status to 'active'
834
+ # Update 'inactive' ip-addresses -> set status to 'active' (double check with partial nmap scan)
796
835
  result, sub_failed = update_inactive_ip_addresses(nb_url=nb_url, prefix=prefix, ds=ds)
797
836
  results, changed, failed = set_results_changed_failed(results, result, changed, sub_failed, failed)
798
837
 
799
- # Update 'active' ip-addresses -> set status to 'active' or 'inactive'
838
+ # Update 'active' ip-addresses -> set status to 'active' or 'inactive' (check with partial nmap scan)
800
839
  result, sub_failed = update_active_ip_addresses(
801
840
  nb_url=nb_url, prefix=prefix, overwrite_active=overwrite_active, ds=ds
802
841
  )
@@ -1037,7 +1076,7 @@ def main(nr_config: str, nmap_scan: bool = False, overwrite_active: list[str] =
1037
1076
 
1038
1077
  # Load Active and Inventory NetBox Prefixes from all Tenants (except marked utilized)
1039
1078
  # These prefixes will be updated with input from IP-Fabric
1040
- nb_active_inventory_t3_t4_prefixes = load_netbox_data(
1079
+ nb_active_inventory_all_prefixes = load_netbox_data(
1041
1080
  task_text="Load Active OOB/T1/T2/T3/T4 NetBox Prefixes",
1042
1081
  nb_api_url=f"{nb_url}/api/ipam/prefixes/",
1043
1082
  query={
@@ -1092,7 +1131,7 @@ def main(nr_config: str, nmap_scan: bool = False, overwrite_active: list[str] =
1092
1131
  thread_result = run_thread_pool(
1093
1132
  title=title,
1094
1133
  task=update_netbox_prefix_ip_addresses,
1095
- thread_list=nb_active_inventory_t3_t4_prefixes,
1134
+ thread_list=nb_active_inventory_all_prefixes,
1096
1135
  max_workers=5,
1097
1136
  args=("ip-fabric", overwrite_active),
1098
1137
  )
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: nornir-collection
3
- Version: 0.0.18
3
+ Version: 0.0.20
4
4
  Summary: Nornir-Collection contains network automation functions and complete IaC workflows with Nornir and other python libraries. It contains Nornir tasks and general functions in Nornir style.
5
5
  Author: Willi Kubny
6
6
  Author-email: willi.kubny@gmail.ch
@@ -11,7 +11,7 @@ with open("requirements.txt", encoding="utf-8") as f:
11
11
 
12
12
  setuptools.setup(
13
13
  name="nornir-collection",
14
- version="0.0.18",
14
+ version="0.0.20",
15
15
  author="Willi Kubny",
16
16
  author_email="willi.kubny@gmail.ch",
17
17
  description="Nornir-Collection contains network automation functions and complete IaC workflows with \