ipfabric_netbox-4.3.2b5-py3-none-any.whl → ipfabric_netbox-4.3.2b6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


--- a/ipfabric_netbox/__init__.py
+++ b/ipfabric_netbox/__init__.py
@@ -6,7 +6,7 @@ class NetboxIPFabricConfig(PluginConfig):
     name = "ipfabric_netbox"
     verbose_name = "NetBox IP Fabric SoT Plugin"
     description = "Sync IP Fabric into NetBox"
-    version = "4.3.2b5"
+    version = "4.3.2b6"
     base_url = "ipfabric"
     min_version = "4.4.0"
 
--- a/ipfabric_netbox/exceptions.py
+++ b/ipfabric_netbox/exceptions.py
@@ -44,3 +44,7 @@ class IPAddressDuplicateError(IngestionIssue, SyncError):
 class IPAddressPrimaryRemovalError(IngestionIssue, SyncError):
     def __str__(self):
         return "Error removing primary IP from other device."
+
+
+class RequiredDependencyFailedSkip(SearchError):
+    """Raised when a required dependency failed, causing this item to be skipped."""
--- a/ipfabric_netbox/utilities/ipfutils.py
+++ b/ipfabric_netbox/utilities/ipfutils.py
@@ -12,6 +12,7 @@ from typing import TYPE_CHECKING
 from typing import TypeVar
 
 from core.exceptions import SyncError
+from core.signals import clear_events
 from dcim.models import Device
 from django.conf import settings
 from django.core.exceptions import MultipleObjectsReturned
@@ -19,15 +20,18 @@ from django.core.exceptions import ObjectDoesNotExist
 from django.db.models import Model
 from django.utils.text import slugify
 from django_tables2 import Column
+from extras.events import flush_events
 from ipfabric import IPFClient
 from jinja2.sandbox import SandboxedEnvironment
 from netbox.config import get_config
+from netbox.context import events_queue
 from netutils.utils import jinja2_convenience_function
 
 from ..choices import IPFabricSourceTypeChoices
 from ..choices import IPFabricSyncStatusChoices
 from ..exceptions import IPAddressDuplicateError
 from ..exceptions import IPAddressPrimaryRemovalError
+from ..exceptions import RequiredDependencyFailedSkip
 from ..exceptions import SearchError
 from ..exceptions import SyncDataError
 
@@ -37,6 +41,7 @@ if TYPE_CHECKING:
     from ..models import IPFabricIngestionIssue
     from ipam.models import IPAddress
     from ..models import IPFabricTransformMap
+    from ..models import IPFabricSync
 
 logger = logging.getLogger("ipfabric_netbox.utilities.ipf_utils")
 
@@ -72,6 +77,49 @@ def render_jinja2(template_code, context):
     return environment.from_string(source=template_code).render(**context)
 
 
+class EventsClearer:
+    """
+    Handles clearing of events after a defined number of objects are saved to reduce memory usage.
+    ChangeLoggingMiddleware causes rest_framework Fields to be held in memory for the whole
+    duration of the current request. This causes high memory usage during sync jobs.
+
+    The data is already written to the DB due to the branching ChangeDiff, so we can trigger
+    the events clearing here to free up memory.
+
+    For performance reasons we don't want to clear the event cache after every object is saved,
+    but we also don't want to wait until the end of the sync when syncing a large number of objects.
+    This class helps to clear events after a defined threshold is reached.
+
+    Without it, the memory requirements are roughly 1.5 GB per 10k changes.
+    """
+
+    def __init__(self, sender: object, threshold: int = 100) -> None:
+        self.sender = sender
+        self.threshold = threshold
+        self.counter = 0
+
+    def increment(self) -> None:
+        """
+        Increment the counter and clear events if the threshold is reached.
+
+        This should not be called after every instance.snapshot() but instead
+        when the instance is processed. This makes sure its changes are synced
+        as a whole. Calling it after every single snapshot() causes issues.
+        """
+        self.counter += 1
+        if self.counter >= self.threshold:
+            self.clear()
+
+    def clear(self) -> None:
+        logger.debug("Clearing events to reduce memory usage.")
+        # This makes sure webhooks are sent properly
+        if events := list(events_queue.get().values()):
+            flush_events(events)
+        # And now the queue can be cleared
+        clear_events.send(sender=None)
+        self.counter = 0
+
+
 class IPFabric(object):
     def __init__(self, parameters=None) -> None:
         if parameters:
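
A minimal, self-contained sketch of the threshold pattern the class above implements. The flush callable is a stand-in for NetBox's flush_events/clear_events/events_queue plumbing used in the hunk, and the Callable-based constructor is purely illustrative, not the plugin's API:

from typing import Callable


class ThresholdedClearer:
    """Flush and clear queued events every `threshold` processed objects."""

    def __init__(self, flush: Callable[[], None], threshold: int = 100) -> None:
        self.flush = flush  # stand-in for flush_events(...) followed by clear_events.send(...)
        self.threshold = threshold
        self.counter = 0

    def increment(self) -> None:
        # Called once per fully processed object, not once per snapshot()
        self.counter += 1
        if self.counter >= self.threshold:
            self.clear()

    def clear(self) -> None:
        self.flush()
        self.counter = 0


# Usage: flush roughly every 100 objects, and once more at the very end of the run.
clearer = ThresholdedClearer(flush=lambda: print("events flushed"), threshold=100)
for _ in range(250):
    clearer.increment()
clearer.clear()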
@@ -325,14 +373,21 @@ def order_members(members: list[dict]) -> dict[str, list[dict]]:
     return devices
 
 
-def order_devices(
+def prepare_devices(
     devices: list[dict], members: dict[str, list[dict]]
 ) -> tuple[list[dict], list[dict]]:
+    """
+    Prepare the devices list for syncing:
+    - incorporate virtual chassis members
+    - handle duplicate hostnames
+    """
+
     hostnames = [d["hostname"] for d in devices]
     counter = Counter(hostnames)
 
-    new_devices = []
-    ordered_devices = []
+    # All devices to be synced
+    all_devices = []
+    # All virtual chassis to be synced
     virtualchassis = []
 
     for device in devices[:]:
@@ -341,32 +396,33 @@ def order_devices(
         if child_members := members.get(device.get("sn")):
             # This device is the VC master, and we're iterating over all its members
             for child_member in child_members:
+                # There is physically no device with hostname matching the virtual chassis
+                # There are only members, so "hostname/1", "hostname/2", etc.
+                new_device = deepcopy(device)
+                new_device[
+                    "hostname"
+                ] = f"{device['hostname']}/{child_member.get('member')}"
+                new_device["virtual_chassis"] = child_member
                 if device.get("sn") != child_member.get("memberSn"):
                     # VC members (non-master) are not in Device table, need to add them as new Device
-                    new_device = deepcopy(device)
-                    new_device[
-                        "hostname"
-                    ] = f"{device['hostname']}/{child_member.get('member')}"
                     new_device["model"] = child_member.get("pn")
                     new_device["sn"] = child_member.get("memberSn")
-                    new_device["virtual_chassis"] = child_member
-                    new_devices.append(new_device)
+                    all_devices.append(new_device)
                 else:
-                    # This is the master device
-                    device["virtual_chassis"] = child_member
-                    virtualchassis.append(child_member)
+                    # Master device, create the virtual chassis based on it
+                    virtualchassis.append(child_member)
+                    all_devices.append(new_device)
             hostnames = [d["hostname"] for d in devices]
             counter = Counter(hostnames)
         else:
-            ordered_devices.append(device)
-    ordered_devices.extend(new_devices)
-    return ordered_devices, virtualchassis
+            all_devices.append(device)
+    return all_devices, virtualchassis
 
 
 class IPFabricSyncRunner(object):
     def __init__(
         self,
-        sync,
+        sync: "IPFabricSync",
         client: IPFClient = None,
         ingestion=None,
         settings: dict = None,
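
To make the virtual-chassis handling above concrete, here is an illustrative input/output pair for prepare_devices(). The field values (hostnames, serials, part numbers) are made up for the example; only the key names come from the hunks above:

# One VC master known to IP Fabric plus two chassis members returned by order_members().
devices = [{"hostname": "core-sw1", "sn": "SN-A", "model": "C9300"}]
members = {
    "SN-A": [
        {"member": 1, "memberSn": "SN-A", "pn": "C9300-48P"},
        {"member": 2, "memberSn": "SN-B", "pn": "C9300-48P"},
    ]
}

# prepare_devices(devices, members) would then return roughly:
all_devices = [
    # Master: hostname rewritten to "<hostname>/<member>", original model/sn kept.
    {"hostname": "core-sw1/1", "sn": "SN-A", "model": "C9300",
     "virtual_chassis": {"member": 1, "memberSn": "SN-A", "pn": "C9300-48P"}},
    # Non-master member: additionally takes the member's pn/memberSn as model/sn.
    {"hostname": "core-sw1/2", "sn": "SN-B", "model": "C9300-48P",
     "virtual_chassis": {"member": 2, "memberSn": "SN-B", "pn": "C9300-48P"}},
]
# The virtual chassis itself is created from the master's member entry.
virtualchassis = [{"member": 1, "memberSn": "SN-A", "pn": "C9300-48P"}]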
@@ -387,6 +443,8 @@ class IPFabricSyncRunner(object):
         # For now store only serial numbers since that is the largest dependency chain
         self.error_serials = set()
 
+        self.events_clearer = EventsClearer(sender=self.__class__, threshold=100)
+
     @staticmethod
     def get_error_serial(context: dict | None, data: dict | None) -> str | None:
         """Get error serial from context or raw data for skipping purposes."""
@@ -413,7 +471,8 @@ class IPFabricSyncRunner(object):
         context = context or {}
 
         error_serial = self.get_error_serial(context, data)
-        if error_serial:
+        # Ignore models that do not have any dependencies by serial number
+        if error_serial and model not in ["ipaddress", "macaddress"]:
             self.error_serials.add(error_serial)
 
         # TODO: This is to prevent circular import issues, clean it up later.
@@ -516,11 +575,21 @@ class IPFabricSyncRunner(object):
         # First check if there are any previous errors linked to this object
         error_serial = self.get_error_serial(record.context, record.data)
         if error_serial and error_serial in self.error_serials:
-            self.logger.log_info(
-                f"Skipping syncing of `{record.model}` with serial `{error_serial}` due to previous errors.",
-                obj=self.sync,
+            # We want to raise it as an exception so it's shown in ingestion issues but can be filtered out.
+            exception = RequiredDependencyFailedSkip(
+                message=f"Skipping syncing of `{record.model}` with serial `{error_serial}` due to previous errors.",
+                model=record.model,
+                context=record.context,
+                data=record.data,
             )
-            return None
+            _, issue = self.create_or_get_sync_issue(
+                exception=exception,
+                ingestion=self.ingestion,
+                model=record.model,
+                context=record.context,
+                data=record.data,
+            )
+            raise exception
         record = self.get_transform_context(record)
         queryset = record.transform_map.target_model.model_class().objects
         model_settings = self.settings.get(record.model, False)
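
The pattern above (record an ingestion issue, then raise a dedicated skip exception) lets callers keep skips visible while filtering them out of real failures. A simplified, self-contained sketch of that flow; the exception classes are trimmed stand-ins for the plugin's SearchError hierarchy, not its actual signatures:

class SearchError(Exception):
    def __init__(self, message: str, **kwargs) -> None:
        super().__init__(message)
        self.details = kwargs


class RequiredDependencyFailedSkip(SearchError):
    """Raised when a required dependency failed, causing this item to be skipped."""


def sync_record(record: dict, failed_serials: set[str]) -> None:
    if record.get("sn") in failed_serials:
        # Surface the skip as an issue, but keep it distinguishable from hard errors.
        raise RequiredDependencyFailedSkip(
            f"Skipping `{record['model']}` with serial `{record['sn']}` due to previous errors.",
            model=record["model"],
        )
    ...  # normal sync path would run here


try:
    sync_record({"model": "interface", "sn": "SN-A"}, failed_serials={"SN-A"})
except RequiredDependencyFailedSkip as skip:
    print(f"skipped: {skip}")  # skips can be filtered out of failure reports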
@@ -703,7 +772,7 @@ class IPFabricSyncRunner(object):
 
         self.logger.log_info("Preparing devices", obj=self.sync)
         members = order_members(data.get("virtualchassis", []))
-        devices, virtualchassis = order_devices(data.get("device", []), members)
+        devices, virtualchassis = prepare_devices(data.get("device", []), members)
 
         # We need to store primary IPs of Devices to assign them later
         # since they are not stored on Device object directly
@@ -875,15 +944,69 @@ class IPFabricSyncRunner(object):
 
         for item in items:
             self.sync_item(item, cf, ingestion, stats)
+            self.events_clearer.increment()
 
     @handle_errors
-    def sync_ip_addresses(self, ip_addresses: set[DataRecord]):
+    def sync_devices(
+        self,
+        devices: set[DataRecord],
+        cf: bool = False,
+        ingestion: "IPFabricIngestion" = None,
+    ) -> None:
+        """Sync devices separately to handle resetting primary IP."""
+        if not self.settings.get("device"):
+            self.logger.log_info(
+                "Did not ask to sync devices, skipping.", obj=self.sync
+            )
+            return
+
+        for device in devices:
+            device_obj: "Device | None" = self.sync_item(
+                record=device, cf=cf, ingestion=ingestion
+            )
+
+            if (
+                device_obj is None
+                or device_obj.primary_ip4 is None
+                or device.data.get("loginIp") is not None
+            ):
+                self.events_clearer.increment()
+                continue
+
+            # If device has primary IP assigned in NetBox, but not in IP Fabric, remove it
+            try:
+                connection_name = self.get_db_connection_name()
+                device_obj.refresh_from_db(using=connection_name)
+                device_obj.snapshot()
+                device_obj.primary_ip4 = None
+                device_obj.save(using=connection_name)
+            except Exception as err:
+                _, issue = self.create_or_get_sync_issue(
+                    exception=err,
+                    ingestion=self.ingestion,
+                    message="Error removing primary IP from current device.",
+                    model=device.model,
+                    data=device.data,
+                )
+                self.events_clearer.increment()
+                raise IPAddressPrimaryRemovalError(
+                    data=device.data,
+                    model=device.model,
+                    issue_id=issue.pk,
+                ) from err
+            self.events_clearer.increment()
+
+    @handle_errors
+    def sync_ip_addresses(self, ip_addresses: set[DataRecord]) -> None:
         """
         We cannot assign primary IP in signals since IPAddress does not
         contain information whether it is primary or not. And it must be done
         on Device object, so cannot be done via Transform Maps yet since that
         would require another Transform Map for Device.
         So we need to do it manually here.
+
+        Cleaning the events queue happens during each cycle to make sure all required
+        operations (primary IP assignment) happen during the same batch.
         """
         if not self.settings.get("ipaddress"):
             self.logger.log_info(
@@ -924,6 +1047,7 @@ class IPFabricSyncRunner(object):
                     model=ip_address.model,
                     data=ip_address.data,
                 )
+                self.events_clearer.increment()
                 raise IPAddressPrimaryRemovalError(
                     data=ip_address.data,
                     model=ip_address.model,
@@ -932,6 +1056,7 @@ class IPFabricSyncRunner(object):
 
             ip_address_obj: "IPAddress | None" = self.sync_item(record=ip_address)
             if ip_address_obj is None or ip_address_obj.assigned_object is None:
+                self.events_clearer.increment()
                 continue
 
             parent_device = ip_address_obj.assigned_object.parent_object
@@ -949,6 +1074,7 @@ class IPFabricSyncRunner(object):
                 self.logger.log_failure(
                     f"Error assigning primary IP to device: {err}", obj=self.sync
                 )
+            self.events_clearer.increment()
 
     def collect_and_sync(self, ingestion=None) -> None:
         self.logger.log_info("Starting data collection.", obj=self.sync)
@@ -967,8 +1093,8 @@ class IPFabricSyncRunner(object):
         self.sync_items(items=records["platform"])
         self.sync_items(items=records["devicerole"])
         self.sync_items(items=records["virtualchassis"])
-        self.sync_items(
-            items=records["device"],
+        self.sync_devices(
+            devices=records["device"],
             cf=self.sync.update_custom_fields,
             ingestion=ingestion,
         )
@@ -982,3 +1108,6 @@ class IPFabricSyncRunner(object):
         self.sync_items(items=records["vrf"])
         self.sync_items(items=records["prefix"])
         self.sync_ip_addresses(ip_addresses=records["ipaddress"])
+
+        # Make sure to clean queue (and memory) at the end
+        self.events_clearer.clear()
--- a/ipfabric_netbox-4.3.2b5.dist-info/METADATA
+++ b/ipfabric_netbox-4.3.2b6.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ipfabric_netbox
-Version: 4.3.2b5
+Version: 4.3.2b6
 Summary: NetBox plugin to sync IP Fabric data into NetBox
 License: MIT
 Keywords: netbox,ipfabric,plugin,sync
--- a/ipfabric_netbox-4.3.2b5.dist-info/RECORD
+++ b/ipfabric_netbox-4.3.2b6.dist-info/RECORD
@@ -1,11 +1,11 @@
-ipfabric_netbox/__init__.py,sha256=Y0eTGqE2S1Bo0EEpLOqg-aeHQD7Z9fbJT4_AV7MWIrM,674
+ipfabric_netbox/__init__.py,sha256=pbFl740qmbZh3_w4p0CYtejadvV7TqiSTWHUbctx2rY,674
 ipfabric_netbox/api/__init__.py,sha256=XRclTGWVR0ZhAAwgYul5Wm_loug5_hUjEumbLQEwKYM,47
 ipfabric_netbox/api/serializers.py,sha256=92Cwhnqsm1l1oZfdHH5aJI1VFX0eO5JS4BsdXE6Ur18,6738
 ipfabric_netbox/api/urls.py,sha256=1fXXVTxNY5E64Nfz6b7zXD9bZI3FcefuxAWKMe0w_QU,1240
 ipfabric_netbox/api/views.py,sha256=qOBTIzPtOBY75tTjirsTBbiRXrQQid478Tp15-WKbmQ,6859
 ipfabric_netbox/choices.py,sha256=r1A7zasYw92fdB6MxnvcLkzz4mA61_wSUmbfuDbmg0M,6017
 ipfabric_netbox/data/transform_map.json,sha256=dYPXiaLJFuO9vabGgkuywn7XFCe7xdgPrfvOvP206TM,22016
-ipfabric_netbox/exceptions.py,sha256=5nyAVoaPEGDHcrUXNpsCr_Nhq1vz1VbAmq54fU03iRg,1453
+ipfabric_netbox/exceptions.py,sha256=uTXF03nVryB7FhzuhLkEyLeHIcQSyUTExnWobsYW9mg,1589
 ipfabric_netbox/filtersets.py,sha256=4I_ogO0Wqexf4e4gy_CirdGmA6vSCybyCadFcjI2LM8,8011
 ipfabric_netbox/forms.py,sha256=s9jYgK75CJzCrhnEeB3WxxZ9bF2YfNDA4N-sO9xTqgc,50068
 ipfabric_netbox/graphql/__init__.py,sha256=-a5w_VY7pc-RVt8MvThkTzeAqCC3xCan4Ue6iMefmjI,754
@@ -79,10 +79,10 @@ ipfabric_netbox/tests/test_models.py,sha256=gagJKoxD-BnEMWwZ2d5uImrWJOqYgfXe23JR
 ipfabric_netbox/tests/test_views.py,sha256=KKHKA4ejTEwdy6Ce5StJxjxWVWbQ54Y1puyPeBRw1vM,87923
 ipfabric_netbox/urls.py,sha256=qF2BzZEDnPRd3opFaRfiMdaarYKFfv69iMaAbU2rsBU,2702
 ipfabric_netbox/utilities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-ipfabric_netbox/utilities/ipfutils.py,sha256=wc6wVBEHKJ8RoCOrp6Fm3w08nf1xiHGGMsMYrypOcpA,38852
+ipfabric_netbox/utilities/ipfutils.py,sha256=Eb63rU8Cd_fbsKUqezeqdrI0zMu_YLosssnzVKHyEKk,44113
 ipfabric_netbox/utilities/logging.py,sha256=GYknjocMN6LQ2873_az3y0RKm29TCXaWviUIIneH-x0,3445
 ipfabric_netbox/utilities/transform_map.py,sha256=nJhEdi2DqqowrtfowNgg-FZiE3_lN0MhQvaNwHS4yXw,8979
 ipfabric_netbox/views.py,sha256=nFqb9htUH-D8NXvJcWPb5D5u909rfHMZ4trUbCULafU,45208
-ipfabric_netbox-4.3.2b5.dist-info/METADATA,sha256=BGU2f-PPRiSy2wedUc8muqMfLABDWvZYnQZVPeljMO4,4789
-ipfabric_netbox-4.3.2b5.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
-ipfabric_netbox-4.3.2b5.dist-info/RECORD,,
+ipfabric_netbox-4.3.2b6.dist-info/METADATA,sha256=XV5wOkMNlqa92rwmOhDyHNl6OBjgfyiMk6iiLOSWl1U,4789
+ipfabric_netbox-4.3.2b6.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+ipfabric_netbox-4.3.2b6.dist-info/RECORD,,