ipfabric_netbox-4.3.2b9-py3-none-any.whl → ipfabric_netbox-4.3.2b10-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of ipfabric_netbox might be problematic.

Files changed (49):
  1. ipfabric_netbox/__init__.py +2 -2
  2. ipfabric_netbox/api/serializers.py +112 -7
  3. ipfabric_netbox/api/urls.py +6 -0
  4. ipfabric_netbox/api/views.py +23 -0
  5. ipfabric_netbox/choices.py +72 -40
  6. ipfabric_netbox/data/endpoint.json +47 -0
  7. ipfabric_netbox/data/filters.json +51 -0
  8. ipfabric_netbox/data/transform_map.json +188 -174
  9. ipfabric_netbox/exceptions.py +7 -5
  10. ipfabric_netbox/filtersets.py +310 -41
  11. ipfabric_netbox/forms.py +324 -79
  12. ipfabric_netbox/graphql/__init__.py +6 -0
  13. ipfabric_netbox/graphql/enums.py +5 -5
  14. ipfabric_netbox/graphql/filters.py +56 -4
  15. ipfabric_netbox/graphql/schema.py +28 -0
  16. ipfabric_netbox/graphql/types.py +61 -1
  17. ipfabric_netbox/jobs.py +5 -1
  18. ipfabric_netbox/migrations/0022_prepare_for_filters.py +182 -0
  19. ipfabric_netbox/migrations/0023_populate_filters_data.py +279 -0
  20. ipfabric_netbox/migrations/0024_finish_filters.py +29 -0
  21. ipfabric_netbox/models.py +384 -12
  22. ipfabric_netbox/navigation.py +98 -24
  23. ipfabric_netbox/tables.py +194 -9
  24. ipfabric_netbox/templates/ipfabric_netbox/htmx_list.html +5 -0
  25. ipfabric_netbox/templates/ipfabric_netbox/inc/combined_expressions.html +59 -0
  26. ipfabric_netbox/templates/ipfabric_netbox/inc/combined_expressions_content.html +39 -0
  27. ipfabric_netbox/templates/ipfabric_netbox/inc/endpoint_filters_with_selector.html +54 -0
  28. ipfabric_netbox/templates/ipfabric_netbox/ipfabricendpoint.html +39 -0
  29. ipfabric_netbox/templates/ipfabric_netbox/ipfabricfilter.html +51 -0
  30. ipfabric_netbox/templates/ipfabric_netbox/ipfabricfilterexpression.html +39 -0
  31. ipfabric_netbox/templates/ipfabric_netbox/ipfabricfilterexpression_edit.html +150 -0
  32. ipfabric_netbox/templates/ipfabric_netbox/ipfabricsync.html +1 -1
  33. ipfabric_netbox/templates/ipfabric_netbox/ipfabrictransformmap.html +16 -2
  34. ipfabric_netbox/templatetags/ipfabric_netbox_helpers.py +65 -0
  35. ipfabric_netbox/tests/api/test_api.py +333 -13
  36. ipfabric_netbox/tests/test_filtersets.py +2592 -0
  37. ipfabric_netbox/tests/test_forms.py +1256 -74
  38. ipfabric_netbox/tests/test_models.py +242 -34
  39. ipfabric_netbox/tests/test_views.py +2030 -25
  40. ipfabric_netbox/urls.py +35 -0
  41. ipfabric_netbox/utilities/endpoint.py +30 -0
  42. ipfabric_netbox/utilities/filters.py +88 -0
  43. ipfabric_netbox/utilities/ipfutils.py +254 -316
  44. ipfabric_netbox/utilities/logging.py +7 -7
  45. ipfabric_netbox/utilities/transform_map.py +126 -0
  46. ipfabric_netbox/views.py +719 -5
  47. {ipfabric_netbox-4.3.2b9.dist-info → ipfabric_netbox-4.3.2b10.dist-info}/METADATA +3 -2
  48. {ipfabric_netbox-4.3.2b9.dist-info → ipfabric_netbox-4.3.2b10.dist-info}/RECORD +49 -33
  49. {ipfabric_netbox-4.3.2b9.dist-info → ipfabric_netbox-4.3.2b10.dist-info}/WHEEL +1 -1
ipfabric_netbox/utilities/ipfutils.py

@@ -2,11 +2,8 @@ import json
 import logging
 from collections import Counter
 from copy import deepcopy
-from enum import Enum
 from functools import cache
-from functools import partial
 from importlib import metadata
-from typing import Any
 from typing import Callable
 from typing import TYPE_CHECKING
 from typing import TypeVar
@@ -14,8 +11,10 @@ from typing import TypeVar
 from core.exceptions import SyncError
 from core.signals import clear_events
 from dcim.models import Device
+from dcim.models import Site
 from dcim.models import VirtualChassis
 from dcim.signals import assign_virtualchassis_master
+from dcim.signals import sync_cached_scope_fields
 from django.conf import settings
 from django.core.exceptions import MultipleObjectsReturned
 from django.core.exceptions import ObjectDoesNotExist
@@ -136,26 +135,6 @@ class IPFabric(object):
             "user-agent"
         ] += f'; ipfabric-netbox/{metadata.version("ipfabric-netbox")}' # noqa: E702

-    def get_snapshots(self) -> dict:
-        formatted_snapshots = {}
-        if self.ipf:
-            for snapshot_ref, snapshot in self.ipf.snapshots.items():
-                if snapshot.status != "done" and snapshot.finish_status != "done":
-                    continue
-                if snapshot_ref in ["$prev", "$lastLocked"]:
-                    continue
-                if snapshot.name:
-                    description = (
-                        snapshot.name
-                        + " - "
-                        + snapshot.end.strftime("%d-%b-%y %H:%M:%S")
-                    )
-                else:
-                    description = snapshot.end.strftime("%d-%b-%y %H:%M:%S")
-
-                formatted_snapshots[snapshot_ref] = (description, snapshot.snapshot_id)
-        return formatted_snapshots
-
     def get_table_data(self, table, device):
         filter = {"sn": ["eq", device.serial]}
         split = table.split(".")
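Note on the hunk above: the removed `get_snapshots` helper shaped IP Fabric snapshots into a `{ref: (description, snapshot_id)}` mapping for form choices. For reference, a standalone sketch of the same formatting rules, with a stub in place of the real `ipfabric` SDK snapshot object (the stub's fields are illustrative, not the SDK's):

```python
from dataclasses import dataclass
from datetime import datetime

@dataclass
class StubSnapshot:
    # Stand-in for the ipfabric SDK snapshot object the removed helper consumed
    snapshot_id: str
    name: str | None
    status: str
    finish_status: str
    end: datetime

def format_snapshots(snapshots: dict[str, StubSnapshot]) -> dict[str, tuple[str, str]]:
    """Mirror the removed rules: skip unfinished snapshots and the
    $prev/$lastLocked aliases, then build a human-readable description."""
    formatted = {}
    for ref, snap in snapshots.items():
        if snap.status != "done" and snap.finish_status != "done":
            continue
        if ref in ["$prev", "$lastLocked"]:
            continue
        stamp = snap.end.strftime("%d-%b-%y %H:%M:%S")
        formatted[ref] = (f"{snap.name} - {stamp}" if snap.name else stamp, snap.snapshot_id)
    return formatted

snaps = {"$last": StubSnapshot("a1b2", "nightly", "done", "done", datetime(2024, 5, 1, 3, 0))}
print(format_snapshots(snaps))  # {'$last': ('nightly - 01-May-24 03:00:00', 'a1b2')}
```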
@@ -194,15 +173,13 @@ class DataRecord:

     def __init__(
         self,
-        app: str,
-        model: str,
+        model_string: str,
         data: dict,
         # These values are filled later as the record is passed down the pipeline
         context: dict | None = None,
         transform_map: "IPFabricTransformMap | None" = None,
     ):
-        self.app = app
-        self.model = model
+        self.model_string = model_string
         self.data = data
         self.context = context or dict()
         self.transform_map = transform_map
@@ -212,8 +189,7 @@ class DataRecord:
         try:
             self._hash = hash(
                 (
-                    self.app,
-                    self.model,
+                    self.model_string,
                     # Since the dicts are already ordered, it is safe to hash them
                     # .values() are mutable, this is fixed by tuple() to get same hash every time
                     make_hashable(self.data),
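These two hunks fold the old `app`/`model` pair into a single `model_string` identity. Because `__hash__` covers `model_string` plus the ordered payload, identical source rows collapse when records are gathered into sets. A minimal sketch of the scheme (this trimmed `DataRecord` is illustrative; Django's real `django.utils.hashable.make_hashable` is used here, though the plugin may import its own equivalent):

```python
from django.utils.hashable import make_hashable  # converts nested dicts/lists to tuples

class DataRecord:
    """Trimmed illustration of the hashing scheme in the hunk above."""

    def __init__(self, model_string: str, data: dict):
        self.model_string = model_string
        self.data = data
        # Dicts keep insertion order, so equal payloads hash equally.
        self._hash = hash((self.model_string, make_hashable(self.data)))

    def __hash__(self):
        return self._hash

    def __eq__(self, other):
        return (self.model_string, self.data) == (other.model_string, other.data)

a = DataRecord("dcim.device", {"sn": "ABC123", "hostname": "r1"})
b = DataRecord("dcim.device", {"sn": "ABC123", "hostname": "r1"})
assert len({a, b}) == 1  # duplicate source rows collapse inside the record sets
```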
@@ -248,117 +224,6 @@ class DataRecord:
         self._hash = None # Invalidate cached hash


-# TODO: 1) Store this as model in DB linked to Transform map
-# TODO: 2) Each table will be added to template data with customizable name (instead of just `object`)
-# TODO:    and linked using common column (like `sn` for Device and Interface)
-# TODO: 3) Only pull required tables according to sync settings
-class Endpoints(Enum):
-    SITE = "inventory.sites"
-    DEVICE = "inventory.devices"
-    VIRTUALCHASSIS = "technology.platforms.stacks_members"
-    INTERFACE = "inventory.interfaces"
-    INVENTORYITEM = "inventory.pn"
-    VLAN = "technology.vlans.site_summary"
-    VRF = "technology.routing.vrf_detail"
-    PREFIX = "technology.managed_networks.networks"
-    IPADDRESS = "technology.addressing.managed_ip_ipv4"
-
-
-class Filters(Enum):
-    SITE = "site"
-    DEVICE = "device"
-    VIRTUALCHASSIS = "virtualchassis"
-    INTERFACE = "interface"
-    INVENTORYITEM = "inventoryitem"
-    VLAN = "vlan"
-    VRF = "vrf"
-    PREFIX = "prefix"
-    IPADDRESS = "ipaddress"
-
-    @staticmethod
-    def _site_filter(sites: dict | None) -> dict:
-        if sites:
-            return {"or": [{"siteName": ["eq", site]} for site in sites]}
-        return {}
-
-    @staticmethod
-    def _device_filter(sites: dict | None, child_table: bool = False) -> dict:
-        key = "vendor" if not child_table else "device.vendor"
-        excluded_vendors = ["aws", "azure"]
-        device_filter = {"and": [{key: ["neq", vendor]} for vendor in excluded_vendors]}
-        if sites:
-            site_filter = Filters._site_filter(sites)
-            device_filter["and"].append(site_filter)
-        return device_filter
-
-    @staticmethod
-    def _virtualchassis_filter(sites: dict | None) -> dict:
-        return Filters._device_filter(sites, child_table=True)
-
-    @staticmethod
-    def _interface_filter(sites: dict | None) -> dict:
-        return Filters._device_filter(sites, child_table=True)
-
-    @staticmethod
-    def _inventoryitem_filter(sites: dict | None) -> dict:
-        inventory_item_filter = {
-            "and": [
-                {"sn": ["empty", False]},
-                {"name": ["empty", False]},
-            ]
-        }
-        if site_filter := Filters._device_filter(sites, child_table=True):
-            inventory_item_filter["and"].append(site_filter)
-        return inventory_item_filter
-
-    @staticmethod
-    def _vlan_filter(sites: dict | None) -> dict:
-        # Remove VLANs with ID 0, minimum VLAN ID in NetBox is 1
-        vlan_filter = {"and": [{"vlanId": ["neq", 0]}]}
-        if site_filter := Filters._site_filter(sites):
-            vlan_filter["and"].append(site_filter)
-        return vlan_filter
-
-    @staticmethod
-    def _vrf_filter(sites: dict | None) -> dict:
-        return Filters._device_filter(sites, child_table=True)
-
-    @staticmethod
-    def _prefix_filter(sites: dict | None) -> dict:
-        if site_filter := Filters._device_filter(sites, child_table=True):
-            return {"and": [site_filter, {"and": [{"net": ["empty", False]}]}]}
-        else:
-            return {"and": [{"net": ["empty", False]}]}
-
-    @staticmethod
-    def _ipaddress_filter(sites: dict | None) -> dict:
-        return Filters._device_filter(sites, child_table=True)
-
-    @staticmethod
-    def get_filter(endpoint: str, sites: dict | None) -> dict:
-        method_name = f"_{getattr(Filters, endpoint).value}_filter"
-        filter_func = getattr(Filters, method_name, None)
-        if filter_func:
-            return filter_func(sites)
-        return {}
-
-
-# TODO: Store also hierarchy of models (e.g. Device required Device and virtual chassis endpoints)
-
-
-class EndpointHandler:
-    def __init__(self, client: IPFClient, snapshot_id: str) -> None:
-        self.client = client
-        self.snapshot_id = snapshot_id
-
-    def resolve_endpoint(self, endpoint: str) -> Callable[..., Any]:
-        """Resolve a dot-separated endpoint string to the corresponding IPFClient attribute."""
-        obj = self.client
-        for attr in endpoint.split("."):
-            obj = getattr(obj, attr)
-        return partial(obj.all, snapshot_id=self.snapshot_id)
-
-
 def order_members(members: list[dict]) -> dict[str, list[dict]]:
     """Order VC members to dict, where key is master serial number and values are all members."""
     devices = {}
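The hard-coded `Endpoints`/`Filters` enums and the `EndpointHandler` resolver are retired in this release; per the file list above, endpoints and filters now live in the database (seeded from `data/endpoint.json` and `data/filters.json` by migrations 0022-0024) and are combined per sync via `endpoint.combine_filters(...)` further down. For reference, a runnable reconstruction of what the removed static composition emitted for one endpoint:

```python
# Reconstruction of the removed Filters composition for the VLAN endpoint,
# showing the IP Fabric filter DSL it produced ({"and"/"or": [...]} trees).
def site_filter(sites):
    return {"or": [{"siteName": ["eq", site]} for site in sites]} if sites else {}

def vlan_filter(sites):
    # VLAN ID 0 was excluded because the minimum VLAN ID in NetBox is 1.
    vlan = {"and": [{"vlanId": ["neq", 0]}]}
    if sf := site_filter(sites):
        vlan["and"].append(sf)
    return vlan

print(vlan_filter(["HQ", "DC1"]))
# {'and': [{'vlanId': ['neq', 0]},
#          {'or': [{'siteName': ['eq', 'HQ']}, {'siteName': ['eq', 'DC1']}]}]}
```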
@@ -465,7 +330,7 @@ class IPFabricSyncRunner(object):
         exception: Exception,
         ingestion: "IPFabricIngestion",
         message: str = None,
-        model: str = None,
+        model_string: str = None,
         context: dict = None,
         data: dict = None,
     ) -> "tuple[bool, IPFabricIngestionIssue]":
@@ -476,7 +341,7 @@ class IPFabricSyncRunner(object):

         error_serial = self.get_error_serial(context, data)
         # Ignore models that do not have any dependencies by serial number
-        if error_serial and model not in ["ipaddress", "macaddress"]:
+        if error_serial and model_string not in ["ipam.ipaddress", "dcim.macaddress"]:
             self.error_serials.add(error_serial)

         # TODO: This is to prevent circular import issues, clean it up later.
@@ -487,7 +352,7 @@ class IPFabricSyncRunner(object):
             ingestion=ingestion,
             exception=exception.__class__.__name__,
             message=message or getattr(exception, "message", str(exception)),
-            model=model,
+            model=model_string,
             coalesce_fields={
                 k: v for k, v in context.items() if k not in ["defaults"]
             },
@@ -515,19 +380,19 @@ class IPFabricSyncRunner(object):
            # Logging section for logs inside job - facing user
            self = args[0]
            if isinstance(err, SearchError):
-                if self.settings.get(err.model):
+                if self.settings.get(err.model_string):
                    self.logger.log_failure(
-                        f"Aborting syncing `{err.model}` instance due to above error, please check your transform maps and/or existing data.",
+                        f"Aborting syncing `{err.model_string}` instance due to above error, please check your transform maps and/or existing data.",
                        obj=self.sync,
                    )
                else:
                    self.logger.log_failure(
-                        f"Syncing `{err.model}` is disabled in settings, but hit above error trying to find the correct item. Please check your transform maps and/or existing data.",
+                        f"Syncing `{err.model_string}` is disabled in settings, but hit above error trying to find the correct item. Please check your transform maps and/or existing data.",
                        obj=self.sync,
                    )
            if isinstance(err, IPAddressDuplicateError):
                self.logger.log_warning(
-                    f"IP Address `{err.data.get('address')}` already exists in `{err.model}` with coalesce fields: `{err.coalesce_fields}`. Please check your transform maps and/or existing data.",
+                    f"IP Address `{err.data.get('address')}` already exists in `{err.model_string}` with coalesce fields: `{err.coalesce_fields}`. Please check your transform maps and/or existing data.",
                    obj=self.sync,
                )
            else:
@@ -549,13 +414,11 @@ class IPFabricSyncRunner(object):

     def get_transform_context(self, record: DataRecord) -> DataRecord:
         if not record.transform_map:
-            raise SystemError(
-                f"No transform map available for {record.app}: {record.model}"
-            )
+            raise SystemError(f"No transform map available for {record.model_string}")
         try:
             record.context = record.transform_map.get_context(record.data)
         except Exception as err:
-            message = f"Error getting context for `{record.model}`."
+            message = f"Error getting context for `{record.model_string}`."
             if isinstance(err, ObjectDoesNotExist):
                 message += (
                     " Could not find related object using template in transform maps."
@@ -566,11 +429,14 @@ class IPFabricSyncRunner(object):
                 exception=err,
                 ingestion=self.ingestion,
                 message=message,
-                model=record.model,
+                model_string=record.model_string,
                 data=record.data,
             )
             raise SearchError(
-                message=message, data=record.data, model=record.model, issue_id=issue.pk
+                message=message,
+                data=record.data,
+                model_string=record.model_string,
+                issue_id=issue.pk,
             ) from err

         return record
@@ -581,67 +447,67 @@ class IPFabricSyncRunner(object):
         if error_serial and error_serial in self.error_serials:
             # We want to raise it as exception so it's shown in ingestion issues but can be filtered out.
             exception = RequiredDependencyFailedSkip(
-                message=f"Skipping syncing of `{record.model}` with serial `{error_serial}` due to previous errors.",
-                model=record.model,
+                message=f"Skipping syncing of `{record.model_string}` with serial `{error_serial}` due to previous errors.",
+                model_string=record.model_string,
                 context=record.context,
                 data=record.data,
             )
             _, issue = self.create_or_get_sync_issue(
                 exception=exception,
                 ingestion=self.ingestion,
-                model=record.model,
+                model_string=record.model_string,
                 context=record.context,
                 data=record.data,
             )
             raise exception
         record = self.get_transform_context(record)
         queryset = record.transform_map.target_model.model_class().objects
-        model_settings = self.settings.get(record.model, False)
+        model_settings = self.settings.get(f"{record.model_string}", False)

         obj = None
         try:
             connection_name = self.get_db_connection_name()
             if model_settings:
-                logger.info(f"Creating {record.model}")
+                logger.info(f"Creating {record.model_string}")
                 obj = record.transform_map.update_or_create_instance(
                     context=record.context,
                     tags=self.sync.tags.all(),
                     connection_name=connection_name,
                 )
             else:
-                logger.info(f"Getting {record.model}")
+                logger.info(f"Getting {record.model_string}")
                 record.context.pop("defaults", None)
                 obj = queryset.using(connection_name).get(**record.context)
         except queryset.model.DoesNotExist as err:
-            message = f"Instance of `{record.model}` not found."
+            message = f"Instance of `{record.model_string}` not found."
             _, issue = self.create_or_get_sync_issue(
                 exception=err,
                 ingestion=self.ingestion,
                 message=message,
-                model=record.model,
+                model_string=record.model_string,
                 context=record.context,
                 data=record.data,
             )
             raise SearchError(
                 message=message,
-                model=record.model,
+                model_string=record.model_string,
                 context=record.context,
                 data=record.data,
                 issue_id=issue.pk,
             ) from err
         except queryset.model.MultipleObjectsReturned as err:
-            message = f"Multiple instances of `{record.model}` found."
+            message = f"Multiple instances of `{record.model_string}` found."
             _, issue = self.create_or_get_sync_issue(
                 exception=err,
                 ingestion=self.ingestion,
                 message=message,
-                model=record.model,
+                model_string=record.model_string,
                 context=record.context,
                 data=record.data,
             )
             raise SearchError(
                 message=message,
-                model=record.model,
+                model_string=record.model_string,
                 context=record.context,
                 data=record.data,
                 issue_id=issue.pk,
@@ -650,12 +516,12 @@ class IPFabricSyncRunner(object):
             _, issue = self.create_or_get_sync_issue(
                 exception=err,
                 ingestion=self.ingestion,
-                model=record.model,
+                model_string=record.model_string,
                 context=record.context,
                 data=record.data,
             )
             raise SyncDataError(
-                model=record.model,
+                model_string=record.model_string,
                 context=record.context,
                 data=record.data,
                 issue_id=issue.pk,
@@ -664,12 +530,16 @@ class IPFabricSyncRunner(object):
         return obj

     def collect_data(self):
+        # Importing here to avoid circular import
+        from ..models import IPFabricEndpoint
+
         try:
             self.logger.log_info(
                 "Collecting information from IP Fabric",
                 obj=self.sync.snapshot_data.source,
             )
             data = {}
+            # TODO: Replace endpoint value in SnapshotData with Endpoint link
             if self.sync.snapshot_data.source.type == IPFabricSourceTypeChoices.REMOTE:
                 # This requires data already pushed to NetBox by user, does not connect to IPF directly
                 self.logger.log_info(
@@ -679,34 +549,59 @@ class IPFabricSyncRunner(object):
                     raise SyncError(
                         "No snapshot data available. This is a remote sync. Push data to NetBox first."
                     )
-                for endpoint in Endpoints:
-                    data[endpoint.name.lower()] = list(
+                for endpoint in IPFabricEndpoint.objects.all():
+                    data[endpoint.endpoint] = list(
                         self.sync.snapshot_data.ipf_data.filter(
-                            type=endpoint.name.lower()
+                            type=endpoint.endpoint
                         ).values_list("data", flat=True)
                     )
             else:
                 # This pulls data directly from IP Fabric instance
+                # TODO: If certain columns are not enabled in IPF, sync will fail in odd ways
+                # For example missing `nameOriginal` on Interface makes it unable to sync IPs
+                # When pulling from endpoint, make sure to specify columns according to transform maps
+                # This can be pulled using logic in IPFabricTransformMap.strip_source_data()
+                # TODO: Also cache it to improve performance, no need to regex every time!
                 self.logger.log_info(
                     "Local collector being used for snapshot data.", obj=self.sync
                 )
-                endpoint_handler = EndpointHandler(
-                    self.client,
-                    snapshot_id=self.settings["snapshot_id"],
-                )
-                ingestion_sites = self.settings.get("sites")
-                for endpoint in Endpoints:
-                    filters = Filters.get_filter(endpoint.name, ingestion_sites)
+                enabled_models = [
+                    param
+                    for param in self.sync.parameters.keys()
+                    if "." in param and self.sync.parameters.get(param) is True
+                ]
+                # Always pull devices for now since other models depend on them
+                enabled_models.append("dcim.device")
+                # Interfaces are required for IP Addresses assigned interface name
+                if "ipam.ipaddress" in enabled_models:
+                    enabled_models.append("dcim.interface")
+                transform_maps = self.sync.get_transform_maps()
+                for endpoint in IPFabricEndpoint.objects.all():
+                    tms_for_endpoint = transform_maps.filter(source_endpoint=endpoint)
+                    if not tms_for_endpoint.exists():
+                        raise SyncError(
+                            f"No transform map found for endpoint `{endpoint.endpoint}`."
+                        )
+                    if not [
+                        True
+                        for tm in tms_for_endpoint
+                        if ".".join(tm.target_model.natural_key()) in enabled_models
+                    ]:
+                        logger.debug(
+                            f"Skipping endpoint `{endpoint.endpoint}` as no enabled models are using it."
+                        )
+                        continue
+                    filters = endpoint.combine_filters(sync=self.sync)
                     logger.debug(
-                        f"Collecting data from endpoint: `{endpoint.value}` using filter `{json.dumps(filters)}`."
+                        f"Collecting data from endpoint: `{endpoint.endpoint}` using filter `{json.dumps(filters)}`."
                     )
-                    data[endpoint.name.lower()] = endpoint_handler.resolve_endpoint(
-                        endpoint.value
-                    )(
+                    data[endpoint.endpoint] = self.client.fetch_all(
+                        url=endpoint.endpoint,
+                        snapshot_id=self.settings["snapshot_id"],
                         filters=filters,
                     )
                     self.logger.log_info(
-                        f"Collected {len(data[endpoint.name.lower()])} items from endpoint `{endpoint.value}`.",
+                        f"Collected {len(data[endpoint.endpoint])} items from endpoint `{endpoint.endpoint}`.",
                         obj=self.sync.snapshot_data.source,
                     )
         except Exception as e:
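Worked example of the `enabled_models` derivation added above: sync parameters whose key contains a dot are model toggles (`dcim.interface`, `ipam.ipaddress`, ...), everything else (`snapshot_id`, `sites`, ...) is ignored, and two dependencies are appended unconditionally. The parameter values here are invented for illustration:

```python
# Model toggles are the dotted keys; operational settings are ignored.
parameters = {
    "snapshot_id": "$last",
    "dcim.site": True,
    "dcim.interface": False,
    "ipam.ipaddress": True,
}
enabled_models = [
    param for param in parameters if "." in param and parameters.get(param) is True
]
enabled_models.append("dcim.device")         # devices are always pulled (dependencies)
if "ipam.ipaddress" in enabled_models:
    enabled_models.append("dcim.interface")  # needed to resolve assigned interface names
print(enabled_models)  # ['dcim.site', 'ipam.ipaddress', 'dcim.device', 'dcim.interface']
```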
@@ -723,60 +618,89 @@ class IPFabricSyncRunner(object):
             target_model__app_label=app, target_model__model=model
         )

+    def create_new_data_records(
+        self, app: str, model: str, data: list[dict] | dict[str, list[dict]]
+    ) -> set[DataRecord]:
+        """Create DataRecord objects for given app, model and data list if enabled in settings."""
+        if not self.sync.parameters.get(f"{app}.{model}"):
+            return set()
+        if isinstance(data, dict):
+            # Data is either already list or full data where we have to choose the list
+            transform_map = self.get_transform_map(app=app, model=model)
+            data = data.get(transform_map.source_endpoint.endpoint, [])
+        return set(
+            self.create_new_data_record(app=app, model=model, data=item)
+            for item in data
+        )
+
     def create_new_data_record(self, app: str, model: str, data: dict) -> DataRecord:
         """Extract only relevant source data according to transform map configuration."""
         transform_map = self.get_transform_map(app=app, model=model)
+        model_string = f"{app}.{model}"
         try:
             source_data = transform_map.strip_source_data(data)
         except KeyError as err:
             raise SyncError(
-                f"Missing key column {err.args[0]} in source data when preparing data for {app}_{model}."
+                f"Missing key column {err.args[0]} in source data when preparing data for {model_string}."
             ) from err
         return DataRecord(
-            app=app, model=model, data=source_data, transform_map=transform_map
+            model_string=model_string, data=source_data, transform_map=transform_map
         )

     def preprocess_data(self, data: dict) -> dict[str, set[DataRecord]]:
-        # TODO: Only process data according to settings to improve performance
         # Set those records that can't be iterated separately
         # Others are as empty set to define order which is shown in UI progress
         records = {
-            "site": set(
-                self.create_new_data_record(app="dcim", model="site", data=item)
-                for item in data.get("site", [])
+            "dcim.site": self.create_new_data_records(
+                app="dcim", model="site", data=data
             ),
-            "manufacturer": set(),
-            "devicetype": set(),
-            "platform": set(),
-            "devicerole": set(),
-            "device": set(),
-            "virtualchassis": set(),
-            "interface": set(),
-            "macaddress": set(),
-            "inventoryitem": set(
-                self.create_new_data_record(
-                    app="dcim", model="inventoryitem", data=item
-                )
-                for item in data.get("inventoryitem", [])
+            "dcim.manufacturer": set(),
+            "dcim.devicetype": set(),
+            "dcim.platform": set(),
+            "dcim.devicerole": set(),
+            "dcim.device": set(),
+            "dcim.virtualchassis": set(),
+            "dcim.interface": set(),
+            "dcim.macaddress": set(),
+            "dcim.inventoryitem": self.create_new_data_records(
+                app="dcim",
+                model="inventoryitem",
+                data=data,
             ),
-            "vlan": set(
-                self.create_new_data_record(app="ipam", model="vlan", data=item)
-                for item in data.get("vlan", [])
+            "ipam.vlan": self.create_new_data_records(
+                app="ipam",
+                model="vlan",
+                data=data,
             ),
-            "vrf": set(
-                self.create_new_data_record(app="ipam", model="vrf", data=item)
-                for item in data.get("vrf", [])
+            "ipam.vrf": self.create_new_data_records(
+                app="ipam",
+                model="vrf",
+                data=data,
            ),
-            "prefix": set(
-                self.create_new_data_record(app="ipam", model="prefix", data=item)
-                for item in data.get("prefix", [])
+            "ipam.prefix": self.create_new_data_records(
+                app="ipam",
+                model="prefix",
+                data=data,
             ),
-            "ipaddress": set(),
+            "ipam.ipaddress": set(),
         }

-        self.logger.log_info("Preparing devices", obj=self.sync)
-        members = order_members(data.get("virtualchassis", []))
-        devices, virtualchassis = prepare_devices(data.get("device", []), members)
+        if self.sync.parameters.get("dcim.virtualchassis"):
+            self.logger.log_info("Preparing virtual chassis members", obj=self.sync)
+            members = order_members(data.get("/technology/platforms/stack/members", []))
+        else:
+            members = []
+
+        if self.sync.parameters.get("dcim.device") or self.sync.parameters.get(
+            "dcim.virtualchassis"
+        ):
+            self.logger.log_info("Preparing devices", obj=self.sync)
+            devices, virtualchassis = prepare_devices(
+                data.get("/inventory/devices", []), members
+            )
+        else:
+            # We can skip iterating over devices since we don't need stack members for the rest of the models
+            devices, virtualchassis = data.get("/inventory/devices", []), []

         # We need to store primary IPs of Devices to assign them later
         # since they are not stored on Device object directly
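The new `create_new_data_records` helper accepts either a ready-made list (e.g. the virtual-chassis list built by `prepare_devices`) or the full per-endpoint data dict, from which it selects the right list via the transform map's `source_endpoint.endpoint`. A standalone sketch of that dispatch with stub types (no Django, and record construction reduced to a hashable stand-in):

```python
# Standalone sketch of the create_new_data_records dispatch (stub types, no
# Django). Dict input is the full collected data keyed by endpoint, so the
# transform map's source endpoint picks the relevant list first.
class StubSourceEndpoint:
    endpoint = "/inventory/devices"

class StubTransformMap:
    source_endpoint = StubSourceEndpoint

def create_new_data_records(enabled, data, transform_map=StubTransformMap):
    if not enabled:  # model not ticked in the sync parameters
        return set()
    if isinstance(data, dict):  # full data: choose this map's endpoint list
        data = data.get(transform_map.source_endpoint.endpoint, [])
    # Stand-in for DataRecord creation; frozenset keeps items hashable for the set
    return {frozenset(item.items()) for item in data}

collected = {"/inventory/devices": [{"sn": "A"}, {"sn": "B"}]}
print(len(create_new_data_records(True, collected)))      # 2 (dict input)
print(len(create_new_data_records(True, [{"sn": "C"}])))  # 1 (list input)
print(create_new_data_records(False, collected))          # set()
```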
@@ -784,31 +708,41 @@ class IPFabricSyncRunner(object):
         device_primary_ips = {}

         for device in devices:
-            records["manufacturer"].add(
-                self.create_new_data_record(
-                    app="dcim", model="manufacturer", data=device
+            if self.sync.parameters.get("dcim.manufacturer"):
+                records["dcim.manufacturer"].add(
+                    self.create_new_data_record(
+                        app="dcim", model="manufacturer", data=device
+                    )
+                )
+            if self.sync.parameters.get("dcim.devicetype"):
+                records["dcim.devicetype"].add(
+                    self.create_new_data_record(
+                        app="dcim", model="devicetype", data=device
+                    )
+                )
+            if self.sync.parameters.get("dcim.platform"):
+                records["dcim.platform"].add(
+                    self.create_new_data_record(
+                        app="dcim", model="platform", data=device
+                    )
+                )
+            if self.sync.parameters.get("dcim.devicerole"):
+                records["dcim.devicerole"].add(
+                    self.create_new_data_record(
+                        app="dcim", model="devicerole", data=device
+                    )
                 )
-            )
-            records["devicetype"].add(
-                self.create_new_data_record(app="dcim", model="devicetype", data=device)
-            )
-            records["platform"].add(
-                self.create_new_data_record(app="dcim", model="platform", data=device)
-            )
-            records["devicerole"].add(
-                self.create_new_data_record(app="dcim", model="devicerole", data=device)
-            )
             # This field is required by Device transform maps, but is set only when Device is part of VC.
             if "virtual_chassis" not in device:
                 device["virtual_chassis"] = None
-            records["device"].add(
-                self.create_new_data_record(app="dcim", model="device", data=device)
-            )
+            if self.sync.parameters.get("dcim.device"):
+                records["dcim.device"].add(
+                    self.create_new_data_record(app="dcim", model="device", data=device)
+                )
             device_primary_ips[device.get("sn")] = device.get("loginIp")

-        records["virtualchassis"] = set(
-            self.create_new_data_record(app="dcim", model="virtualchassis", data=item)
-            for item in virtualchassis
+        records["dcim.virtualchassis"] = self.create_new_data_records(
+            app="dcim", model="virtualchassis", data=virtualchassis
         )

         # `nameOriginal` is human-readable interface name hidden column in IP Fabric
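Worked example of the per-model gating introduced in the device loop above: each device row only feeds the record sets whose model is ticked in the sync parameters. Parameter values are invented for illustration:

```python
# Only the models ticked in the sync parameters produce records per device row.
parameters = {
    "dcim.manufacturer": True,
    "dcim.devicetype": False,
    "dcim.platform": True,
    "dcim.devicerole": False,
    "dcim.device": True,
}
records = {key: set() for key in parameters}

for device in [{"sn": "A", "vendor": "cisco"}]:
    for key in records:
        if parameters.get(key):
            records[key].add(device["sn"])  # stand-in for create_new_data_record(...)

print({k: len(v) for k, v in records.items()})
# {'dcim.manufacturer': 1, 'dcim.devicetype': 0, 'dcim.platform': 1,
#  'dcim.devicerole': 0, 'dcim.device': 1}
```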
@@ -826,53 +760,61 @@ class IPFabricSyncRunner(object):
             )
             raise SyncError(f"Error collecting source column name for interface: {e}")

-        self.logger.log_info("Preparing Interfaces", obj=self.sync)
         # Store human-readable interface names to use them later for IP Addresses
         readable_int_names = {}
-        for interface in data["interface"]:
-            interface_record = self.create_new_data_record(
-                app="dcim", model="interface", data=interface
-            )
-            interface_record.data["loginIp"] = device_primary_ips.get(
-                interface.get("sn")
-            )
-            records["interface"].add(interface_record)
-            readable_int_names[
-                f"{interface.get('sn')}_{interface.get('intName')}"
-            ] = interface.get(interface_key)
-            records["macaddress"].add(
-                self.create_new_data_record(
-                    app="dcim", model="macaddress", data=interface
-                )
-            )
+        if (
+            self.sync.parameters.get("dcim.interface")
+            or self.sync.parameters.get("dcim.macaddress")
+            or self.sync.parameters.get("ipam.ipaddress")
+        ):
+            self.logger.log_info("Preparing Interfaces", obj=self.sync)
+            for interface in data.get("/inventory/interfaces", []):
+                if self.sync.parameters.get("dcim.interface"):
+                    interface_record = self.create_new_data_record(
+                        app="dcim", model="interface", data=interface
+                    )
+                    interface_record.data["loginIp"] = device_primary_ips.get(
+                        interface.get("sn")
+                    )
+                    records["dcim.interface"].add(interface_record)
+                readable_int_names[
+                    f"{interface.get('sn')}_{interface.get('intName')}"
+                ] = interface.get(interface_key)
+                if self.sync.parameters.get("dcim.macaddress"):
+                    records["dcim.macaddress"].add(
+                        self.create_new_data_record(
+                            app="dcim", model="macaddress", data=interface
+                        )
+                    )

-        self.logger.log_info("Preparing IP Addresses", obj=self.sync)
-        for ip in data["ipaddress"]:
-            # We get `nameOriginal` from Interface table to get human-readable name instead fo `intName`
-            ip["nameOriginal"] = readable_int_names.get(
-                f"{ip.get('sn')}_{ip.get('intName')}"
-            )
-            # Let's skip IPs we cannot assign to an interface
-            if not ip["nameOriginal"]:
-                continue
-            ipaddress_record = self.create_new_data_record(
-                app="ipam", model="ipaddress", data=ip
-            )
-            # Store whether this IP is primary for the device
-            ipaddress_record.data["is_primary"] = ip.get(
-                "sn"
-            ) in device_primary_ips and device_primary_ips.get(
-                ip.get("sn")
-            ) == ipaddress_record.data.get(
-                "ip"
-            )
-            records["ipaddress"].add(ipaddress_record)
+        if self.sync.parameters.get("ipam.ipaddress"):
+            self.logger.log_info("Preparing IP Addresses", obj=self.sync)
+            for ip in data.get("/technology/addressing/managed-ip/ipv4", []):
+                # We get `nameOriginal` from Interface table to get human-readable name instead fo `intName`
+                ip["nameOriginal"] = readable_int_names.get(
+                    f"{ip.get('sn')}_{ip.get('intName')}"
+                )
+                # Let's skip IPs we cannot assign to an interface
+                if not ip["nameOriginal"]:
+                    continue
+                ipaddress_record = self.create_new_data_record(
+                    app="ipam", model="ipaddress", data=ip
+                )
+                # Store whether this IP is primary for the device
+                ipaddress_record.data["is_primary"] = ip.get(
+                    "sn"
+                ) in device_primary_ips and device_primary_ips.get(
+                    ip.get("sn")
+                ) == ipaddress_record.data.get(
+                    "ip"
+                )
+                records["ipam.ipaddress"].add(ipaddress_record)

-        for model, records_set in records.items():
-            if self.settings.get(model) and len(records_set):
-                self.logger.init_statistics(model, len(records_set))
+        for model_string, records_set in records.items():
+            if self.settings.get(model_string) and len(records_set):
+                self.logger.init_statistics(model_string, len(records_set))
                 self.logger.log_info(
-                    f"Prepared {len(records_set)} items for `{model}` to be synced.",
+                    f"Prepared {len(records_set)} items for `{model_string}` to be synced.",
                     obj=self.sync,
                 )

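Worked example of the interface-name join used above: managed-IP rows carry IP Fabric's short `intName`, so the human-readable `nameOriginal` is looked up from the interface table by a `"{sn}_{intName}"` key, and the primary-IP flag compares the row's address against the device's `loginIp`. (In the real code the interface column name is resolved dynamically into `interface_key`; it is hard-coded to `nameOriginal` here for brevity.)

```python
# Interface rows provide the hidden human-readable column; managed-IP rows
# only carry the short intName, so they are joined on "{sn}_{intName}".
interfaces = [{"sn": "SN1", "intName": "Gi0/0", "nameOriginal": "GigabitEthernet0/0"}]
device_primary_ips = {"SN1": "10.0.0.1"}  # sn -> loginIp, collected earlier

readable_int_names = {
    f"{i['sn']}_{i['intName']}": i["nameOriginal"] for i in interfaces
}

ip_row = {"sn": "SN1", "intName": "Gi0/0", "ip": "10.0.0.1"}
ip_row["nameOriginal"] = readable_int_names.get(f"{ip_row['sn']}_{ip_row['intName']}")
is_primary = (
    ip_row["sn"] in device_primary_ips
    and device_primary_ips[ip_row["sn"]] == ip_row["ip"]
)
print(ip_row["nameOriginal"], is_primary)  # GigabitEthernet0/0 True
```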
@@ -883,13 +825,8 @@ class IPFabricSyncRunner(object):
         self,
         record: DataRecord,
         stats: bool = True,
-        sync: bool = False,
     ) -> ModelTypeVar | None:
         """Sync a single item to NetBox."""
-        # The `sync` param is a workaround since we need to get some models (Device...) even when not syncing them.
-        if not sync:
-            return None
-
         if not record.data:
             return None

@@ -897,7 +834,7 @@ class IPFabricSyncRunner(object):

         # Only log when we successfully synced the item and asked for it
         if stats and instance:
-            self.logger.increment_statistics(model=record.model)
+            self.logger.increment_statistics(model_string=record.model_string)

         return instance

@@ -909,11 +846,7 @@ class IPFabricSyncRunner(object):
         stats: bool = True,
     ) -> ModelTypeVar | None:
         """Sync a single item to NetBox."""
-        synced_object = self.sync_model(
-            record=record,
-            sync=self.settings.get(record.model),
-            stats=stats,
-        )
+        synced_object = self.sync_model(record=record, stats=stats)
         if synced_object is None:
             return None

@@ -939,10 +872,10 @@ class IPFabricSyncRunner(object):
         if not items:
             return

-        app, model = (lambda x: (x.app, x.model))(next(iter(items)))
-        if not self.settings.get(model):
+        model_string = (lambda record: record.model_string)(next(iter(items)))
+        if not self.settings.get(model_string):
             self.logger.log_info(
-                f"Did not ask to sync {model}s, skipping.", obj=self.sync
+                f"Did not ask to sync {model_string}s, skipping.", obj=self.sync
             )
             return

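Nit on the last hunk: `(lambda record: record.model_string)(next(iter(items)))` is an immediately-invoked lambda; a plain attribute access on the first record is equivalent:

```python
class Record:
    def __init__(self, model_string):
        self.model_string = model_string

items = {Record("dcim.device")}
# Equivalent to: (lambda record: record.model_string)(next(iter(items)))
model_string = next(iter(items)).model_string
print(model_string)  # dcim.device
```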
@@ -958,7 +891,7 @@ class IPFabricSyncRunner(object):
         ingestion: "IPFabricIngestion" = None,
     ) -> None:
         """Sync devices separately to handle resetting primary IP."""
-        if not self.settings.get("device"):
+        if not self.settings.get("dcim.device"):
             self.logger.log_info(
                 "Did not ask to sync devices, skipping.", obj=self.sync
             )
@@ -989,13 +922,13 @@ class IPFabricSyncRunner(object):
                     exception=err,
                     ingestion=self.ingestion,
                     message="Error removing primary IP current device.",
-                    model=device.model,
+                    model_string=device.model_string,
                     data=device.data,
                 )
                 self.events_clearer.increment()
                 raise IPAddressPrimaryRemovalError(
                     data=device.data,
-                    model=device.model,
+                    model_string=device.model_string,
                     issue_id=issue.pk,
                 ) from err
             self.events_clearer.increment()
@@ -1032,13 +965,13 @@ class IPFabricSyncRunner(object):
                     exception=err,
                     ingestion=self.ingestion,
                     message="Error removing primary IP from other device.",
-                    model=ip_address.model,
+                    model_string=ip_address.model_string,
                     data=ip_address.data,
                 )
                 self.events_clearer.increment()
                 raise IPAddressPrimaryRemovalError(
                     data=ip_address.data,
-                    model=ip_address.model,
+                    model_string=ip_address.model_string,
                     issue_id=issue.pk,
                 ) from err

@@ -1063,13 +996,13 @@ class IPFabricSyncRunner(object):
                     exception=err,
                     ingestion=self.ingestion,
                     message="Error assigning primary IP to device.",
-                    model=ip_address.model,
+                    model_string=ip_address.model_string,
                     data=ip_address.data,
                 )
                 self.events_clearer.increment()
                 raise IPAddressPrimaryAssignmentError(
                     data=ip_address.data,
-                    model=ip_address.model,
+                    model_string=ip_address.model_string,
                     issue_id=issue.pk,
                 ) from err
             self.events_clearer.increment()
@@ -1086,7 +1019,7 @@ class IPFabricSyncRunner(object):
        Cleaning events queue happens during each cycle to make sure all required
        operations (primary IP assignment) happen during the same batch.
        """
-        if not self.settings.get("ipaddress"):
+        if not self.settings.get("ipam.ipaddress"):
            self.logger.log_info(
                "Did not ask to sync ipaddresses, skipping.", obj=self.sync
            )
@@ -1102,40 +1035,45 @@ class IPFabricSyncRunner(object):
         records = self.preprocess_data(data=data)

         self.logger.log_info("Starting data sync.", obj=self.sync)
-        self.sync_items(
-            items=records["site"],
-            cf=self.sync.update_custom_fields,
-            ingestion=ingestion,
-        )
-        self.sync_items(items=records["manufacturer"])
-        self.sync_items(items=records["devicetype"])
-        self.sync_items(items=records["platform"])
-        self.sync_items(items=records["devicerole"])
+        try:
+            # This signal does not call for snapshot(), causing issue with branching plugin
+            signals.post_save.disconnect(sync_cached_scope_fields, sender=Site)
+            self.sync_items(
+                items=records["dcim.site"],
+                cf=self.sync.update_custom_fields,
+                ingestion=ingestion,
+            )
+        finally:
+            signals.post_save.connect(sync_cached_scope_fields, sender=Site)
+        self.sync_items(items=records["dcim.manufacturer"])
+        self.sync_items(items=records["dcim.devicetype"])
+        self.sync_items(items=records["dcim.platform"])
+        self.sync_items(items=records["dcim.devicerole"])
         try:
             # This signal does not call for snapshot(), causing issue with branching plugin
             signals.post_save.disconnect(
                 assign_virtualchassis_master, sender=VirtualChassis
             )
-            self.sync_items(items=records["virtualchassis"])
+            self.sync_items(items=records["dcim.virtualchassis"])
             self.sync_devices(
-                devices=records["device"],
+                devices=records["dcim.device"],
                 cf=self.sync.update_custom_fields,
                 ingestion=ingestion,
             )
             # The Device exists now, so we can update the master of the VC.
             # The logic is handled in transform maps.
-            self.sync_items(items=records["virtualchassis"], stats=False)
+            self.sync_items(items=records["dcim.virtualchassis"], stats=False)
         finally:
             signals.post_save.connect(
                 assign_virtualchassis_master, sender=VirtualChassis
             )
-        self.sync_items(items=records["interface"])
-        self.sync_items(items=records["macaddress"])
-        self.sync_items(items=records["inventoryitem"])
-        self.sync_items(items=records["vlan"])
-        self.sync_items(items=records["vrf"])
-        self.sync_items(items=records["prefix"])
-        self.sync_ip_addresses(ip_addresses=records["ipaddress"])
+        self.sync_items(items=records["dcim.interface"])
+        self.sync_items(items=records["dcim.macaddress"])
+        self.sync_items(items=records["dcim.inventoryitem"])
+        self.sync_items(items=records["ipam.vlan"])
+        self.sync_items(items=records["ipam.vrf"])
+        self.sync_items(items=records["ipam.prefix"])
+        self.sync_ip_addresses(ip_addresses=records["ipam.ipaddress"])

         # Make sure to clean queue (and memory) at the end
         self.events_clearer.clear()
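The orchestration now wraps two sync phases in identical disconnect/`finally`-reconnect blocks (for `sync_cached_scope_fields` on `Site` and `assign_virtualchassis_master` on `VirtualChassis`). If this pattern grows, a small context manager would deduplicate it; a sketch (illustrative only, not part of the diff), relying on Django's standard `Signal.connect`/`Signal.disconnect`:

```python
from contextlib import contextmanager

@contextmanager
def signal_disconnected(signal, receiver, sender):
    """Temporarily detach a Django signal receiver, reconnecting even on error.

    Mirrors the try/finally shape used twice in the hunk above."""
    signal.disconnect(receiver, sender=sender)
    try:
        yield
    finally:
        signal.connect(receiver, sender=sender)

# Hypothetical usage matching the first block above:
# with signal_disconnected(signals.post_save, sync_cached_scope_fields, Site):
#     self.sync_items(items=records["dcim.site"], ...)
```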