ipfabric_netbox 4.3.2b9__py3-none-any.whl → 4.3.2b11__py3-none-any.whl

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.

Potentially problematic release: this version of ipfabric_netbox might be problematic.

Files changed (50)
  1. ipfabric_netbox/__init__.py +1 -1
  2. ipfabric_netbox/api/serializers.py +112 -7
  3. ipfabric_netbox/api/urls.py +6 -0
  4. ipfabric_netbox/api/views.py +23 -0
  5. ipfabric_netbox/choices.py +74 -40
  6. ipfabric_netbox/data/endpoint.json +52 -0
  7. ipfabric_netbox/data/filters.json +51 -0
  8. ipfabric_netbox/data/transform_map.json +190 -176
  9. ipfabric_netbox/exceptions.py +7 -5
  10. ipfabric_netbox/filtersets.py +310 -41
  11. ipfabric_netbox/forms.py +330 -80
  12. ipfabric_netbox/graphql/__init__.py +6 -0
  13. ipfabric_netbox/graphql/enums.py +5 -5
  14. ipfabric_netbox/graphql/filters.py +56 -4
  15. ipfabric_netbox/graphql/schema.py +28 -0
  16. ipfabric_netbox/graphql/types.py +61 -1
  17. ipfabric_netbox/jobs.py +12 -1
  18. ipfabric_netbox/migrations/0022_prepare_for_filters.py +182 -0
  19. ipfabric_netbox/migrations/0023_populate_filters_data.py +303 -0
  20. ipfabric_netbox/migrations/0024_finish_filters.py +29 -0
  21. ipfabric_netbox/migrations/0025_add_vss_chassis_endpoint.py +166 -0
  22. ipfabric_netbox/models.py +432 -17
  23. ipfabric_netbox/navigation.py +98 -24
  24. ipfabric_netbox/tables.py +194 -9
  25. ipfabric_netbox/templates/ipfabric_netbox/htmx_list.html +5 -0
  26. ipfabric_netbox/templates/ipfabric_netbox/inc/combined_expressions.html +59 -0
  27. ipfabric_netbox/templates/ipfabric_netbox/inc/combined_expressions_content.html +39 -0
  28. ipfabric_netbox/templates/ipfabric_netbox/inc/endpoint_filters_with_selector.html +54 -0
  29. ipfabric_netbox/templates/ipfabric_netbox/ipfabricendpoint.html +39 -0
  30. ipfabric_netbox/templates/ipfabric_netbox/ipfabricfilter.html +51 -0
  31. ipfabric_netbox/templates/ipfabric_netbox/ipfabricfilterexpression.html +39 -0
  32. ipfabric_netbox/templates/ipfabric_netbox/ipfabricfilterexpression_edit.html +150 -0
  33. ipfabric_netbox/templates/ipfabric_netbox/ipfabricsync.html +1 -1
  34. ipfabric_netbox/templates/ipfabric_netbox/ipfabrictransformmap.html +16 -2
  35. ipfabric_netbox/templatetags/ipfabric_netbox_helpers.py +68 -0
  36. ipfabric_netbox/tests/api/test_api.py +333 -13
  37. ipfabric_netbox/tests/test_filtersets.py +2592 -0
  38. ipfabric_netbox/tests/test_forms.py +1349 -74
  39. ipfabric_netbox/tests/test_models.py +242 -34
  40. ipfabric_netbox/tests/test_views.py +2031 -26
  41. ipfabric_netbox/urls.py +35 -0
  42. ipfabric_netbox/utilities/endpoint.py +83 -0
  43. ipfabric_netbox/utilities/filters.py +88 -0
  44. ipfabric_netbox/utilities/ipfutils.py +393 -377
  45. ipfabric_netbox/utilities/logging.py +7 -7
  46. ipfabric_netbox/utilities/transform_map.py +144 -5
  47. ipfabric_netbox/views.py +719 -5
  48. {ipfabric_netbox-4.3.2b9.dist-info → ipfabric_netbox-4.3.2b11.dist-info}/METADATA +2 -2
  49. {ipfabric_netbox-4.3.2b9.dist-info → ipfabric_netbox-4.3.2b11.dist-info}/RECORD +50 -33
  50. {ipfabric_netbox-4.3.2b9.dist-info → ipfabric_netbox-4.3.2b11.dist-info}/WHEEL +1 -1
ipfabric_netbox/models.py CHANGED
@@ -24,7 +24,10 @@ from django.core.validators import MinValueValidator
 from django.db import models
 from django.db import transaction
 from django.db.models import Q
+from django.db.models import QuerySet
 from django.db.models import signals
+from django.db.models.signals import m2m_changed
+from django.dispatch import receiver
 from django.urls import reverse
 from django.utils import timezone
 from django.utils.module_loading import import_string
@@ -42,19 +45,20 @@ from netbox_branching.utilities import supports_branching
 from utilities.querysets import RestrictedQuerySet
 from utilities.request import NetBoxFakeRequest
 
+from .choices import IPFabricEndpointChoices
+from .choices import IPFabricFilterTypeChoices
 from .choices import IPFabricRawDataTypeChoices
 from .choices import IPFabricSnapshotStatusModelChoices
 from .choices import IPFabricSourceStatusChoices
 from .choices import IPFabricSourceTypeChoices
 from .choices import IPFabricSyncStatusChoices
-from .choices import IPFabricTransformMapSourceModelChoices
 from .choices import required_transform_map_contenttypes
 from .signals import assign_primary_mac_address
 from .utilities.ipfutils import IPFabric
 from .utilities.ipfutils import IPFabricSyncRunner
 from .utilities.ipfutils import render_jinja2
 from .utilities.logging import SyncLogging
-
+from .utilities.transform_map import has_cycle_dfs
 
 logger = logging.getLogger("ipfabric_netbox.models")
 
@@ -98,6 +102,125 @@ IPFabricRelationshipFieldSourceModels = Q(
 )
 
 
+class IPFabricEndpoint(NetBoxModel):
+    objects = RestrictedQuerySet.as_manager()
+
+    name = models.CharField(max_length=100, unique=True)
+    description = models.TextField(blank=True, null=True)
+    endpoint = models.CharField(
+        max_length=200,
+        verbose_name=_(
+            "Endpoint path from URL notation, for example `/inventory/devices`."
+        ),
+        choices=IPFabricEndpointChoices,
+        unique=True,
+    )
+
+    class Meta:
+        ordering = ("pk",)
+        verbose_name = _("IP Fabric Endpoint")
+        verbose_name_plural = _("IP Fabric Endpoints")
+
+    def __str__(self):
+        return f"{self.endpoint}"
+
+    def get_absolute_url(self):
+        return reverse("plugins:ipfabric_netbox:ipfabricendpoint", args=[self.pk])
+
+    def save(self, *args, **kwargs):
+        super().save(*args, **kwargs)
+        if not self.endpoint.startswith("/"):
+            self.endpoint = f"/{self.endpoint}"
+        if self.endpoint.endswith("/"):
+            self.endpoint = self.endpoint.rstrip("/")
+
+    @staticmethod
+    def _merge_filter_structures(base: dict, new: dict) -> dict:
+        """Recursively merge filter structures with matching and/or keys at same level.
+
+        Args:
+            base: Base filter dictionary to merge into
+            new: New filter dictionary to merge from
+
+        Returns:
+            Merged filter dictionary
+        """
+        for key, value in new.items():
+            # Only merge 'and' and 'or' keys
+            if key not in ("and", "or") or not isinstance(value, list):
+                continue
+
+            if key not in base:
+                base[key] = []
+
+            # Process each item in the new filter's array
+            for new_item in value:
+                if not isinstance(new_item, dict):
+                    # Non-dict items just get appended
+                    base[key].append(new_item)
+                    continue
+
+                # Check if there's a matching structure in base to merge with
+                merged = False
+                for base_item in base[key]:
+                    if not isinstance(base_item, dict):
+                        continue
+
+                    # Check if both dicts have the same and/or keys
+                    new_keys = set(k for k in new_item.keys() if k in ("and", "or"))
+                    base_keys = set(k for k in base_item.keys() if k in ("and", "or"))
+
+                    if new_keys == base_keys and new_keys:
+                        # Matching structure found - recursively merge
+                        IPFabricEndpoint._merge_filter_structures(base_item, new_item)
+                        merged = True
+                        break
+
+                if not merged:
+                    # No matching structure found - append as new item
+                    base[key].append(new_item)
+
+        return base
+
+    def combine_filters(self, sync=None) -> dict:
+        """Combine all filters for this endpoint into a single filter dictionary.
+
+        Args:
+            sync: Optional IPFabricSync to filter by. If provided, only filters
+                associated with that sync are included.
+
+        Returns:
+            Dict with filter types as keys (e.g., 'and', 'or') and lists of
+            expressions as values.
+        """
+        combined_filter = {}
+
+        # Get filters for this endpoint, optionally filtered by sync
+        if sync:
+            endpoint_filters = self.filters.filter(syncs=sync)
+        else:
+            endpoint_filters = self.filters.all()
+
+        for endpoint_filter in endpoint_filters:
+            filter_expressions = endpoint_filter.merge_expressions()
+
+            # Create a temporary dict with the filter type as key
+            new_filter = {endpoint_filter.filter_type: filter_expressions}
+
+            # Recursively merge the new filter into combined_filter
+            combined_filter = self._merge_filter_structures(combined_filter, new_filter)
+
+        # Sites filter is stored in sync parameters for user convenience
+        if sync and (sites := (sync.parameters or {}).get("sites")):
+            if "and" not in combined_filter:
+                combined_filter["and"] = []
+            combined_filter["and"].extend(
+                [{"or": [{"siteName": ["eq", site]} for site in sites]}]
+            )
+
+        return combined_filter
+
+
 class IPFabricTransformMapGroup(NetBoxModel):
     name = models.CharField(max_length=100, unique=True)
     description = models.TextField(blank=True, null=True)
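A minimal, hypothetical illustration of what combine_filters() above returns for an endpoint with one "and" filter and one "or" filter attached, plus a sync whose parameters contain a sites list. The vendor, hostname, and site values are made up; only the merge and site-append behaviour shown in the diff is assumed.

# Hypothetical inputs (not from the package): the merged expressions of two filters
and_filter_expressions = [{"vendor": ["eq", "cisco"]}]    # from a filter with filter_type="and"
or_filter_expressions = [{"hostname": ["like", "core"]}]  # from a filter with filter_type="or"

# Expected shape of combine_filters(sync) when sync.parameters["sites"] == ["HQ", "BRANCH-01"]
expected_combined = {
    "and": [
        {"vendor": ["eq", "cisco"]},
        # the sites from sync parameters are appended as a nested "or" block
        {"or": [{"siteName": ["eq", "HQ"]}, {"siteName": ["eq", "BRANCH-01"]}]},
    ],
    "or": [
        {"hostname": ["like", "core"]},
    ],
}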
@@ -117,9 +240,12 @@ class IPFabricTransformMapGroup(NetBoxModel):
 
 
 class IPFabricTransformMap(NetBoxModel):
-    name = models.CharField(max_length=100)
-    source_model = models.CharField(
-        max_length=50, choices=IPFabricTransformMapSourceModelChoices
+    name = models.CharField(max_length=200)
+    source_endpoint = models.ForeignKey(
+        to=IPFabricEndpoint,
+        on_delete=models.PROTECT,
+        related_name="transform_maps",
+        editable=True,
     )
     target_model = models.ForeignKey(
         to=ContentType,
@@ -138,6 +264,15 @@ class IPFabricTransformMap(NetBoxModel):
         blank=True,
         null=True,
     )
+    parents = models.ManyToManyField(
+        "self",
+        symmetrical=False,
+        blank=True,
+        related_name="children",
+        help_text=_(
+            "Parent transform maps, for hierarchical organization during sync."
+        ),
+    )
 
     class Meta:
         ordering = ("pk",)
@@ -145,10 +280,12 @@ class IPFabricTransformMap(NetBoxModel):
         verbose_name_plural = _("IP Fabric Transform Maps")
 
     def __str__(self):
-        if self.source_model and self.target_model:
-            return f"{self.source_model} - {self.target_model}"
-        else:
-            return "Transform Map"
+        try:
+            if self.source_endpoint and self.target_model:
+                return f"{self.source_endpoint} - {self.target_model}"
+        except (AttributeError, IPFabricEndpoint.DoesNotExist):
+            pass
+        return f"Transform Map: {self.name}" if self.name else "Transform Map"
 
     def get_absolute_url(self):
         return reverse("plugins:ipfabric_netbox:ipfabrictransformmap", args=[self.pk])
@@ -163,23 +300,51 @@ class IPFabricTransformMap(NetBoxModel):
         qs = IPFabricTransformMap.objects.filter(
             group=self.group,
             target_model_id=self.target_model_id,
+            source_endpoint_id=self.source_endpoint_id,
         )
         if self.pk:
             qs = qs.exclude(pk=self.pk)
         if qs.exists():
             err_msg = _(
-                f"A transform map with group '{self.group}' and target model '{self.target_model}' already exists."
+                f"A transform map with group '{self.group}', target model '{self.target_model}', and source endpoint '{self.source_endpoint}' already exists."
             )
             raise ValidationError(
                 {
                     "group": err_msg,
                     "target_model": err_msg,
+                    "source_endpoint": err_msg,
                 }
             )
+
+        # Validate no circular dependencies (only if saved and has parents)
+        if self.pk:
+            self._validate_no_circular_dependency()
+
         return cleaned_data
 
+    def _validate_no_circular_dependency(self):
+        """
+        Check if the current parent relationships create a circular dependency.
+        Uses DFS to detect cycles in the directed graph.
+        """
+
+        def get_parents(node_id: int, parent_override: list | None) -> models.QuerySet:
+            """Get parents for a node."""
+            node = IPFabricTransformMap.objects.get(pk=node_id)
+            return node.parents.all()
+
+        if has_cycle_dfs(self.pk, get_parents):
+            raise ValidationError(
+                {
+                    "parents": _(
+                        "The selected parents create a circular dependency. "
+                        "A transform map cannot be an ancestor of itself."
+                    )
+                }
+            )
+
     @functools.cache
-    def get_models(self):
+    def get_all_models(self):
        _context = dict()
 
         for app, app_models in apps.all_models.items():
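has_cycle_dfs lives in ipfabric_netbox/utilities/transform_map.py and is not shown in this models.py diff. A sketch consistent with the call sites above (a starting pk, a get_parents(node_id, parent_override) callable, and an optional parent_override) might look like the following; this is an assumption about the helper, not the packaged implementation.

# Assumed signature and behaviour, inferred only from the call sites in this diff.
def has_cycle_dfs(start_id, get_parents, parent_override=None) -> bool:
    """Return True if following parent links from start_id ever reaches start_id again."""
    visited = set()
    stack = [start_id]
    while stack:
        node_id = stack.pop()
        for parent in get_parents(node_id, parent_override):
            # get_parents may yield model instances or raw ids in this sketch
            parent_id = parent.pk if hasattr(parent, "pk") else parent
            if parent_id == start_id:
                return True  # start node would become its own ancestor -> cycle
            if parent_id not in visited:
                visited.add(parent_id)
                stack.append(parent_id)
    return False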
@@ -194,6 +359,13 @@ class IPFabricTransformMap(NetBoxModel):
         _context["contenttypes"]["ContentType"] = ContentType
         return _context
 
+    @classmethod
+    def get_distinct_target_models(cls) -> QuerySet[ContentType]:
+        target_model_ids = IPFabricTransformMap.objects.values_list(
+            "target_model", flat=True
+        ).distinct()
+        return ContentType.objects.filter(id__in=target_model_ids)
+
     def build_relationships(self, source_data):
         relationship_maps = self.relationship_maps.all()
         rel_dict = {}
@@ -205,7 +377,7 @@ class IPFabricTransformMap(NetBoxModel):
                 context = {
                     "object": source_data,
                 }
-                context.update(self.get_models())
+                context.update(self.get_all_models())
                 text = render_jinja2(field.template, context).strip()
                 if text:
                     try:
@@ -242,7 +414,9 @@ class IPFabricTransformMap(NetBoxModel):
             keys.update(
                 re.findall(r"object\.([a-zA-Z_0-9]+)(?=.*)", field.template)
            )
-        return {k: source_data[k] for k in keys}
+        # FIXME: Make it raise KeyError when key is missing during IN-68
+        # This is temporary hack to allow missing keys when syncing VSS
+        return {k: source_data.get(k) for k in keys}
 
     def get_context(self, source_data):
         new_data = deepcopy(source_data)
@@ -306,7 +480,7 @@ class IPFabricTransformMap(NetBoxModel):
                     "object": source_data,
                     field.source_field: source_data[field.source_field],
                 }
-                context.update(self.get_models())
+                context.update(self.get_all_models())
                 text = render_jinja2(field.template, context).strip()
             else:
                 text = source_data[field.source_field]
@@ -717,20 +891,92 @@ class IPFabricSync(IPFabricClient, JobsMixin, TagsMixin, ChangeLoggedModel):
         """
         Returns a queryset of IPFabricTransformMap objects that would be used by this sync,
         following group and default precedence logic.
+
+        Transform maps are unique by (target_model, source_endpoint) combination.
+        Groups have precedence: later groups override earlier groups and defaults.
         """
         default_maps = IPFabricTransformMap.objects.filter(group__isnull=True)
         group_ids = group_ids or []
-        maps_by_target = {tm.target_model_id: tm for tm in default_maps}
+
+        # Use composite key: (target_model_id, source_endpoint_id)
+        maps_by_composite_key = {
+            (tm.target_model_id, tm.source_endpoint_id): tm for tm in default_maps
+        }
+
         # Replace default maps with the ones from the groups, in given order.
         if group_ids:
             for group_id in group_ids:
                 group_maps = IPFabricTransformMap.objects.filter(group_id=group_id)
                 for tm in group_maps:
-                    maps_by_target[tm.target_model_id] = tm
+                    maps_by_composite_key[
+                        (tm.target_model_id, tm.source_endpoint_id)
+                    ] = tm
+
         return IPFabricTransformMap.objects.filter(
-            pk__in=[tm.pk for tm in maps_by_target.values()]
+            pk__in=[tm.pk for tm in maps_by_composite_key.values()]
         )
 
+    @classmethod
+    def get_model_hierarchy(cls, group_ids=None) -> list["IPFabricTransformMap"]:
+        """
+        Get transform maps in hierarchical order based on parent relationships.
+        Uses topological sort (Kahn's algorithm) to support multiple parents.
+        Transform maps without parents come first, then their children, etc.
+
+        Example: IP Address transform map has parents [Interface, VRF], so it will only be
+        processed after both Interface AND VRF transform maps have been processed.
+
+        Returns list of transform maps ordered by dependencies.
+        """
+        maps = cls.get_transform_maps(group_ids)
+
+        # Build adjacency list and in-degree count using transform map IDs
+        graph = {}  # parent_tm_id -> [child_tm_id, ...]
+        in_degree = {}  # tm_id -> count of unprocessed parents
+        tm_by_id = {}  # tm_id -> transform_map
+
+        for transform_map in maps:
+            tm_id = transform_map.id
+            tm_by_id[tm_id] = transform_map
+
+            # Get all parents for this transform map
+            parent_maps = transform_map.parents.all()
+
+            # Set in-degree (number of parents)
+            in_degree[tm_id] = parent_maps.count()
+
+            # Build adjacency list (parent -> children)
+            for parent_map in parent_maps:
+                parent_id = parent_map.id
+                graph.setdefault(parent_id, []).append(tm_id)
+
+        # Topological sort using Kahn's algorithm (BFS-based)
+        queue = [tm_id for tm_id, degree in in_degree.items() if degree == 0]
+        ordered = []
+
+        while queue:
+            # Pop from front to maintain BFS/level-order
+            current_tm_id = queue.pop(0)
+            ordered.append(current_tm_id)
+
+            # Reduce in-degree for all children
+            for child_tm_id in graph.get(current_tm_id, []):
+                in_degree[child_tm_id] -= 1
+                if in_degree[child_tm_id] == 0:
+                    queue.append(child_tm_id)
+
+        # Check for circular dependencies
+        if len(ordered) != len(in_degree):
+            unprocessed_ids = set(in_degree.keys()) - set(ordered)
+            unprocessed_maps = [tm_by_id[tm_id] for tm_id in unprocessed_ids]
+            raise ValidationError(
+                f"Circular dependency detected in transform map hierarchy. "
+                f"Unprocessed maps: {', '.join(str(tm) for tm in unprocessed_maps)}"
+            )
+
+        # Return ordered list of transform maps
+        return [tm_by_id[tm_id] for tm_id in ordered]
+
     def delete_scheduled_jobs(self) -> None:
         Job.objects.filter(
             object_type=ObjectType.objects.get_for_model(self),
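A standalone sketch of the Kahn-style ordering that get_model_hierarchy() performs, using the Interface/VRF to IP Address example from its docstring; the node names are illustrative, not the plugin's model identifiers.

from collections import deque

# Hypothetical parent relationships: ipaddress depends on both interface and vrf
parents = {"interface": [], "vrf": [], "ipaddress": ["interface", "vrf"]}

in_degree = {node: len(p) for node, p in parents.items()}
children = {}
for node, node_parents in parents.items():
    for parent in node_parents:
        children.setdefault(parent, []).append(node)

queue = deque(node for node, degree in in_degree.items() if degree == 0)
ordered = []
while queue:
    current = queue.popleft()
    ordered.append(current)
    for child in children.get(current, []):
        in_degree[child] -= 1
        if in_degree[child] == 0:
            queue.append(child)

print(ordered)  # ['interface', 'vrf', 'ipaddress'] -- ipaddress only after both parents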
@@ -1012,6 +1258,30 @@ class IPFabricIngestion(JobsMixin, models.Model):
                 statistics[model] = stats["current"] / stats["total"] * 100
             else:
                 statistics[model] = stats["current"] / 1 * 100
+
+        # Sort statistics according to transform map hierarchy
+        # This ensures consistent ordering in the progress display matching sync order
+        try:
+            group_ids = self.sync.parameters.get("groups", [])
+            transform_maps = self.sync.get_model_hierarchy(group_ids=group_ids)
+
+            # Create ordered dict following the hierarchy
+            ordered_statistics = {}
+            for transform_map in transform_maps:
+                model_string = f"{transform_map.target_model.app_label}.{transform_map.target_model.model}"
+                if model_string in statistics:
+                    ordered_statistics[model_string] = statistics[model_string]
+
+            # Add any remaining statistics that weren't in the hierarchy
+            for model_string, value in statistics.items():
+                if model_string not in ordered_statistics:
+                    ordered_statistics[model_string] = value
+
+            statistics = ordered_statistics
+        except Exception:
+            # If hierarchy ordering fails, fall back to alphabetical sorting
+            statistics = dict(sorted(statistics.items()))
+
         return {"job_results": job_results, "statistics": statistics}
 
     def sync_merge(self):
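A hypothetical before/after for the statistics reordering above, assuming the hierarchy resolves to device, then interface, then ipaddress; the model labels and percentages are illustrative only.

statistics = {"ipam.ipaddress": 40.0, "dcim.device": 100.0, "dcim.interface": 75.0}
hierarchy = ["dcim.device", "dcim.interface", "ipam.ipaddress"]  # from get_model_hierarchy()

# Hierarchy order first, then any leftovers in their original order
ordered_statistics = {m: statistics[m] for m in hierarchy if m in statistics}
ordered_statistics.update(
    {m: v for m, v in statistics.items() if m not in ordered_statistics}
)
# ordered_statistics == {"dcim.device": 100.0, "dcim.interface": 75.0, "ipam.ipaddress": 40.0}

# Fallback used when hierarchy ordering fails: plain alphabetical sort
alphabetical = dict(sorted(statistics.items()))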
@@ -1081,3 +1351,148 @@ class IPFabricData(models.Model):
 
     def get_absolute_url(self):
         return reverse("plugins:ipfabric_netbox:ipfabricdata_data", args=[self.pk])
+
+
+class IPFabricFilter(NetBoxModel):
+    objects = RestrictedQuerySet.as_manager()
+
+    name = models.CharField(max_length=100, unique=True)
+    description = models.TextField(blank=True, null=True)
+    endpoints = models.ManyToManyField(
+        to=IPFabricEndpoint,
+        related_name="filters",
+        editable=True,
+        default=None,
+        blank=True,
+    )
+    filter_type = models.CharField(
+        max_length=10, choices=IPFabricFilterTypeChoices, verbose_name=_("Filter Type")
+    )
+    syncs = models.ManyToManyField(
+        to=IPFabricSync,
+        related_name="filters",
+        editable=True,
+        default=None,
+        blank=True,
+    )
+
+    class Meta:
+        ordering = ("pk",)
+        verbose_name = _("IP Fabric Filter")
+        verbose_name_plural = _("IP Fabric Filters")
+
+    def __str__(self):
+        return self.name
+
+    def get_absolute_url(self):
+        return reverse("plugins:ipfabric_netbox:ipfabricfilter", args=[self.pk])
+
+    def merge_expressions(self) -> list[dict]:
+        """Merge all linked Expressions into a single filter expression."""
+        merged_expression = []
+        for expression in self.expressions.all():
+            merged_expression.extend(expression.expression)
+        return merged_expression
+
+
+class IPFabricFilterExpression(NetBoxModel):
+    objects = RestrictedQuerySet.as_manager()
+
+    name = models.CharField(max_length=100, unique=True)
+    description = models.TextField(blank=True, null=True)
+    expression = models.JSONField(
+        blank=False,
+        null=False,
+        default=list,
+        verbose_name=_("IP Fabric Filter Expression JSON"),
+        help_text=_(
+            "JSON filter for API call to IPF, can be obtained from IPF UI call via browser developer console."
+        ),
+    )
+    filters = models.ManyToManyField(
+        to=IPFabricFilter,
+        related_name="expressions",
+        editable=True,
+    )
+
+    class Meta:
+        ordering = ("pk",)
+        verbose_name = _("IP Fabric Filter Expression")
+        verbose_name_plural = _("IP Fabric Filter Expressions")
+
+    def __str__(self):
+        return self.name
+
+    def get_absolute_url(self):
+        return reverse(
+            "plugins:ipfabric_netbox:ipfabricfilterexpression", args=[self.pk]
+        )
+
+    def clean(self):
+        super().clean()
+
+        # Validate that expression is a list of dictionaries
+        if self.expression is None:
+            raise ValidationError({"expression": _("Filter Expression is required.")})
+
+        if not isinstance(self.expression, list):
+            raise ValidationError(
+                {
+                    "expression": _("Expression must be a list. Got: %(type)s")
+                    % {"type": type(self.expression).__name__}
+                }
+            )
+
+        if not self.expression:
+            raise ValidationError(
+                {"expression": _("Expression cannot be an empty list.")}
+            )
+
+        for idx, item in enumerate(self.expression):
+            if not isinstance(item, dict):
+                raise ValidationError(
+                    {
+                        "expression": _(
+                            "Expression item at index %(index)d must be a dictionary. Got: %(type)s"
+                        )
+                        % {"index": idx, "type": type(item).__name__}
+                    }
+                )
+
+
+@receiver(m2m_changed, sender=IPFabricTransformMap.parents.through)
+def validate_circular_dependency_on_m2m_change(
+    sender, instance, action, pk_set, **kwargs
+):
+    """
+    Validate circular dependencies when parent M2M relationships are modified.
+    This catches changes made through the API or programmatically.
+    """
+    if action == "pre_add" and pk_set:
+        # Simulate what the parents would be after this add operation
+        current_parent_ids = set(instance.parents.values_list("pk", flat=True))
+        future_parent_ids = current_parent_ids | pk_set
+
+        # Get the actual parent objects
+        future_parents = IPFabricTransformMap.objects.filter(pk__in=future_parent_ids)
+
+        # Run cycle detection with the future parent set
+        def get_parents(
+            node_id: int, parent_override: models.QuerySet | None
+        ) -> models.QuerySet:
+            """Get parents for a node, with optional override for the instance being modified."""
+            if node_id == instance.pk and parent_override is not None:
+                # Use the future parents for the current node
+                return parent_override
+            else:
+                # Use existing parents for other nodes
+                node = IPFabricTransformMap.objects.get(pk=node_id)
+                return node.parents.all()
+
+        if has_cycle_dfs(instance.pk, get_parents, parent_override=future_parents):
+            raise ValidationError(
+                _(
+                    "Cannot add these parents: circular dependency detected. "
+                    "A transform map cannot be an ancestor of itself."
+                )
+            )