ipfabric_netbox 3.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ipfabric_netbox might be problematic. Click here for more details.

Files changed (59)
  1. ipfabric_netbox/__init__.py +42 -0
  2. ipfabric_netbox/api/__init__.py +2 -0
  3. ipfabric_netbox/api/nested_serializers.py +99 -0
  4. ipfabric_netbox/api/serializers.py +160 -0
  5. ipfabric_netbox/api/urls.py +21 -0
  6. ipfabric_netbox/api/views.py +111 -0
  7. ipfabric_netbox/choices.py +226 -0
  8. ipfabric_netbox/filtersets.py +125 -0
  9. ipfabric_netbox/forms.py +1063 -0
  10. ipfabric_netbox/jobs.py +95 -0
  11. ipfabric_netbox/migrations/0001_initial.py +342 -0
  12. ipfabric_netbox/migrations/0002_ipfabricsnapshot_status.py +17 -0
  13. ipfabric_netbox/migrations/0003_ipfabricsource_type_and_more.py +49 -0
  14. ipfabric_netbox/migrations/0004_ipfabricsync_auto_merge.py +17 -0
  15. ipfabric_netbox/migrations/0005_alter_ipfabricrelationshipfield_source_model_and_more.py +64 -0
  16. ipfabric_netbox/migrations/0006_alter_ipfabrictransformmap_target_model.py +48 -0
  17. ipfabric_netbox/migrations/__init__.py +0 -0
  18. ipfabric_netbox/models.py +874 -0
  19. ipfabric_netbox/navigation.py +62 -0
  20. ipfabric_netbox/signals.py +68 -0
  21. ipfabric_netbox/tables.py +208 -0
  22. ipfabric_netbox/template_content.py +13 -0
  23. ipfabric_netbox/templates/ipfabric_netbox/inc/diff.html +72 -0
  24. ipfabric_netbox/templates/ipfabric_netbox/inc/json.html +20 -0
  25. ipfabric_netbox/templates/ipfabric_netbox/inc/logs_pending.html +6 -0
  26. ipfabric_netbox/templates/ipfabric_netbox/inc/merge_form.html +22 -0
  27. ipfabric_netbox/templates/ipfabric_netbox/inc/site_topology_button.html +70 -0
  28. ipfabric_netbox/templates/ipfabric_netbox/inc/site_topology_modal.html +61 -0
  29. ipfabric_netbox/templates/ipfabric_netbox/inc/snapshotdata.html +60 -0
  30. ipfabric_netbox/templates/ipfabric_netbox/inc/sync_delete.html +19 -0
  31. ipfabric_netbox/templates/ipfabric_netbox/inc/transform_map_field_map.html +11 -0
  32. ipfabric_netbox/templates/ipfabric_netbox/inc/transform_map_relationship_map.html +11 -0
  33. ipfabric_netbox/templates/ipfabric_netbox/ipfabric_table.html +55 -0
  34. ipfabric_netbox/templates/ipfabric_netbox/ipfabricbranch.html +141 -0
  35. ipfabric_netbox/templates/ipfabric_netbox/ipfabricsnapshot.html +105 -0
  36. ipfabric_netbox/templates/ipfabric_netbox/ipfabricsource.html +111 -0
  37. ipfabric_netbox/templates/ipfabric_netbox/ipfabricsync.html +103 -0
  38. ipfabric_netbox/templates/ipfabric_netbox/ipfabrictransformmap.html +41 -0
  39. ipfabric_netbox/templates/ipfabric_netbox/ipfabrictransformmap_list.html +17 -0
  40. ipfabric_netbox/templates/ipfabric_netbox/ipfabrictransformmap_restore.html +59 -0
  41. ipfabric_netbox/templates/ipfabric_netbox/partials/branch_all.html +10 -0
  42. ipfabric_netbox/templates/ipfabric_netbox/partials/branch_progress.html +19 -0
  43. ipfabric_netbox/templates/ipfabric_netbox/partials/branch_status.html +1 -0
  44. ipfabric_netbox/templates/ipfabric_netbox/partials/job_logs.html +53 -0
  45. ipfabric_netbox/templates/ipfabric_netbox/partials/sync_last_branch.html +1 -0
  46. ipfabric_netbox/templates/ipfabric_netbox/sync_list.html +126 -0
  47. ipfabric_netbox/templates/static/ipfabric_netbox/css/rack.css +9 -0
  48. ipfabric_netbox/tests/__init__.py +0 -0
  49. ipfabric_netbox/tests/test_models.py +1340 -0
  50. ipfabric_netbox/urls.py +141 -0
  51. ipfabric_netbox/utilities/__init__.py +0 -0
  52. ipfabric_netbox/utilities/ipfutils.py +591 -0
  53. ipfabric_netbox/utilities/logging.py +93 -0
  54. ipfabric_netbox/utilities/nbutils.py +105 -0
  55. ipfabric_netbox/utilities/transform_map.py +35 -0
  56. ipfabric_netbox/views.py +845 -0
  57. ipfabric_netbox-3.1.2.dist-info/METADATA +88 -0
  58. ipfabric_netbox-3.1.2.dist-info/RECORD +59 -0
  59. ipfabric_netbox-3.1.2.dist-info/WHEEL +4 -0
@@ -0,0 +1,874 @@
1
+ import ast
2
+ import json
3
+ import logging
4
+ import traceback
5
+ from copy import deepcopy
6
+
7
+ import httpx
8
+ from core.choices import DataSourceStatusChoices
9
+ from core.exceptions import SyncError
10
+ from core.models import Job
11
+ from core.signals import handle_deleted_object
12
+ from core.signals import pre_sync
13
+ from dcim.models import VirtualChassis
14
+ from dcim.signals import assign_virtualchassis_master
15
+ from django.apps import apps
16
+ from django.conf import settings
17
+ from django.contrib.contenttypes.models import ContentType
18
+ from django.core.cache import cache
19
+ from django.core.validators import MinValueValidator
20
+ from django.db import models
21
+ from django.db import transaction
22
+ from django.db.models import Q
23
+ from django.db.models import signals
24
+ from django.urls import reverse
25
+ from django.utils import timezone
26
+ from django.utils.module_loading import import_string
27
+ from django.utils.translation import gettext as _
28
+ from extras.models import Branch
29
+ from extras.models import StagedChange
30
+ from netbox.models import ChangeLoggedModel
31
+ from netbox.models import NetBoxModel
32
+ from netbox.models import PrimaryModel
33
+ from netbox.models.features import JobsMixin
34
+ from netbox.models.features import TagsMixin
35
+ from netbox.registry import registry
36
+ from netbox.staging import checkout
37
+ from utilities.data import shallow_compare_dict
38
+ from utilities.querysets import RestrictedQuerySet
39
+ from utilities.serialization import serialize_object
40
+
41
+ from .choices import IPFabricRawDataTypeChoices
42
+ from .choices import IPFabricSnapshotStatusModelChoices
43
+ from .choices import IPFabricSourceTypeChoices
44
+ from .choices import IPFabricSyncTypeChoices
45
+ from .choices import IPFabricTransformMapSourceModelChoices
46
+ from .utilities.ipfutils import IPFabric
47
+ from .utilities.ipfutils import IPFabricSyncRunner
48
+ from .utilities.ipfutils import render_jinja2
49
+ from .utilities.logging import SyncLogging
50
+
51
+
52
# Module-level logger for this plugin's model layer.
logger = logging.getLogger("ipfabric_netbox.models")
53
+
54
+
55
def apply_tags(object, tags):
    """Attach each tag in *tags* to *object* (when it supports tags) and save it.

    The instance is saved even when it exposes no ``tags`` manager, matching
    the previous behavior of always persisting the object.

    NOTE: the parameter name shadows the builtin ``object``; it is kept for
    backward compatibility with existing keyword callers.
    """
    # Previously this wrapped the loop in a pointless inner closure and
    # re-checked ``hasattr`` on every iteration; hoist the check instead.
    if hasattr(object, "tags"):
        for tag in tags:
            object.tags.add(tag)
    object.save()
63
+
64
+
65
# Restricts IPFabricTransformMap.target_model choices to the NetBox models
# this plugin's sync engine knows how to populate from IP Fabric data.
IPFabricSupportedSyncModels = Q(
    Q(app_label="dcim", model="site")
    | Q(app_label="dcim", model="manufacturer")
    | Q(app_label="dcim", model="platform")
    | Q(app_label="dcim", model="devicerole")
    | Q(app_label="dcim", model="devicetype")
    | Q(app_label="dcim", model="device")
    | Q(app_label="dcim", model="virtualchassis")
    | Q(app_label="dcim", model="interface")
    | Q(app_label="ipam", model="vlan")
    | Q(app_label="ipam", model="vrf")
    | Q(app_label="ipam", model="prefix")
    | Q(app_label="ipam", model="ipaddress")
    | Q(app_label="contenttypes", model="contenttype")
    | Q(app_label="tenancy", model="tenant")
    | Q(app_label="dcim", model="inventoryitem")
)


# Restricts IPFabricRelationshipField.source_model choices: any model from the
# dcim/ipam/tenancy apps, plus ContentType itself.
IPFabricRelationshipFieldSourceModels = Q(
    Q(app_label="dcim")
    | Q(app_label="ipam")
    | Q(app_label="tenancy")
    | Q(app_label="contenttypes", model="contenttype")
)
90
+
91
+
92
class IPFabricTransformMap(NetBoxModel):
    """Maps one IP Fabric source table onto a NetBox model.

    A transform map owns scalar field maps (``field_maps``) and relationship
    maps (``relationship_maps``) that translate a raw IP Fabric record into
    keyword arguments for ``Model.objects.update_or_create``.
    """

    name = models.CharField(max_length=100, unique=True)
    # IP Fabric table this map consumes.
    source_model = models.CharField(
        max_length=50, choices=IPFabricTransformMapSourceModelChoices
    )
    # NetBox model this map produces; limited to models the sync supports.
    target_model = models.ForeignKey(
        to=ContentType,
        related_name="+",
        verbose_name="Target Model",
        limit_choices_to=IPFabricSupportedSyncModels,
        help_text=_("The object(s) to which transform map target applies."),
        on_delete=models.PROTECT,
        blank=False,
        null=False,
    )
    # Optional status value forced onto every created/updated object (see
    # render()).
    status = models.CharField(
        max_length=50,
    )

    class Meta:
        verbose_name = "IP Fabric Transform Map"
        verbose_name_plural = "IP Fabric Transform Maps"

    def __str__(self):
        if self.source_model and self.target_model:
            return f"{self.source_model} - {self.target_model}"
        else:
            return "Transform Map"

    def get_absolute_url(self):
        return reverse("plugins:ipfabric_netbox:ipfabrictransformmap", args=[self.pk])

    @property
    def docs_url(self):
        # TODO: Add docs url
        return ""

    def get_models(self):
        """Return ``{app_label: {ModelName: model_class}}`` for every model
        registered with custom-field support, plus ContentType, for use as
        Jinja2 template context."""
        _context = dict()

        for app, model_names in registry["model_features"]["custom_fields"].items():
            _context.setdefault(app, {})
            for model_name in model_names:
                model = apps.get_registered_model(app, model_name)
                _context[app][model.__name__] = model
        _context["contenttypes"] = {}
        _context["contenttypes"]["ContentType"] = ContentType
        return _context

    def build_relationships(self, uuid, source_data):
        """Resolve this map's relationship fields into related objects.

        Returns ``(rel_dict, rel_dict_coalesce)`` keyed by target field name:
        the first feeds the ``defaults`` of update_or_create, the second the
        lookup (coalesce) kwargs.
        """
        relationship_maps = self.relationship_maps.all()
        rel_dict = {}
        rel_dict_coalesce = {}

        for field in relationship_maps:
            if field.template:
                context = {
                    "object": source_data,
                }
                context.update(self.get_models())
                text = render_jinja2(field.template, context).strip()
                if text:
                    # An integer result is treated as a PK of the source
                    # model; any other result is parsed as a Python literal
                    # (True/False/None per the field's help text).
                    try:
                        pk = int(text)
                    except ValueError:
                        pk = text

                    if isinstance(pk, int):
                        related_object = field.source_model.model_class().objects.get(
                            pk=pk
                        )
                    else:
                        related_object = ast.literal_eval(pk)

                    if not field.coalesce:
                        rel_dict[field.target_field] = related_object
                    else:
                        rel_dict_coalesce[field.target_field] = related_object
            elif uuid and self.relationship_store.get(uuid):
                # No template: fall back to the object cached for this record
                # earlier in the sync run.
                object = self.relationship_store[uuid].get(
                    field.source_model.model_class()
                )
                if object:
                    if not field.coalesce:
                        rel_dict[field.target_field] = object
                    else:
                        rel_dict_coalesce[field.target_field] = object

        return rel_dict, rel_dict_coalesce

    def update_or_create_instance(
        self, data, tags=None, uuid=None, relationship_store=None, logger=None
    ):
        """Create or update the target object from one IP Fabric record.

        Fix: ``tags`` and ``relationship_store`` previously used mutable
        default arguments (``[]`` / ``{}``) which are shared across calls;
        they now default to ``None``.

        Raises:
            SyncError: when the underlying ``update_or_create`` fails.
        """
        if tags is None:
            tags = []
        self.relationship_store = (
            relationship_store if relationship_store is not None else {}
        )
        new_data = deepcopy(data)
        relationship, coalesce_relationship = self.build_relationships(
            uuid=uuid, source_data=data
        )
        if relationship:
            new_data["relationship"] = relationship
        if coalesce_relationship:
            new_data["relationship_coalesce"] = coalesce_relationship
        context = self.render(new_data)
        try:
            instance, _ = self.target_model.model_class().objects.update_or_create(
                **context
            )
            if instance:
                apply_tags(instance, tags)
        except Exception as e:
            error_message = f"""Failed to create instance:<br/>
            message: `{e}`<br/>
            raw data: `{data}`<br/>
            context: `{context}`<br/>
            """  # noqa E231 E222
            # Fix: guard the optional logger so a missing logger does not
            # mask the original failure with an AttributeError.
            if logger:
                logger.log_failure(error_message, obj=self)
                logger.log_failure(
                    "Ensure that all transform map fields are present.", obj=self
                )
            raise SyncError("Unable to update_or_create_instance.")

        return instance

    def get_coalesce_fields(self, source_data):
        """Return only the lookup (coalesce) kwargs for *source_data*."""
        data = self.render(source_data)
        del data["defaults"]
        return data

    def render(self, source_data):
        """Render all field maps into update_or_create kwargs.

        Returns a dict shaped like ``{"defaults": {...}, <coalesce kwargs>}``.
        String results "true"/"false"/"none" are converted to their Python
        equivalents, and values destined for integer model fields are cast.
        """
        data = {"defaults": {}}
        for field in self.field_maps.all():
            if field.template:
                context = {
                    "object": source_data,
                    field.source_field: source_data[field.source_field],
                }
                context.update(self.get_models())
                text = render_jinja2(field.template, context).strip()
            else:
                text = source_data[field.source_field]

            if text is not None:
                if isinstance(text, str):
                    if text.lower() in ["true"]:
                        text = True
                    elif text.lower() in ["false"]:
                        text = False
                    elif text.lower() in ["none"]:
                        text = None

                if text:
                    target_field = getattr(
                        self.target_model.model_class(), field.target_field
                    )
                    target_field_type = target_field.field.get_internal_type()
                    if "integer" in target_field_type.lower():
                        text = int(text)

                if not field.coalesce:
                    data["defaults"][field.target_field] = text
                else:
                    data[field.target_field] = text

        if relationship := source_data.get("relationship"):
            data["defaults"].update(relationship)

        if relationship_coalesce := source_data.get("relationship_coalesce"):
            data.update(relationship_coalesce)

        if self.status:
            data["defaults"]["status"] = self.status

        return data
265
+
266
+
267
class IPFabricRelationshipField(models.Model):
    """A single relationship mapping belonging to a transform map.

    Resolves a related NetBox object — via a Jinja2 template or the sync
    run's relationship store — into ``target_field`` on the target model
    (see ``IPFabricTransformMap.build_relationships``).
    """

    transform_map = models.ForeignKey(
        to=IPFabricTransformMap,
        on_delete=models.CASCADE,
        related_name="relationship_maps",
        editable=True,
    )
    # Model the related object is looked up on when the template returns a PK.
    source_model = models.ForeignKey(
        ContentType,
        related_name="ipfabric_transform_fields",
        limit_choices_to=IPFabricRelationshipFieldSourceModels,
        verbose_name="Source Model",
        on_delete=models.PROTECT,
        blank=False,
        null=False,
    )
    # Attribute set on the target model instance.
    target_field = models.CharField(max_length=100)
    # When True the resolved value is used in the update_or_create lookup
    # kwargs instead of the defaults (see IPFabricTransformMap.render).
    coalesce = models.BooleanField(default=False)
    template = models.TextField(
        help_text=_(
            "Jinja2 template code, return an integer to create a relationship between the source and target model. True, False and None are also supported."
        ),
        blank=True,
        null=True,
    )

    objects = RestrictedQuerySet.as_manager()

    class Meta:
        ordering = ("transform_map",)
        verbose_name = "IP Fabric Relationship Field"
        verbose_name_plural = "IP Fabric Relationship Fields"

    @property
    def docs_url(self):
        # TODO: Add docs url
        return ""
304
+
305
+
306
class IPFabricTransformField(models.Model):
    """A single scalar field mapping belonging to a transform map."""

    transform_map = models.ForeignKey(
        to=IPFabricTransformMap,
        on_delete=models.CASCADE,
        related_name="field_maps",
        editable=True,
    )
    # Key read from the raw IP Fabric record.
    source_field = models.CharField(max_length=100)
    # Attribute written on the target NetBox model.
    target_field = models.CharField(max_length=100)
    # When True the value is used in the update_or_create lookup kwargs
    # instead of the defaults (see IPFabricTransformMap.render).
    coalesce = models.BooleanField(default=False)

    objects = RestrictedQuerySet.as_manager()

    # Optional Jinja2 template; when blank the source value is used verbatim.
    template = models.TextField(
        help_text=_("Jinja2 template code to be rendered into the target field."),
        blank=True,
        null=True,
    )

    class Meta:
        ordering = ("transform_map",)
        verbose_name = "IP Fabric Transform Field"
        verbose_name_plural = "IP Fabric Transform Fields"

    @property
    def docs_url(self):
        # TODO: Add docs url
        return ""
334
+
335
+
336
class IPFabricClient:
    """Mixin shared by models that talk to an IP Fabric instance.

    Expects the concrete model to provide ``self.logger`` (a SyncLogging
    instance) and a ``status`` attribute before these helpers are used.
    """

    def get_client(self, parameters, transform_map=None):
        """Build an IP Fabric API client.

        Returns the underlying SDK client on success. Connection and
        authentication problems are not raised: they are recorded through
        ``handle_sync_failure`` and ``None`` is returned implicitly.
        """
        try:
            kwargs = {"parameters": parameters}
            if transform_map:
                kwargs["transform_map"] = transform_map
            return IPFabric(**kwargs).ipf
        except httpx.ConnectError as e:
            ssl_hint = (
                "SSL certificate verification failed, self-signed cert? "
                "<a href='https://docs.ipfabric.io/main/integrations/netbox-plugin/user_guide/10_FAQ/' target='_blank'>Check out our FAQ documentation.</a>"
            )
            detail = ssl_hint if "CERTIFICATE_VERIFY_FAILED" in str(e) else str(e)
            self.handle_sync_failure("ConnectError", e, detail)
        except httpx.HTTPStatusError as e:
            if e.response.status_code == 401:
                detail = "Authentication failed, check API key."
            else:
                detail = str(e)
            self.handle_sync_failure("HTTPStatusError", e, detail)
        except Exception as e:
            self.handle_sync_failure("Error", e)

    def handle_sync_failure(self, failure_type, exception, message=None):
        """Mark this object as failed and log the exception with optional
        human-readable context."""
        self.status = DataSourceStatusChoices.FAILED

        text = (
            f"{message} ({failure_type}): `{exception}`"
            if message
            else f"Syncing Snapshot Failed: `{exception}`"
        )
        self.logger.log_failure(text, obj=self)
371
+
372
+
373
class IPFabricSource(IPFabricClient, JobsMixin, PrimaryModel):
    """A configured IP Fabric instance whose snapshot catalog is synced in.

    "local" sources get a live API client during ingestion; "remote" sources
    rely on uploaded raw data (see IPFabricSync.ready_for_sync / sync).
    """

    name = models.CharField(max_length=100, unique=True)
    type = models.CharField(
        verbose_name=_("type"),
        max_length=50,
        choices=IPFabricSourceTypeChoices,
        default=IPFabricSourceTypeChoices.LOCAL,
    )
    url = models.CharField(max_length=200, verbose_name=_("URL"))
    status = models.CharField(
        max_length=50,
        choices=DataSourceStatusChoices,
        default=DataSourceStatusChoices.NEW,
        editable=False,
    )
    # Connection parameters handed to the IPFabric client; "base_url" is
    # injected from ``url`` at sync time.
    parameters = models.JSONField(blank=True, null=True)
    last_synced = models.DateTimeField(blank=True, null=True, editable=True)

    class Meta:
        ordering = ("name",)
        verbose_name = "IP Fabric Source"
        verbose_name_plural = "IP Fabric Sources"

    def __str__(self):
        return f"{self.name}"

    def get_absolute_url(self):
        return reverse("plugins:ipfabric_netbox:ipfabricsource", args=[self.pk])

    @property
    def ready_for_sync(self):
        # A source may be (re)synced unless a sync is already queued/running.
        return self.status not in (
            DataSourceStatusChoices.QUEUED,
            DataSourceStatusChoices.SYNCING,
        )

    @property
    def docs_url(self):
        # TODO: Add docs url
        return ""

    def clean(self):
        super().clean()

        # Normalize the URL so later concatenation is predictable.
        self.url = self.url.rstrip("/")

    def enqueue_sync_job(self, request):
        """Mark the source as queued and schedule a background snapshot sync."""
        # Set the status to "syncing"
        self.status = DataSourceStatusChoices.QUEUED
        IPFabricSource.objects.filter(pk=self.pk).update(status=self.status)

        # Enqueue a sync job
        return Job.enqueue(
            import_string("ipfabric_netbox.jobs.sync_ipfabricsource"),
            name=f"{self.name} Snapshot Sync",
            instance=self,
            user=request.user,
        )

    def sync(self, job):
        """Pull the snapshot catalog from IP Fabric into IPFabricSnapshot rows.

        Runs inside the background job. Failures are recorded via
        ``handle_sync_failure`` rather than raised, except for the
        concurrent-sync guard; status and ``last_synced`` are always
        persisted in the ``finally`` block.
        """
        self.logger = SyncLogging(job=job.pk)
        if self.status == DataSourceStatusChoices.SYNCING:
            self.logger.log_failure(
                "Cannot initiate sync; syncing already in progress.", obj=self
            )
            raise SyncError("Cannot initiate sync; syncing already in progress.")

        pre_sync.send(sender=self.__class__, instance=self)

        self.status = DataSourceStatusChoices.SYNCING
        IPFabricSource.objects.filter(pk=self.pk).update(status=self.status)

        # Begin Sync
        try:
            self.logger.log_info(f"Syncing snapshots from {self.name}", obj=self)
            logger.debug(f"Syncing snapshots from {self.url}")

            # NOTE(review): ``parameters`` is nullable at the DB level; a
            # None value here would raise TypeError — confirm callers always
            # populate it before syncing.
            self.parameters["base_url"] = self.url
            ipf = self.get_client(parameters=self.parameters)

            if not ipf:
                raise SyncError("Unable to connect to IP Fabric.")

            # "$prev"/"$lastLocked" are aliases of real snapshots — skip them.
            for snapshot_id, value in ipf.snapshots.items():
                if snapshot_id not in ["$prev", "$lastLocked"]:
                    if value.name:
                        name = (
                            value.name
                            + " - "
                            + value.start.strftime("%d-%b-%y %H:%M:%S")
                        )
                    else:
                        name = value.start.strftime("%d-%b-%y %H:%M:%S")

                    # Map IP Fabric's "done" onto this plugin's "loaded".
                    if value.status == "done":
                        status = "loaded"
                    else:
                        status = value.status

                    data = {
                        "name": name,
                        "data": json.loads(value.model_dump_json(exclude={"client"})),
                        "date": value.start,
                        "created": timezone.now(),
                        "last_updated": timezone.now(),
                        "status": status,
                    }
                    snapshot, _ = IPFabricSnapshot.objects.update_or_create(
                        source=self, snapshot_id=snapshot_id, defaults=data
                    )
                    self.logger.log_info(
                        f"Created/Updated Snapshot {snapshot.name} ({snapshot.snapshot_id})",
                        obj=snapshot,  # noqa E225
                    )
            self.status = DataSourceStatusChoices.COMPLETED
            self.logger.log_success(f"Completed syncing snapshots from {self.name}")
            logger.debug(f"Completed syncing snapshots from {self.url}")
        except Exception as e:
            self.handle_sync_failure(type(e).__name__, e)
        finally:
            # Always record the attempt time and final status.
            self.last_synced = timezone.now()
            IPFabricSource.objects.filter(pk=self.pk).update(
                status=self.status, last_synced=self.last_synced
            )
            self.logger.log_info("Sync job completed.", obj=self)
            if job:
                job.data = self.logger.log_data
            # Emit the post_sync signal
            # post_sync.send(sender=self.__class__, instance=self)
502
+
503
+
504
class IPFabricSnapshot(models.Model):
    """Metadata for one IP Fabric snapshot discovered on a source."""

    created = models.DateTimeField(auto_now_add=True)
    # Maintained manually by IPFabricSource.sync (no auto_now).
    last_updated = models.DateTimeField(editable=False)
    source = models.ForeignKey(
        to=IPFabricSource,
        on_delete=models.CASCADE,
        related_name="snapshots",
        editable=False,
    )
    name = models.CharField(max_length=200)
    snapshot_id = models.CharField(max_length=100)
    # Full snapshot document as serialized by IPFabricSource.sync.
    data = models.JSONField(blank=True, null=True)
    date = models.DateTimeField(blank=True, null=True, editable=False)
    status = models.CharField(
        max_length=50,
        choices=IPFabricSnapshotStatusModelChoices,
        default=IPFabricSnapshotStatusModelChoices.STATUS_UNLOADED,
    )

    objects = RestrictedQuerySet.as_manager()

    class Meta:
        ordering = ("source", "-date")
        verbose_name = "IP Fabric Snapshot"
        verbose_name_plural = "IP Fabric Snapshots"

    def __str__(self):
        return f"{self.name} - {self.snapshot_id}"

    def get_absolute_url(self):
        return reverse("plugins:ipfabric_netbox:ipfabricsnapshot", args=[self.pk])

    def get_status_color(self):
        # Used by NetBox templates to render the status badge.
        return IPFabricSnapshotStatusModelChoices.colors.get(self.status)

    @property
    def sites(self):
        """Return the snapshot's "sites" list, or [] when absent/empty."""
        if self.data:
            sites = self.data.get("sites", None)
            if sites:
                return sites
            else:
                return []
        else:
            return []
549
+
550
+
551
class IPFabricSync(IPFabricClient, JobsMixin, TagsMixin, ChangeLoggedModel):
    """An ingestion job definition: which snapshot to sync and how.

    Each run stages its changes on a new IPFabricBranch, which may then be
    merged (optionally automatically via ``auto_merge``).
    """

    objects = RestrictedQuerySet.as_manager()
    name = models.CharField(max_length=100, unique=True)
    snapshot_data = models.ForeignKey(
        to=IPFabricSnapshot,
        on_delete=models.CASCADE,
        related_name="snapshots",
    )
    type = models.CharField(
        max_length=50,
        choices=IPFabricSyncTypeChoices,
        default=IPFabricSyncTypeChoices.DCIM,
    )
    status = models.CharField(
        max_length=50,
        choices=DataSourceStatusChoices,
        default=DataSourceStatusChoices.NEW,
        editable=False,
    )
    # Ingestion settings; "snapshot_id" is injected at sync time.
    parameters = models.JSONField(blank=True, null=True)
    # When True, a successful run immediately queues a branch merge.
    auto_merge = models.BooleanField(default=False)
    last_synced = models.DateTimeField(blank=True, null=True, editable=False)
    scheduled = models.DateTimeField(null=True, blank=True)
    interval = models.PositiveIntegerField(
        blank=True,
        null=True,
        validators=(MinValueValidator(1),),
        help_text=_("Recurrence interval (in minutes)"),
    )
    user = models.ForeignKey(
        to=settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        related_name="+",
        blank=True,
        null=True,
    )

    class Meta:
        ordering = ["pk"]
        verbose_name = "IP Fabric Sync"

    def __str__(self):
        return f"{self.name}"

    @property
    def docs_url(self):
        # TODO: Add docs url
        return ""

    def get_absolute_url(self):
        return reverse("plugins:ipfabric_netbox:ipfabricsync", args=[self.pk])

    def get_status_color(self):
        return DataSourceStatusChoices.colors.get(self.status)

    @property
    def ready_for_sync(self):
        # Remote sources additionally need uploaded raw data before a sync
        # can run; local sources only need to not be syncing already.
        if self.status not in (DataSourceStatusChoices.SYNCING,):
            if self.snapshot_data.source.type == "remote":
                if self.snapshot_data.ipf_data.count() > 0:
                    return True
                else:
                    return False
            else:
                return True
        else:
            return False

    def enqueue_sync_job(self, adhoc=False, user=None):
        """Queue a snapshot-catalog refresh followed by the ingestion job.

        Returns the ingestion Job, or None when neither an adhoc run was
        requested nor a schedule is configured. (Fix: ``job`` was previously
        unbound in that case, raising UnboundLocalError.)
        """
        # Set the status to "syncing"
        self.status = DataSourceStatusChoices.QUEUED
        IPFabricSync.objects.filter(pk=self.pk).update(status=self.status)

        Job.enqueue(
            import_string("ipfabric_netbox.jobs.sync_ipfabricsource"),
            name=f"{self.name} Snapshot Sync (Pre Ingestion)",
            instance=self.snapshot_data.source,
            user=self.user,
        )

        # Enqueue a sync job
        if not user:
            user = self.user

        job = None
        if not adhoc and self.scheduled:
            job = Job.enqueue(
                import_string("ipfabric_netbox.jobs.sync_ipfabric"),
                name=f"{self.name} - (scheduled)",
                instance=self,
                user=self.user,
                schedule_at=self.scheduled,
                interval=self.interval,
            )
        elif adhoc:
            job = Job.enqueue(
                import_string("ipfabric_netbox.jobs.sync_ipfabric"),
                instance=self,
                user=user,
                name=f"{self.name} - (adhoc)",
                adhoc=adhoc,
            )
        return job

    def sync(self, job=None):
        """Ingest the snapshot's data into a new staged IPFabricBranch.

        Changes are staged via ``checkout(branch)``; no-op staged changes
        are pruned afterwards, and the branch is auto-merged when
        ``auto_merge`` is set and the run completed.
        """
        if job:
            self.logger = SyncLogging(job=job.pk)
            user = job.user
        else:
            self.logger = SyncLogging(job=self.pk)
            user = None

        if self.status == DataSourceStatusChoices.SYNCING:
            raise SyncError("Cannot initiate sync; ingestion already in progress.")

        pre_sync.send(sender=self.__class__, instance=self)

        self.status = DataSourceStatusChoices.SYNCING
        IPFabricSync.objects.filter(pk=self.pk).update(status=self.status)

        # Begin Sync
        self.logger.log_info(
            f"Ingesting data from {self.snapshot_data.source.name}", obj=self
        )
        logger.info(f"Ingesting data from {self.snapshot_data.source.name}")

        self.snapshot_data.source.parameters["base_url"] = self.snapshot_data.source.url
        self.parameters["snapshot_id"] = self.snapshot_data.snapshot_id
        self.logger.log_info(
            f"Syncing with the following data {json.dumps(self.parameters)}", obj=self
        )
        logger.info(f"Syncing with the following data {json.dumps(self.parameters)}")

        current_time = str(timezone.now())

        branch = IPFabricBranch.objects.create(
            name=f"{current_time}", sync=self, job=job, user=user
        )
        self.logger.log_info(f"New branch Created {branch.name}", obj=branch)
        logger.info(f"New branch Created {branch.name}")

        try:
            self.logger.log_info("Fetching IP Fabric Client", obj=branch)
            logger.info("Fetching IP Fabric Client")

            # Only local sources get a live API client; remote sources are
            # served from previously uploaded raw data.
            if self.snapshot_data.source.type == IPFabricSourceTypeChoices.LOCAL:
                ipf = self.get_client(
                    parameters=self.snapshot_data.source.parameters,
                    transform_map=IPFabricTransformMap,
                )
                if not ipf:
                    logger.debug("Unable to connect to IP Fabric.")
                    raise SyncError("Unable to connect to IP Fabric.")
            else:
                ipf = None

            runner = IPFabricSyncRunner(
                client=ipf,
                settings=self.parameters,
                transform_map=IPFabricTransformMap,
                sync=self,
            )

            with checkout(branch):
                runner.sync_devices(branch=branch)

            self.status = DataSourceStatusChoices.COMPLETED
        except Exception as e:
            self.status = DataSourceStatusChoices.FAILED
            self.logger.log_failure(f"Branch Failed: `{e}`", obj=branch)
            self.logger.log_failure(
                f"Stack Trace: `{traceback.format_exc()}`", obj=branch
            )
            logger.debug(f"Branch Failed: `{e}`")

        logger.debug(f"Completed ingesting data from {self.snapshot_data.source.name}")
        self.logger.log_info(
            f"Completed ingesting data from {self.snapshot_data.source.name}", obj=self
        )

        # Prune staged changes whose pre/post snapshots are identical.
        for change in branch.staged_changes.all():
            if hasattr(change.object, "pk"):
                # Fix: was a duplicated "prechange_data = prechange_data ="
                # assignment.
                prechange_data = serialize_object(change.object, resolve_tags=False)
                prechange_data = dict(sorted(prechange_data.items()))
            else:
                prechange_data = None
            # Fix: initialize so a change without "data" cannot leave
            # postchange_data unbound below.
            postchange_data = None
            if hasattr(change, "data"):
                postchange_data = dict(sorted(change.data.items()))

            diff_added = shallow_compare_dict(
                prechange_data or dict(),
                postchange_data or dict(),
                exclude=["last_updated"],
            )

            if not diff_added:
                change.delete()

        self.last_synced = timezone.now()

        if self.auto_merge and self.status == DataSourceStatusChoices.COMPLETED:
            self.logger.log_info("Auto Merging Branch", obj=branch)
            logger.info("Auto Merging Branch")
            branch.enqueue_merge_job(user=user)
            self.logger.log_info("Auto Merge Job Enqueued", obj=branch)
            logger.info("Auto Merge Job Enqueued")

        IPFabricSync.objects.filter(pk=self.pk).update(
            status=self.status, last_synced=self.last_synced
        )
        if job:
            job.data = self.logger.log_data
764
+
765
+
766
class IPFabricBranch(JobsMixin, Branch):
    """
    A collection of related StagedChanges produced by one IPFabricSync run.
    """

    sync = models.ForeignKey(IPFabricSync, on_delete=models.CASCADE)
    job = models.ForeignKey(Job, on_delete=models.SET_NULL, null=True)

    class Meta:
        ordering = ("name",)
        verbose_name = "IP Fabric Branch"
        verbose_name_plural = "IP Fabric Branches"

    def __str__(self):
        return f"{self.name} ({self.pk})"

    def get_absolute_url(self):
        return reverse("plugins:ipfabric_netbox:ipfabricbranch", args=[self.pk])

    def enqueue_merge_job(self, user):
        """Queue a background job that merges this branch into NetBox."""
        # Set the status to "syncing"
        self.status = DataSourceStatusChoices.QUEUED
        IPFabricSync.objects.filter(ipfabricbranch=self.pk).update(status=self.status)

        # Enqueue a sync job
        return Job.enqueue(
            import_string("ipfabric_netbox.jobs.merge_ipfabric_branch"),
            name=f"{self.name} Merge",
            instance=self,
            user=user,
        )

    def get_logs(self):
        """Return structured log data for this branch's run.

        Prefers data stored on the job, then the job-scoped cache entry,
        then the sync-scoped cache entry.

        Fix: ``job`` is nullable (SET_NULL) — guard against None instead of
        raising AttributeError when the job has been deleted.
        """
        job_results = None
        if self.job:
            job_results = self.job.data
            if not job_results:
                job_results = cache.get(f"ipfabric_sync_{self.job.pk}")
        if not job_results:
            job_results = cache.get(f"ipfabric_sync_{self.sync.pk}")
        return job_results

    def get_statistics(self):
        """Return per-model progress percentages derived from the run logs."""
        job_results = self.get_logs()
        statistics = {}
        if job_results:
            for model, stats in job_results["statistics"].items():
                if stats["total"] > 0:
                    statistics[model] = stats["current"] / stats["total"] * 100
                else:
                    # Avoid division by zero; treat the total as 1.
                    statistics[model] = stats["current"] / 1 * 100
        return {"job_results": job_results, "statistics": statistics}

    def merge(self):
        """Apply all staged changes atomically, then delete the change rows.

        Fix: the pre_delete receiver is now reconnected in a ``finally``
        block, so a failure while deleting staged changes can no longer
        leave ``handle_deleted_object`` disconnected process-wide.
        """
        logger.info(f"Merging changes in branch {self}")
        with transaction.atomic():
            for change in self.staged_changes.all():
                logger.debug("Applying change: %s", change)
                change.apply()
            # Deleting StagedChange rows must not be treated as object
            # deletions by NetBox's deletion handler.
            signals.pre_delete.disconnect(handle_deleted_object)
            try:
                self.staged_changes.all().delete()
            finally:
                # NOTE(review): the receiver was disconnected without a
                # sender but is reconnected with sender=StagedChange —
                # confirm this restores the original registration.
                signals.pre_delete.connect(handle_deleted_object, sender=StagedChange)

    def sync_merge(self):
        """Merge this branch while maintaining the parent sync's status."""
        ipfabricsync = self.sync
        if ipfabricsync.status == DataSourceStatusChoices.SYNCING:
            raise SyncError("Cannot initiate merge; merge already in progress.")

        pre_sync.send(sender=self.__class__, instance=self)

        ipfabricsync.status = DataSourceStatusChoices.SYNCING
        IPFabricSync.objects.filter(ipfabricbranch=self.pk).update(
            status=self.sync.status
        )

        # Begin Sync
        logger.debug(f"Merging {self.name}")
        try:
            # Suppress VC-master reassignment while bulk-applying changes.
            signals.post_save.disconnect(
                assign_virtualchassis_master, sender=VirtualChassis
            )
            # Fix: reconnect in ``finally`` so a failed merge cannot leave
            # the signal permanently disconnected.
            try:
                self.merge()
            finally:
                signals.post_save.connect(
                    assign_virtualchassis_master, sender=VirtualChassis
                )
            ipfabricsync.status = DataSourceStatusChoices.COMPLETED
        except Exception as e:
            ipfabricsync.status = DataSourceStatusChoices.FAILED
            logger.debug(f"Merging {self.name} Failed: `{e}`")

        logger.debug(f"Completed merge {self.name}")

        ipfabricsync.last_synced = timezone.now()
        IPFabricSync.objects.filter(ipfabricbranch=self.pk).update(
            status=ipfabricsync.status, last_synced=ipfabricsync.last_synced
        )
861
+
862
+
863
class IPFabricData(models.Model):
    """Raw IP Fabric table payload stored against a snapshot.

    Presumably populated for "remote" sources, which are checked for
    ``ipf_data`` rows in IPFabricSync.ready_for_sync — confirm against the
    upload views.
    """

    # Parent snapshot; raw rows are removed along with it (CASCADE).
    snapshot_data = models.ForeignKey(
        to=IPFabricSnapshot,
        on_delete=models.CASCADE,
        related_name="ipf_data",
    )
    # Unparsed JSON payload.
    data = models.JSONField(blank=True, null=True)
    # Which IP Fabric table this payload represents.
    type = models.CharField(
        max_length=50,
        choices=IPFabricRawDataTypeChoices,
    )
    objects = RestrictedQuerySet.as_manager()