ipfabric_netbox 3.2.3__py3-none-any.whl → 3.2.4b2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of ipfabric_netbox might be problematic.
- ipfabric_netbox/__init__.py +1 -1
- ipfabric_netbox/api/nested_serializers.py +0 -20
- ipfabric_netbox/api/serializers.py +7 -13
- ipfabric_netbox/api/urls.py +2 -2
- ipfabric_netbox/api/views.py +5 -5
- ipfabric_netbox/data/transform_map.json +21 -35
- ipfabric_netbox/filtersets.py +15 -13
- ipfabric_netbox/forms.py +14 -42
- ipfabric_netbox/jobs.py +12 -7
- ipfabric_netbox/migrations/0001_initial.py +1 -1
- ipfabric_netbox/migrations/0001_initial_squashed_0013_switch_to_branching_plugin.py +503 -0
- ipfabric_netbox/migrations/0007_prepare_custom_fields.py +3 -3
- ipfabric_netbox/migrations/0010_remove_uuid_from_get_or_create.py +20 -11
- ipfabric_netbox/migrations/0011_update_part_number_DCIM_inventory_item_template.py +57 -0
- ipfabric_netbox/migrations/0012_remove_status_field.py +18 -0
- ipfabric_netbox/migrations/0013_switch_to_branching_plugin.py +270 -0
- ipfabric_netbox/models.py +120 -91
- ipfabric_netbox/navigation.py +1 -1
- ipfabric_netbox/signals.py +9 -2
- ipfabric_netbox/tables.py +40 -46
- ipfabric_netbox/templates/ipfabric_netbox/{ipfabricbranch.html → ipfabricingestion.html} +14 -9
- ipfabric_netbox/templates/ipfabric_netbox/ipfabricsource.html +3 -3
- ipfabric_netbox/templates/ipfabric_netbox/ipfabricsync.html +3 -3
- ipfabric_netbox/templates/ipfabric_netbox/ipfabricsync_list.html +71 -0
- ipfabric_netbox/templates/ipfabric_netbox/ipfabrictransformmap.html +0 -4
- ipfabric_netbox/templates/ipfabric_netbox/partials/ingestion_all.html +10 -0
- ipfabric_netbox/templates/ipfabric_netbox/partials/{branch_progress.html → ingestion_progress.html} +1 -1
- ipfabric_netbox/templates/ipfabric_netbox/partials/sync_last_ingestion.html +1 -0
- ipfabric_netbox/urls.py +13 -3
- ipfabric_netbox/utilities/ipfutils.py +52 -19
- ipfabric_netbox/views.py +162 -155
- {ipfabric_netbox-3.2.3.dist-info → ipfabric_netbox-3.2.4b2.dist-info}/METADATA +12 -4
- {ipfabric_netbox-3.2.3.dist-info → ipfabric_netbox-3.2.4b2.dist-info}/RECORD +35 -32
- {ipfabric_netbox-3.2.3.dist-info → ipfabric_netbox-3.2.4b2.dist-info}/WHEEL +1 -1
- ipfabric_netbox/templates/ipfabric_netbox/inc/sync_delete.html +0 -19
- ipfabric_netbox/templates/ipfabric_netbox/partials/branch_all.html +0 -10
- ipfabric_netbox/templates/ipfabric_netbox/partials/sync_last_branch.html +0 -1
- ipfabric_netbox/templates/ipfabric_netbox/sync_list.html +0 -126
- /ipfabric_netbox/templates/ipfabric_netbox/partials/{branch_status.html → ingestion_status.html} +0 -0
ipfabric_netbox/migrations/0013_switch_to_branching_plugin.py
ADDED
@@ -0,0 +1,270 @@
+# Generated by Django 5.1.6 on 2025-04-24 11:46
+import contextlib
+from enum import IntEnum
+from typing import TYPE_CHECKING
+
+import django.db.models.deletion
+from django.db import migrations
+from django.db import models
+
+if TYPE_CHECKING:
+    from django.apps import apps as apps_type
+    from django.db.backends.base.schema import BaseDatabaseSchemaEditor
+
+
+def modify_custom_field(
+    apps: "apps_type",
+    current_name: str,
+    new_name: str,
+    new_label: str,
+    new_object_type: models.Model,
+):
+    CustomField = apps.get_model("extras", "CustomField")
+    ObjectType = apps.get_model("core", "ObjectType")
+
+    try:
+        custom_field = CustomField.objects.get(name=current_name)
+    except CustomField.DoesNotExist:
+        return
+
+    custom_field.name = new_name
+    custom_field.label = new_label
+    custom_field.related_object_type = ObjectType.objects.get_for_model(new_object_type)
+    custom_field.full_clean()
+    custom_field.save()
+
+
+def rename_ipfabric_branch_cf(
+    apps: "apps_type", schema_editor: "BaseDatabaseSchemaEditor"
+):
+    """Forward migration to rename ipfabric_branch CustomField to ipfabric_ingestion."""
+    modify_custom_field(
+        apps,
+        "ipfabric_branch",
+        "ipfabric_ingestion",
+        "IP Fabric Last Ingestion",
+        apps.get_model("ipfabric_netbox", "IPFabricIngestion"),
+    )
+
+
+def rename_ipfabric_ingestion_cf(
+    apps: "apps_type", schema_editor: "BaseDatabaseSchemaEditor"
+):
+    """Forward migration to rename ipfabric_ingestion CustomField to ipfabric_branch."""
+    modify_custom_field(
+        apps,
+        "ipfabric_ingestion",
+        "ipfabric_branch",
+        "IP Fabric Last Sync",
+        apps.get_model("ipfabric_netbox", "IPFabricBranch"),
+    )
+
+
+def clean_redisual_branch_data(
+    apps: "apps_type", schema_editor: "BaseDatabaseSchemaEditor"
+):
+    """
+    Clean up any residual data in the Branch model that have been left behind
+    after the migration.
+    """
+    ContentType = apps.get_model("contenttypes", "contenttype")
+    Permission = apps.get_model("auth", "permission")
+
+    try:
+        branch_content_type = ContentType.objects.get(
+            app_label="ipfabric_netbox", model="ipfabricbranch"
+        )
+    except ContentType.DoesNotExist:
+        return
+
+    Permission.objects.filter(content_type=branch_content_type).delete()
+    branch_content_type.delete()
+
+
+# interface fix templates
+DUPLEX_TEMPLATE_BEFORE = ""
+DUPLEX_TEMPLATE_AFTER = '{% if not object.duplex or object.duplex=="unknown" %}None{% else %}{{ object.duplex }}{% endif %}'
+SPEED_TEMPLATE_BEFORE = "{% if not object.speedValue %}None{% else %}{{ object.speedValue|int // 1000 }}{% endif %}"
+SPEED_TEMPLATE_AFTER = '{% if not object.speedValue or object.speedValue=="unknown" %}None{% else %}{{ object.speedValue|int // 1000 }}{% endif %}'
+
+# devicetype fix templates
+DEVICETYPE_RELATIONSHIP_TEMPLATE_BEFORE = '{% if object.model != none %}{% set SLUG = object.model | string | slugify %}{% else %}{% set SLUG = object.vendor | slugify ~ "-" ~ object.family | slugify ~ "-" ~ object.platform %}{% endif %}{{ dcim.DeviceType.objects.get(slug=SLUG).pk }}'
+DEVICETYPE_RELATIONSHIP_TEMPLATE_AFTER = '{% if object.model %}{% set SLUG = object.model | string | slugify %}{% else %}{% set SLUG = object.vendor | slugify ~ "-" ~ object.family | slugify ~ "-" ~ object.platform | slugify %}{% endif %}{{ dcim.DeviceType.objects.get(slug=SLUG).pk }}'
+MODEL_SLUG_TEMPLATE_BEFORE = "{% if object.model != none %}{{ object.model | string | slugify }}{% else %}{{ object.vendor | slugify }}-{{ object.family | slugify}}-{{ object.platform }}{% endif %}"
+MODEL_SLUG_TEMPLATE_AFTER = "{% if object.model %}{{ object.model | string | slugify }}{% else %}{{ object.vendor | slugify }}-{{ object.family | slugify}}-{{ object.platform | slugify }}{% endif %}"
+MODEL_TEMPLATE_BEFORE = "{% if object.model != none %}{{ object.model | string }}{% else %}{{ object.vendor }} - {{ object.family }} - {{ object.platform }}{% endif %}"
+MODEL_TEMPLATE_AFTER = "{% if object.model %}{{ object.model | string }}{% else %}{{ object.vendor }} - {{ object.family }} - {{ object.platform }}{% endif %}"
+
+TEMPLATES = {
+    "duplex": (DUPLEX_TEMPLATE_BEFORE, DUPLEX_TEMPLATE_AFTER),
+    "speed": (SPEED_TEMPLATE_BEFORE, SPEED_TEMPLATE_AFTER),
+    "devicetype_relationship": (
+        DEVICETYPE_RELATIONSHIP_TEMPLATE_BEFORE,
+        DEVICETYPE_RELATIONSHIP_TEMPLATE_AFTER,
+    ),
+    "model_slug": (MODEL_SLUG_TEMPLATE_BEFORE, MODEL_SLUG_TEMPLATE_AFTER),
+    "model": (MODEL_TEMPLATE_BEFORE, MODEL_TEMPLATE_AFTER),
+}
+
+
+class MigrationType(IntEnum):
+    FORWARD = 0
+    REVERSE = 1
+
+
+def change_transform_maps(
+    apps: "apps_type", schema_editor: "BaseDatabaseSchemaEditor", migration_type: int
+):
+    """
+    * Ignore 'unknown' values for duplex and speed on Interface.
+    * Correctly set DeviceType value when model is empty (not None!).
+    * Slugify platform when using it for DeviceType slug.
+    """
+    ContentType = apps.get_model("contenttypes", "ContentType")
+    TransformMap = apps.get_model("ipfabric_netbox", "IPFabricTransformMap")
+    TransformField = apps.get_model("ipfabric_netbox", "IPFabricTransformField")
+    RelationshipField = apps.get_model("ipfabric_netbox", "IPFabricRelationshipField")
+
+    with contextlib.suppress(TransformMap.DoesNotExist, TransformField.DoesNotExist):
+        interface_transform_map = TransformMap.objects.get(
+            source_model="interface",
+            target_model__app_label="dcim",
+            target_model__model="interface",
+        )
+        TransformField.objects.filter(
+            transform_map=interface_transform_map,
+            template=TEMPLATES["duplex"][migration_type],
+            source_field="duplex",
+            target_field="duplex",
+        ).update(template=TEMPLATES["duplex"][int(not migration_type)])
+        TransformField.objects.filter(
+            transform_map=interface_transform_map,
+            template=TEMPLATES["speed"][migration_type],
+            source_field="speedValue",
+            target_field="speed",
+        ).update(template=TEMPLATES["speed"][int(not migration_type)])
+
+    with contextlib.suppress(
+        TransformMap.DoesNotExist,
+        TransformField.DoesNotExist,
+        RelationshipField.DoesNotExist,
+    ):
+        device_transform_map = TransformMap.objects.get(
+            source_model="device",
+            target_model__app_label="dcim",
+            target_model__model="device",
+        )
+        RelationshipField.objects.filter(
+            transform_map=device_transform_map,
+            template=TEMPLATES["devicetype_relationship"][migration_type],
+            source_model=ContentType.objects.get(app_label="dcim", model="devicetype"),
+            target_field="device_type",
+        ).update(template=TEMPLATES["devicetype_relationship"][int(not migration_type)])
+
+        devicetype_transform_map = TransformMap.objects.get(
+            source_model="device",
+            target_model__app_label="dcim",
+            target_model__model="devicetype",
+        )
+        TransformField.objects.filter(
+            transform_map=devicetype_transform_map,
+            template=TEMPLATES["model_slug"][migration_type],
+            source_field="model",
+            target_field="slug",
+        ).update(template=TEMPLATES["model_slug"][int(not migration_type)])
+        TransformField.objects.filter(
+            transform_map=devicetype_transform_map,
+            template=TEMPLATES["model"][migration_type],
+            source_field="model",
+            target_field="model",
+        ).update(template=TEMPLATES["model"][int(not migration_type)])
+
+
+def forward_change_transform_maps(
+    apps: "apps_type", schema_editor: "BaseDatabaseSchemaEditor"
+):
+    """
+    * Ignore 'unknown' values for duplex and speed on Interface.
+    * Correctly set DeviceType value when model is empty (not None!).
+    * Slugify platform when using it for DeviceType slug.
+    """
+    change_transform_maps(apps, schema_editor, migration_type=MigrationType.FORWARD)
+
+
+def reverse_change_transform_maps(
+    apps: "apps_type", schema_editor: "BaseDatabaseSchemaEditor"
+):
+    """
+    Return changes done in `change_transform_maps` to the original state.
+    """
+    change_transform_maps(apps, schema_editor, migration_type=MigrationType.REVERSE)
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("core", "0012_job_object_type_optional"),
+        ("ipfabric_netbox", "0012_remove_status_field"),
+        ("netbox_branching", "0003_rename_indexes"),
+        ("extras", "0123_journalentry_kind_default"),
+    ]
+
+    operations = [
+        migrations.RunPython(migrations.RunPython.noop, rename_ipfabric_ingestion_cf),
+        migrations.DeleteModel(
+            name="IPFabricBranch",
+        ),
+        migrations.CreateModel(
+            name="IPFabricIngestion",
+            fields=[
+                (
+                    "id",
+                    models.BigAutoField(
+                        auto_created=True, primary_key=True, serialize=False
+                    ),
+                ),
+                (
+                    "branch",
+                    models.OneToOneField(
+                        null=True,
+                        on_delete=django.db.models.deletion.SET_NULL,
+                        to="netbox_branching.branch",
+                    ),
+                ),
+                (
+                    "job",
+                    models.ForeignKey(
+                        null=True,
+                        on_delete=django.db.models.deletion.SET_NULL,
+                        to="core.job",
+                    ),
+                ),
+                (
+                    "sync",
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        to="ipfabric_netbox.ipfabricsync",
+                    ),
+                ),
+            ],
+            options={
+                "verbose_name": "IP Fabric Ingestion",
+                "verbose_name_plural": "IP Fabric Ingestion",
+                "ordering": ("pk",),
+            },
+        ),
+        migrations.AlterField(
+            model_name="ipfabricrelationshipfield",
+            name="template",
+            field=models.TextField(blank=True, default=""),
+        ),
+        migrations.AlterField(
+            model_name="ipfabrictransformfield",
+            name="template",
+            field=models.TextField(blank=True, default=""),
+        ),
+        migrations.RunPython(rename_ipfabric_branch_cf, migrations.RunPython.noop),
+        migrations.RunPython(clean_redisual_branch_data, migrations.RunPython.noop),
+        migrations.RunPython(
+            forward_change_transform_maps, reverse_change_transform_maps
+        ),
+    ]
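Note on the migration above: the forward/reverse data handling works by indexing each (before, after) template pair with MigrationType and rewriting to the opposite index. A minimal standalone sketch of that pattern (illustrative only, not part of the package; the template strings are placeholders):

from enum import IntEnum


class MigrationType(IntEnum):
    FORWARD = 0
    REVERSE = 1


# Placeholder (before, after) pair standing in for the real Jinja2 templates.
TEMPLATES = {"duplex": ("before-template", "after-template")}


def swap(migration_type: MigrationType) -> tuple:
    # The migration filters on the template at index `migration_type` and
    # rewrites it to the template at the opposite index, so the same function
    # serves both RunPython directions.
    match_on = TEMPLATES["duplex"][migration_type]
    replace_with = TEMPLATES["duplex"][int(not migration_type)]
    return match_on, replace_with


print(swap(MigrationType.FORWARD))  # ('before-template', 'after-template')
print(swap(MigrationType.REVERSE))  # ('after-template', 'before-template')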
ipfabric_netbox/models.py
CHANGED
@@ -3,12 +3,13 @@ import json
 import logging
 import traceback
 from copy import deepcopy
+from uuid import uuid4

 import httpx
 from core.choices import DataSourceStatusChoices
 from core.exceptions import SyncError
 from core.models import Job
-from core.
+from core.models import ObjectType
 from core.signals import pre_sync
 from dcim.models import Device
 from dcim.models import VirtualChassis
@@ -22,23 +23,22 @@ from django.db import models
 from django.db import transaction
 from django.db.models import Q
 from django.db.models import signals
-from django.forms.models import model_to_dict
 from django.urls import reverse
 from django.utils import timezone
 from django.utils.module_loading import import_string
 from django.utils.translation import gettext as _
-from
-from extras.models import StagedChange
+from netbox.context import current_request
 from netbox.models import ChangeLoggedModel
 from netbox.models import NetBoxModel
 from netbox.models import PrimaryModel
 from netbox.models.features import JobsMixin
 from netbox.models.features import TagsMixin
 from netbox.registry import registry
-from
-from
+from netbox_branching.choices import BranchStatusChoices
+from netbox_branching.contextvars import active_branch
+from netbox_branching.models import Branch
 from utilities.querysets import RestrictedQuerySet
-from utilities.
+from utilities.request import NetBoxFakeRequest

 from .choices import IPFabricRawDataTypeChoices
 from .choices import IPFabricSnapshotStatusModelChoices
@@ -55,12 +55,13 @@ from .utilities.logging import SyncLogging
 logger = logging.getLogger("ipfabric_netbox.models")


-def apply_tags(object, tags):
+def apply_tags(object, tags, connection_name=None):
     def _apply(object):
+        object.snapshot()
         for tag in tags:
             if hasattr(object, "tags"):
                 object.tags.add(tag)
-        object.save()
+        object.save(using=connection_name)

     _apply(object)

@@ -108,9 +109,6 @@ class IPFabricTransformMap(NetBoxModel):
         blank=False,
         null=False,
     )
-    status = models.CharField(
-        max_length=50,
-    )

     class Meta:
         verbose_name = "IP Fabric Transform Map"
@@ -188,12 +186,34 @@ class IPFabricTransformMap(NetBoxModel):
         context = self.render(new_data)
         return context

-    def update_or_create_instance(self, context, tags=[]):
-
-
-
-
-
+    def update_or_create_instance(self, context, tags=[], connection_name=None):
+        target_class = self.target_model.model_class()
+        queryset = target_class.objects.using(connection_name)
+
+        defaults = context.pop("defaults", {})
+
+        with transaction.atomic(using=connection_name):
+            try:
+                # For correct ObjectChange on UPDATE we need to create snapshot
+                # NetBox does this in UI using views, we need to do it manually
+                # See NetBox docs Customization -> Custom Scripts -> Change Logging
+                instance = queryset.get(**context)
+                instance.snapshot()
+                for attr, value in defaults.items():
+                    setattr(instance, attr, value)
+                instance.full_clean()
+                instance.save(using=connection_name)
+            except target_class.DoesNotExist:
+                for field in list(context.keys()):
+                    # When assigning we need to replace `field__isnull=True` with `field=None`
+                    if field.endswith("__isnull"):
+                        context[field[:-8]] = None
+                        del context[field]
+                instance = queryset.create(**context, **defaults)
+                instance.full_clean()
+
+        apply_tags(instance, tags, connection_name)
+
         return instance

     def render(self, source_data):
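A small illustration (not from the package) of the lookup rewrite in update_or_create_instance above: get() accepts queryset lookups such as device__isnull=True, but create() needs plain assignments, so those keys are rewritten to field=None before the object is created. The helper name below is invented for the example:

def lookups_to_assignments(context: dict) -> dict:
    # Rewrite `<field>__isnull` lookup keys into `<field>=None` assignments,
    # mirroring the DoesNotExist branch of update_or_create_instance.
    for field in list(context.keys()):
        if field.endswith("__isnull"):
            context[field[: -len("__isnull")]] = None
            del context[field]
    return context


print(lookups_to_assignments({"name": "eth0", "device__isnull": True}))
# {'name': 'eth0', 'device': None}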
@@ -240,9 +260,6 @@ class IPFabricTransformMap(NetBoxModel):
         if relationship_coalesce := source_data.get("relationship_coalesce"):
             data.update(relationship_coalesce)

-        if self.status:
-            data["defaults"]["status"] = self.status
-
         return data


@@ -269,7 +286,7 @@ class IPFabricRelationshipField(models.Model):
            "Jinja2 template code, return an integer to create a relationship between the source and target model. True, False and None are also supported."
        ),
        blank=True,
-
+        default="",
    )

    objects = RestrictedQuerySet.as_manager()
@@ -301,7 +318,7 @@ class IPFabricTransformField(models.Model):
    template = models.TextField(
        help_text=_("Jinja2 template code to be rendered into the target field."),
        blank=True,
-
+        default="",
    )

    class Meta:
@@ -671,14 +688,28 @@ class IPFabricSync(IPFabricClient, JobsMixin, TagsMixin, ChangeLoggedModel):
         logger.info(f"Syncing with the following data {json.dumps(self.parameters)}")

         current_time = str(timezone.now())
+        ingestion = IPFabricIngestion.objects.create(sync=self, job=job)
+        try:
+            branch = Branch(name=f"IP Fabric Sync {current_time}")
+            branch.save(provision=False)
+            ingestion.branch = branch
+            ingestion.save()

-
-
-
-
-
+            if job:
+                # Re-assign the Job from IPFSync to IPFabricIngestion so it is listed in the ingestion
+                job.object_type = ObjectType.objects.get_for_model(ingestion)
+                job.object_id = ingestion.pk
+                job.save()
+            branch.provision(user=user)
+            branch.refresh_from_db()
+            if branch.status == BranchStatusChoices.FAILED:
+                print("Branch Failed")
+                self.logger.log_failure(f"Branch Failed: `{branch}`", obj=branch)
+                raise SyncError("Branch Creation Failed")
+
+            self.logger.log_info(f"New branch Created {branch.name}", obj=branch)
+            logger.info(f"New branch Created {branch.name}")

-        try:
             self.logger.log_info("Fetching IP Fabric Client", obj=branch)
             logger.info("Fetching IP Fabric Client")

@@ -695,58 +726,62 @@

             runner = IPFabricSyncRunner(
                 client=ipf,
+                ingestion=ingestion,
                 settings=self.parameters,
                 transform_map=IPFabricTransformMap,
                 sync=self,
             )

-
-
+            # Not using `deactivate_branch` since that does not clean up on Exception
+            current_branch = active_branch.get()
+            if not (token := current_request.get()):
+                # This allows for ChangeLoggingMiddleware to create ObjectChanges
+                token = current_request.set(
+                    NetBoxFakeRequest({"id": uuid4(), "user": user})
+                )
+            try:
+                active_branch.set(branch)
+                try:
+                    runner.collect_and_sync(
+                        ingestion=IPFabricIngestion.objects.get(pk=ingestion.pk)
+                    )
+                finally:
+                    active_branch.set(None)
+            finally:
+                current_request.set(token.old_value)
+                active_branch.set(current_branch)

             if self.status != DataSourceStatusChoices.FAILED:
                 self.status = DataSourceStatusChoices.COMPLETED

         except Exception as e:
             self.status = DataSourceStatusChoices.FAILED
-            self.logger.log_failure(f"
+            self.logger.log_failure(f"Ingestion Failed: `{e}`", obj=ingestion)
             self.logger.log_failure(
-                f"Stack Trace: `{traceback.format_exc()}`", obj=
+                f"Stack Trace: `{traceback.format_exc()}`", obj=ingestion
             )
-            logger.debug(f"
+            logger.debug(f"Ingestion Failed: `{e}`")

         logger.debug(f"Completed ingesting data from {self.snapshot_data.source.name}")
         self.logger.log_info(
             f"Completed ingesting data from {self.snapshot_data.source.name}", obj=self
         )

-        for change in branch.staged_changes.all():
-            if hasattr(change.object, "pk"):
-                prechange_data = prechange_data = serialize_object(
-                    change.object, resolve_tags=False
-                )
-                prechange_data = dict(sorted(prechange_data.items()))
-            else:
-                prechange_data = None
-            if hasattr(change, "data"):
-                postchange_data = dict(sorted(change.data.items()))
-
-            diff_added = shallow_compare_dict(
-                prechange_data or dict(),
-                postchange_data or dict(),
-                exclude=["last_updated"],
-            )
-
-            if not diff_added:
-                change.delete()
-
         self.last_synced = timezone.now()

         if self.auto_merge and self.status == DataSourceStatusChoices.COMPLETED:
-            self.logger.log_info("Auto Merging
-            logger.info("Auto Merging
-
-
-
+            self.logger.log_info("Auto Merging Ingestion", obj=ingestion)
+            logger.info("Auto Merging Ingestion")
+            try:
+                ingestion.enqueue_merge_job(user=user)
+                self.logger.log_info("Auto Merge Job Enqueued", obj=ingestion)
+                logger.info("Auto Merge Job Enqueued")
+            except NameError:
+                self.logger.log_failure(
+                    "Failed to Auto Merge, IPFabricIngestion does not exist",
+                    obj=ingestion,
+                )
+                logger.debug("Failed to Auto Merge, IPFabricIngestion does not exist")

         IPFabricSync.objects.filter(pk=self.pk).update(
             status=self.status, last_synced=self.last_synced
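The hunk above activates the branch only around the ingestion and restores the previous active_branch and current_request values in try/finally blocks. A minimal sketch of that save/set/restore discipline using plain contextvars (illustrative only; run_in_branch and the string branch name are invented for the example, while the real code works with netbox_branching Branch objects and NetBoxFakeRequest):

from contextvars import ContextVar

# Stand-in for netbox_branching.contextvars.active_branch.
active_branch: ContextVar = ContextVar("active_branch", default=None)


def run_in_branch(branch, work):
    # Capture the current value, activate the branch, and guarantee the
    # previous value is restored even if `work` raises.
    previous = active_branch.get()
    try:
        active_branch.set(branch)
        return work()
    finally:
        active_branch.set(previous)


print(run_in_branch("ipfabric-sync-branch", lambda: active_branch.get()))
print(active_branch.get())  # None again after the call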
@@ -755,36 +790,48 @@ class IPFabricSync(IPFabricClient, JobsMixin, TagsMixin, ChangeLoggedModel):
             job.data = self.logger.log_data


-class
+class IPFabricIngestion(JobsMixin, models.Model):
     """
-
+    Links IP Fabric Sync to its Branches.
     """

+    objects = RestrictedQuerySet.as_manager()
+
     sync = models.ForeignKey(IPFabricSync, on_delete=models.CASCADE)
     job = models.ForeignKey(Job, on_delete=models.SET_NULL, null=True)
+    branch = models.OneToOneField(Branch, on_delete=models.SET_NULL, null=True)

     class Meta:
-        ordering = ("
-        verbose_name = "IP Fabric
-        verbose_name_plural = "IP Fabric
+        ordering = ("pk",)
+        verbose_name = "IP Fabric Ingestion"
+        verbose_name_plural = "IP Fabric Ingestions"

     def __str__(self):
-        return
+        return self.name
+
+    @property
+    def name(self):
+        if self.branch:
+            return self.branch.name
+        return f"{self.sync.name} (Ingestion {self.pk})"

     def get_absolute_url(self):
-        return reverse("plugins:ipfabric_netbox:
+        return reverse("plugins:ipfabric_netbox:ipfabricingestion", args=[self.pk])

-    def enqueue_merge_job(self, user):
-        # Set the status to "
+    def enqueue_merge_job(self, user, remove_branch=False):
+        # Set the status to "queued"
         self.status = DataSourceStatusChoices.QUEUED
-        IPFabricSync.objects.filter(
+        IPFabricSync.objects.filter(ipfabricingestion=self.pk).update(
+            status=self.status
+        )

         # Enqueue a sync job
         return Job.enqueue(
-            import_string("ipfabric_netbox.jobs.
+            import_string("ipfabric_netbox.jobs.merge_ipfabric_ingestion"),
             name=f"{self.name} Merge",
             instance=self,
             user=user,
+            remove_branch=remove_branch,
         )

     def get_logs(self):
@@ -809,24 +856,6 @@ class IPFabricBranch(JobsMixin, Branch):
             statistics[model] = stats["current"] / 1 * 100
         return {"job_results": job_results, "statistics": statistics}

-    def merge(self):
-        logger.info(f"Merging changes in branch {self}")
-        with transaction.atomic():
-            for change in self.staged_changes.all():
-                logger.debug("Applying change: %s", change)
-                try:
-                    change.apply()
-                except Exception as err:
-                    content_type = ContentType.objects.get(pk=change.object_type.pk)
-                    data = model_to_dict(change)["data"]
-                    logger.error(
-                        f"Got error applying change ({content_type}: {data}): {err}"
-                    )
-                    raise
-            signals.pre_delete.disconnect(handle_deleted_object)
-            self.staged_changes.all().delete()
-            signals.pre_delete.connect(handle_deleted_object, sender=StagedChange)
-
     def sync_merge(self):
         ipfabricsync = self.sync
         if ipfabricsync.status == DataSourceStatusChoices.SYNCING:
@@ -835,7 +864,7 @@ class IPFabricBranch(JobsMixin, Branch):
         pre_sync.send(sender=self.__class__, instance=self)

         ipfabricsync.status = DataSourceStatusChoices.SYNCING
-        IPFabricSync.objects.filter(
+        IPFabricSync.objects.filter(ipfabricingestion=self.pk).update(
             status=self.sync.status
         )

@@ -846,7 +875,7 @@ class IPFabricBranch(JobsMixin, Branch):
         signals.post_save.disconnect(
             assign_virtualchassis_master, sender=VirtualChassis
         )
-        self.merge()
+        self.branch.merge(user=self.sync.user)
         signals.post_save.connect(
             assign_virtualchassis_master, sender=VirtualChassis
         )
@@ -859,7 +888,7 @@ class IPFabricBranch(JobsMixin, Branch):
         logger.debug(f"Completed merge {self.name}")

         ipfabricsync.last_synced = timezone.now()
-        IPFabricSync.objects.filter(
+        IPFabricSync.objects.filter(ipfabricingestion=self.pk).update(
             status=ipfabricsync.status, last_synced=ipfabricsync.last_synced
         )

ipfabric_netbox/navigation.py
CHANGED
ipfabric_netbox/signals.py
CHANGED
@@ -2,6 +2,7 @@ import logging

 from dcim.models import Device
 from ipam.models import IPAddress
+from netbox_branching.contextvars import active_branch

 logger = logging.getLogger("ipfabric_netbox.utilities.ipf_utils")

@@ -21,9 +22,15 @@ def clear_other_primary_ip(instance: Device, **kwargs) -> None:
         # THe IP is not created yet, cannot be assigned
         return
     try:
-
+        connection_name = None
+        if branch := active_branch.get():
+            connection_name = branch.connection_name
+        other_device = Device.objects.using(connection_name).get(
+            primary_ip4=instance.primary_ip
+        )
         if other_device and instance != other_device:
+            other_device.snapshot()
             other_device.primary_ip4 = None
-            other_device.save()
+            other_device.save(using=connection_name)
     except Device.DoesNotExist:
         pass