nautobot-2.3.15-py3-none-any.whl → nautobot-2.3.16-py3-none-any.whl
This diff compares the contents of two publicly released package versions as they appear in their public registry, and is provided for informational purposes only.
Potentially problematic release: this version of nautobot might be problematic.
- nautobot/circuits/views.py +3 -3
- nautobot/cloud/models.py +1 -1
- nautobot/core/api/fields.py +5 -5
- nautobot/core/api/serializers.py +9 -9
- nautobot/core/api/views.py +3 -2
- nautobot/core/apps/__init__.py +5 -2
- nautobot/core/celery/schedulers.py +1 -1
- nautobot/core/filters.py +19 -16
- nautobot/core/forms/fields.py +5 -5
- nautobot/core/graphql/types.py +1 -1
- nautobot/core/jobs/__init__.py +4 -4
- nautobot/core/jobs/cleanup.py +1 -1
- nautobot/core/jobs/groups.py +1 -1
- nautobot/core/management/commands/validate_models.py +1 -1
- nautobot/core/models/__init__.py +1 -1
- nautobot/core/models/query_functions.py +2 -2
- nautobot/core/models/tree_queries.py +2 -2
- nautobot/core/tables.py +5 -5
- nautobot/core/testing/filters.py +7 -3
- nautobot/core/testing/views.py +5 -0
- nautobot/core/tests/runner.py +1 -1
- nautobot/core/views/generic.py +51 -43
- nautobot/core/views/mixins.py +21 -11
- nautobot/dcim/api/serializers.py +48 -48
- nautobot/dcim/forms.py +2 -0
- nautobot/dcim/graphql/types.py +2 -2
- nautobot/dcim/models/device_component_templates.py +2 -2
- nautobot/dcim/models/device_components.py +22 -20
- nautobot/dcim/models/devices.py +1 -1
- nautobot/dcim/models/locations.py +3 -3
- nautobot/dcim/models/power.py +6 -5
- nautobot/dcim/models/racks.py +4 -4
- nautobot/dcim/tables/__init__.py +3 -3
- nautobot/dcim/tables/devicetypes.py +2 -2
- nautobot/dcim/tests/test_filters.py +1 -0
- nautobot/dcim/tests/test_graphql.py +52 -0
- nautobot/dcim/tests/test_models.py +4 -1
- nautobot/dcim/views.py +1 -1
- nautobot/extras/api/customfields.py +2 -2
- nautobot/extras/api/serializers.py +72 -69
- nautobot/extras/api/views.py +4 -4
- nautobot/extras/health_checks.py +1 -2
- nautobot/extras/jobs.py +5 -5
- nautobot/extras/managers.py +3 -1
- nautobot/extras/migrations/0018_joblog_data_migration.py +7 -9
- nautobot/extras/models/groups.py +13 -9
- nautobot/extras/models/jobs.py +4 -4
- nautobot/extras/models/models.py +2 -2
- nautobot/extras/plugins/views.py +1 -1
- nautobot/extras/tables.py +5 -5
- nautobot/extras/test_jobs/api_test_job.py +1 -1
- nautobot/extras/test_jobs/atomic_transaction.py +2 -2
- nautobot/extras/test_jobs/dry_run.py +1 -1
- nautobot/extras/test_jobs/fail.py +5 -5
- nautobot/extras/test_jobs/file_output.py +1 -1
- nautobot/extras/test_jobs/file_upload_fail.py +1 -1
- nautobot/extras/test_jobs/file_upload_pass.py +1 -1
- nautobot/extras/test_jobs/ipaddress_vars.py +3 -1
- nautobot/extras/test_jobs/jobs_module/jobs_submodule/jobs.py +1 -1
- nautobot/extras/test_jobs/location_with_custom_field.py +1 -1
- nautobot/extras/test_jobs/log_redaction.py +1 -1
- nautobot/extras/test_jobs/log_skip_db_logging.py +1 -1
- nautobot/extras/test_jobs/modify_db.py +1 -1
- nautobot/extras/test_jobs/object_var_optional.py +1 -1
- nautobot/extras/test_jobs/object_var_required.py +1 -1
- nautobot/extras/test_jobs/object_vars.py +1 -1
- nautobot/extras/test_jobs/pass.py +3 -3
- nautobot/extras/test_jobs/profiling.py +1 -1
- nautobot/extras/test_jobs/relative_import.py +3 -3
- nautobot/extras/test_jobs/soft_time_limit_greater_than_time_limit.py +1 -1
- nautobot/extras/test_jobs/task_queues.py +1 -1
- nautobot/extras/tests/test_api.py +13 -13
- nautobot/extras/tests/test_customfields.py +1 -1
- nautobot/extras/tests/test_datasources.py +2 -1
- nautobot/extras/tests/test_dynamicgroups.py +1 -1
- nautobot/extras/tests/test_filters.py +6 -6
- nautobot/extras/tests/test_jobs.py +11 -11
- nautobot/extras/tests/test_models.py +10 -10
- nautobot/extras/tests/test_relationships.py +1 -1
- nautobot/extras/tests/test_views.py +16 -16
- nautobot/extras/views.py +20 -16
- nautobot/ipam/api/fields.py +3 -3
- nautobot/ipam/api/serializers.py +33 -33
- nautobot/ipam/api/views.py +37 -61
- nautobot/ipam/querysets.py +2 -2
- nautobot/ipam/tests/test_api.py +12 -1
- nautobot/ipam/tests/test_forms.py +51 -47
- nautobot/ipam/tests/test_migrations.py +30 -30
- nautobot/ipam/tests/test_querysets.py +14 -0
- nautobot/project-static/docs/code-reference/nautobot/apps/forms.html +1 -1
- nautobot/project-static/docs/code-reference/nautobot/apps/testing.html +1 -1
- nautobot/project-static/docs/code-reference/nautobot/apps/views.html +2 -2
- nautobot/project-static/docs/release-notes/version-2.3.html +181 -99
- nautobot/project-static/docs/search/search_index.json +1 -1
- nautobot/project-static/docs/sitemap.xml +270 -270
- nautobot/project-static/docs/sitemap.xml.gz +0 -0
- nautobot/users/admin.py +1 -1
- nautobot/users/api/serializers.py +4 -4
- nautobot/users/api/views.py +1 -1
- nautobot/virtualization/api/serializers.py +4 -4
- {nautobot-2.3.15.dist-info → nautobot-2.3.16.dist-info}/METADATA +1 -1
- {nautobot-2.3.15.dist-info → nautobot-2.3.16.dist-info}/RECORD +106 -106
- {nautobot-2.3.15.dist-info → nautobot-2.3.16.dist-info}/WHEEL +1 -1
- {nautobot-2.3.15.dist-info → nautobot-2.3.16.dist-info}/LICENSE.txt +0 -0
- {nautobot-2.3.15.dist-info → nautobot-2.3.16.dist-info}/NOTICE +0 -0
- {nautobot-2.3.15.dist-info → nautobot-2.3.16.dist-info}/entry_points.txt +0 -0
@@ -1691,21 +1691,21 @@ class ScheduledJobTestCase(
         user = User.objects.create(username="user1", is_active=True)
         ScheduledJob.objects.create(
             name="test1",
-            task="pass.
+            task="pass.TestPassJob",
             interval=JobExecutionType.TYPE_IMMEDIATELY,
             user=user,
             start_time=timezone.now(),
         )
         ScheduledJob.objects.create(
             name="test2",
-            task="pass.
+            task="pass.TestPassJob",
             interval=JobExecutionType.TYPE_DAILY,
             user=user,
             start_time=timezone.now(),
         )
         ScheduledJob.objects.create(
             name="test3",
-            task="pass.
+            task="pass.TestPassJob",
             interval=JobExecutionType.TYPE_CUSTOM,
             user=user,
             start_time=timezone.now(),
@@ -1719,7 +1719,7 @@ class ScheduledJobTestCase(
         ScheduledJob.objects.create(
             enabled=False,
             name="test4",
-            task="pass.
+            task="pass.TestPassJob",
             interval=JobExecutionType.TYPE_IMMEDIATELY,
             user=self.user,
             start_time=timezone.now(),
@@ -1736,7 +1736,7 @@ class ScheduledJobTestCase(
         ScheduledJob.objects.create(
             enabled=True,
             name=name,
-            task="pass.
+            task="pass.TestPassJob",
             interval=JobExecutionType.TYPE_CUSTOM,
             user=self.user,
             start_time=timezone.now(),
@@ -1767,7 +1767,7 @@ class ScheduledJobTestCase(
         ScheduledJob.objects.create(
             enabled=True,
             name="test11",
-            task="pass.
+            task="pass.TestPassJob",
             interval=JobExecutionType.TYPE_CUSTOM,
             user=self.user,
             start_time=timezone.now(),
@@ -1801,7 +1801,7 @@ class ApprovalQueueTestCase(
     def setUp(self):
         super().setUp()
         self.job_model = Job.objects.get_for_class_path("dry_run.TestDryRun")
-        self.job_model_2 = Job.objects.get_for_class_path("fail.
+        self.job_model_2 = Job.objects.get_for_class_path("fail.TestFailJob")

         ScheduledJob.objects.create(
             name="test1",
@@ -1814,7 +1814,7 @@ class ApprovalQueueTestCase(
         )
         ScheduledJob.objects.create(
             name="test2",
-            task="fail.
+            task="fail.TestFailJob",
             job_model=self.job_model_2,
             interval=JobExecutionType.TYPE_IMMEDIATELY,
             user=self.user,
@@ -1827,7 +1827,7 @@ class ApprovalQueueTestCase(

         ScheduledJob.objects.create(
             name="test4",
-            task="pass.
+            task="pass.TestPassJob",
             job_model=self.job_model,
             interval=JobExecutionType.TYPE_IMMEDIATELY,
             user=self.user,
@@ -2159,8 +2159,8 @@ class JobResultTestCase(

     @classmethod
     def setUpTestData(cls):
-        JobResult.objects.create(name="pass.
-        JobResult.objects.create(name="fail.
+        JobResult.objects.create(name="pass.TestPassJob")
+        JobResult.objects.create(name="fail.TestFailJob")
         JobLogEntry.objects.create(
             log_level=LogLevelChoices.LOG_INFO,
             job_result=JobResult.objects.first(),
@@ -2214,7 +2214,7 @@ class JobTestCase(
         # Job model objects are automatically created during database migrations

         # But we do need to make sure the ones we're testing are flagged appropriately
-        cls.test_pass = Job.objects.get(job_class_name="
+        cls.test_pass = Job.objects.get(job_class_name="TestPassJob")
         cls.test_pass.enabled = True
         cls.test_pass.save()

@@ -2433,7 +2433,7 @@ class JobTestCase(
         self.add_permissions("extras.run_job")
         for run_url in self.run_urls:
             response = self.client.get(run_url)
-            self.assertBodyContains(response, "
+            self.assertBodyContains(response, "TestPassJob")

     @override_settings(EXEMPT_VIEW_PERMISSIONS=[])
     def test_get_run_with_constrained_permission(self):
@@ -2532,12 +2532,12 @@ class JobTestCase(
         self.add_permissions("extras.run_job")

         for run_url in (
-            reverse("extras:job_run_by_class_path", kwargs={"class_path": "fail.
-            reverse("extras:job_run", kwargs={"pk": Job.objects.get(job_class_name="
+            reverse("extras:job_run_by_class_path", kwargs={"class_path": "fail.TestFailJob"}),
+            reverse("extras:job_run", kwargs={"pk": Job.objects.get(job_class_name="TestFailJob").pk}),
         ):
             response = self.client.post(run_url, self.data_run_immediately)
             self.assertBodyContains(response, "Job is not enabled to be run")
-            self.assertFalse(JobResult.objects.filter(name="fail.
+            self.assertFalse(JobResult.objects.filter(name="fail.TestFailJob").exists())

     def test_run_now_missing_args(self):
         self.add_permissions("extras.run_job")
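These test changes are a straight rename of the referenced job classes: a ScheduledJob's `task` is the job's class path ("module_name.JobClassName"), so every "pass.…"/"fail.…" string has to match the new TestPassJob/TestFailJob names. A minimal sketch of the pattern under a configured Nautobot environment (names and values mirror the fixtures above and are illustrative only):

    from django.contrib.auth import get_user_model
    from django.utils import timezone

    from nautobot.extras.choices import JobExecutionType
    from nautobot.extras.models import ScheduledJob

    User = get_user_model()

    # The task string is the job's class path ("module_name.JobClassName"); if it does not
    # match the (renamed) job class, the scheduler cannot resolve the job when the schedule fires.
    user = User.objects.create(username="user1", is_active=True)
    ScheduledJob.objects.create(
        name="example-schedule",
        task="pass.TestPassJob",
        interval=JobExecutionType.TYPE_IMMEDIATELY,
        user=user,
        start_time=timezone.now(),
    )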
nautobot/extras/views.py CHANGED
@@ -1060,10 +1060,10 @@ class GitRepositoryEditView(generic.ObjectEditView):
         obj.user = request.user
         return super().alter_obj(obj, request, url_args, url_kwargs)

-    def get_return_url(self, request, obj):
+    def get_return_url(self, request, obj=None, default_return_url=None):
         if request.method == "POST":
             return reverse("extras:gitrepository_result", kwargs={"pk": obj.pk})
-        return super().get_return_url(request, obj)
+        return super().get_return_url(request, obj=obj, default_return_url=default_return_url)


 class GitRepositoryDeleteView(generic.ObjectDeleteView):
@@ -1211,27 +1211,27 @@ class ImageAttachmentEditView(generic.ObjectEditView):
             return get_object_or_404(self.queryset, pk=kwargs["pk"])
         return self.queryset.model()

-    def alter_obj(self,
-        if not
+    def alter_obj(self, obj, request, url_args, url_kwargs):
+        if not obj.present_in_database:
             # Assign the parent object based on URL kwargs
-            model =
-            if "object_id" in
-
-            elif "slug" in
-
+            model = url_kwargs.get("model")
+            if "object_id" in url_kwargs:
+                obj.parent = get_object_or_404(model, pk=url_kwargs["object_id"])
+            elif "slug" in url_kwargs:
+                obj.parent = get_object_or_404(model, slug=url_kwargs["slug"])
             else:
                 raise RuntimeError("Neither object_id nor slug were provided?")
-        return
+        return obj

-    def get_return_url(self, request,
-        return
+    def get_return_url(self, request, obj=None, default_return_url=None):
+        return obj.parent.get_absolute_url()


 class ImageAttachmentDeleteView(generic.ObjectDeleteView):
     queryset = ImageAttachment.objects.all()

-    def get_return_url(self, request,
-        return
+    def get_return_url(self, request, obj=None, default_return_url=None):
+        return obj.parent.get_absolute_url()


 #
@@ -1337,7 +1337,9 @@ class JobRunView(ObjectPermissionRequiredMixin, View):
                 get_template(job_class.template_name)
                 template_name = job_class.template_name
             except TemplateDoesNotExist as err:
-                messages.error(
+                messages.error(
+                    request, f'Unable to render requested custom job template "{job_class.template_name}": {err}'
+                )
         except RuntimeError as err:
             messages.error(request, f"Unable to run or schedule '{job_model}': {err}")
             return redirect("extras:job_list")
@@ -1447,7 +1449,9 @@ class JobRunView(ObjectPermissionRequiredMixin, View):
                 get_template(job_class.template_name)
                 template_name = job_class.template_name
             except TemplateDoesNotExist as err:
-                messages.error(
+                messages.error(
+                    request, f'Unable to render requested custom job template "{job_class.template_name}": {err}'
+                )

         return render(
             request,
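The get_return_url() changes above align the overrides with the base view's (request, obj=None, default_return_url=None) signature, so callers that pass either keyword argument keep working. A minimal sketch of the override pattern, assuming the nautobot.core.views.generic import path used by the core views and a hypothetical ExampleModel:

    from nautobot.core.views import generic


    class ExampleModelEditView(generic.ObjectEditView):
        queryset = ExampleModel.objects.all()  # hypothetical model, for illustration only

        def get_return_url(self, request, obj=None, default_return_url=None):
            # Accept the same keyword arguments as the base class and forward them unchanged,
            # only special-casing objects that already exist in the database.
            if obj is not None and obj.present_in_database:
                return obj.get_absolute_url()
            return super().get_return_url(request, obj=obj, default_return_url=default_return_url)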
nautobot/ipam/api/fields.py CHANGED
@@ -7,11 +7,11 @@ class IPFieldSerializer(serializers.CharField):
         """Convert internal (IPNetwork) representation to API (string) representation."""
         return str(value)

-    def to_internal_value(self,
+    def to_internal_value(self, data):
         """Convert API (string) representation to internal (IPNetwork) representation."""
         try:
-            return netaddr.IPNetwork(
+            return netaddr.IPNetwork(data)
         except netaddr.AddrFormatError:
-            raise serializers.ValidationError(f"Invalid IP address format: {
+            raise serializers.ValidationError(f"Invalid IP address format: {data}")
         except (TypeError, ValueError) as e:
             raise serializers.ValidationError(e)
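The rename to `data` follows Django REST Framework's field contract: to_representation(self, value) handles outbound values and to_internal_value(self, data) parses inbound ones. A self-contained sketch of the same pattern (the class name is illustrative):

    import netaddr
    from rest_framework import serializers


    class ExampleIPField(serializers.CharField):
        def to_representation(self, value):
            # Outbound: IPNetwork -> string.
            return str(value)

        def to_internal_value(self, data):
            # Inbound: string -> IPNetwork, raising a field-level validation error on bad input.
            try:
                return netaddr.IPNetwork(data)
            except netaddr.AddrFormatError:
                raise serializers.ValidationError(f"Invalid IP address format: {data}")
            except (TypeError, ValueError) as e:
                raise serializers.ValidationError(e)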
nautobot/ipam/api/serializers.py CHANGED
@@ -64,16 +64,16 @@ class VRFDeviceAssignmentSerializer(ValidatedModelSerializer):
         fields = "__all__"
         validators = []

-    def validate(self,
-        if
+    def validate(self, attrs):
+        if attrs.get("device"):
             validator = UniqueTogetherValidator(queryset=VRFDeviceAssignment.objects.all(), fields=("device", "vrf"))
-            validator(
-        if
+            validator(attrs, self)
+        if attrs.get("virtual_machine"):
             validator = UniqueTogetherValidator(
                 queryset=VRFDeviceAssignment.objects.all(), fields=("virtual_machine", "vrf")
             )
-            validator(
-        return super().validate(
+            validator(attrs, self)
+        return super().validate(attrs)


 class VRFPrefixAssignmentSerializer(ValidatedModelSerializer):
@@ -156,17 +156,17 @@ class VLANSerializer(NautobotModelSerializer, TaggedModelSerializerMixin):
         validators = []
         extra_kwargs = {"locations": {"read_only": True}}

-    def validate(self,
+    def validate(self, attrs):
         # Validate uniqueness of vid and name if a group has been assigned.
-        if
+        if attrs.get("vlan_group", None):
             for field in ["vid", "name"]:
                 validator = UniqueTogetherValidator(queryset=VLAN.objects.all(), fields=("vlan_group", field))
-                validator(
+                validator(attrs, self)

         # Enforce model validation
-        super().validate(
+        super().validate(attrs)

-        return
+        return attrs


 class VLANLegacySerializer(VLANSerializer):
@@ -417,16 +417,16 @@ class IPAddressSerializer(NautobotModelSerializer, TaggedModelSerializerMixin):
             ],
         }

-    def validate(self,
-        namespace =
-        parent =
+    def validate(self, attrs):
+        namespace = attrs.get("namespace", None)
+        parent = attrs.get("parent", None)

         # Only assert namespace/parent on create.
         if self.instance is None and not any([namespace, parent]):
             raise ValidationError({"__all__": "One of parent or namespace must be provided"})

-        super().validate(
-        return
+        super().validate(attrs)
+        return attrs

     def get_field_names(self, declared_fields, info):
         """Add reverse relations to the automatically discovered fields."""
@@ -476,9 +476,9 @@ class IPAllocationSerializer(NautobotModelSerializer, TaggedModelSerializerMixin
             "custom_fields",
         )

-    def validate(self,
-
-        return super().validate(
+    def validate(self, attrs):
+        attrs["mask_length"] = self.context["prefix"].prefix_length
+        return super().validate(attrs)


 class VLANAllocationSerializer(NautobotModelSerializer, TaggedModelSerializerMixin):
@@ -488,11 +488,11 @@ class VLANAllocationSerializer(NautobotModelSerializer, TaggedModelSerializerMix

     vid = serializers.IntegerField(required=False, min_value=constants.VLAN_VID_MIN, max_value=constants.VLAN_VID_MAX)

-    def validate(self,
+    def validate(self, attrs):
         """Skip `ValidatedModel` validation.
         This allows to skip `vid` attribute of `VLAN` model, while validate name and status.
         """
-        return
+        return attrs

     class Meta(VLANSerializer.Meta):
         model = VLAN
@@ -521,19 +521,19 @@ class IPAddressToInterfaceSerializer(ValidatedModelSerializer):
         fields = "__all__"
         validators = []

-    def validate(self,
+    def validate(self, attrs):
         # Validate uniqueness of (parent, name) since we omitted the automatically created validator from Meta.
-        if
+        if attrs.get("interface"):
             validator = UniqueTogetherValidator(
                 queryset=IPAddressToInterface.objects.all(), fields=("interface", "ip_address")
             )
-            validator(
-        if
+            validator(attrs, self)
+        if attrs.get("vm_interface"):
             validator = UniqueTogetherValidator(
                 queryset=IPAddressToInterface.objects.all(), fields=("vm_interface", "ip_address")
             )
-            validator(
-        return super().validate(
+            validator(attrs, self)
+        return super().validate(attrs)


 #
@@ -565,11 +565,11 @@ class ServiceSerializer(NautobotModelSerializer, TaggedModelSerializerMixin):
         # list_display_fields = ["name", "parent", "protocol", "ports", "description"]
         list_display_fields = ["name", "device", "protocol", "ports", "description"]

-    def validate(self,
-        if
+    def validate(self, attrs):
+        if attrs.get("device"):
             validator = UniqueTogetherValidator(queryset=Service.objects.all(), fields=("name", "device"))
-            validator(
-        if
+            validator(attrs, self)
+        if attrs.get("virtual_machine"):
             validator = UniqueTogetherValidator(queryset=Service.objects.all(), fields=("name", "virtual_machine"))
-            validator(
-        return super().validate(
+            validator(attrs, self)
+        return super().validate(attrs)
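These serializers now use DRF's conventional validate(self, attrs) argument name, and the conditional uniqueness checks invoke a UniqueTogetherValidator instance directly with the attrs and the serializer (the calling convention DRF's context-aware validators expect). A minimal sketch of that pattern; the serializer name is illustrative and the Service model is used only as an example:

    from rest_framework import serializers
    from rest_framework.validators import UniqueTogetherValidator

    from nautobot.ipam.models import Service


    class ExampleServiceSerializer(serializers.ModelSerializer):
        class Meta:
            model = Service
            fields = "__all__"
            validators = []  # drop the auto-generated validators so they can be applied conditionally

        def validate(self, attrs):
            # Only enforce (name, device) uniqueness when a device was actually supplied.
            if attrs.get("device"):
                validator = UniqueTogetherValidator(queryset=Service.objects.all(), fields=("name", "device"))
                validator(attrs, self)
            return super().validate(attrs)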
nautobot/ipam/api/views.py CHANGED
@@ -139,15 +139,15 @@ class PrefixViewSet(NautobotModelViewSet):
         )
         default_code = "precondition_failed"

-    def retrieve(self, request, pk=None):
+    def retrieve(self, request, *args, pk=None, **kwargs):
         try:
-            return super().retrieve(request, pk)
+            return super().retrieve(request, *args, pk=pk, **kwargs)
         except Location.MultipleObjectsReturned as e:
             raise self.LocationIncompatibleLegacyBehavior from e

-    def list(self, request):
+    def list(self, request, *args, **kwargs):
         try:
-            return super().list(request)
+            return super().list(request, *args, **kwargs)
         except Location.MultipleObjectsReturned as e:
             raise self.LocationIncompatibleLegacyBehavior from e

@@ -185,20 +185,15 @@ class PrefixViewSet(NautobotModelViewSet):
             available_prefixes = prefix.get_available_prefixes()

             # Validate Requested Prefixes' length
-
-                data=request.data if isinstance(request.data, list) else [request.data],
-                many=True,
-                context={
-                    "request": request,
-                    "prefix": prefix,
-                },
-            )
-            serializer.is_valid(raise_exception=True)
-
-            requested_prefixes = serializer.validated_data
-            # Allocate prefixes to the requested objects based on availability within the parent
+            requested_prefixes = request.data if isinstance(request.data, list) else [request.data]
             for requested_prefix in requested_prefixes:
-                #
+                # If the prefix_length is not an integer, return a 400 using the
+                # serializer.is_valid(raise_exception=True) method call below
+                if not isinstance(requested_prefix["prefix_length"], int):
+                    return Response(
+                        {"prefix_length": "This field must be an integer."},
+                        status=status.HTTP_400_BAD_REQUEST,
+                    )
                 for available_prefix in available_prefixes.iter_cidrs():
                     if requested_prefix["prefix_length"] >= available_prefix.prefixlen:
                         allocated_prefix = f"{available_prefix.network}/{requested_prefix['prefix_length']}"
@@ -210,11 +205,6 @@ class PrefixViewSet(NautobotModelViewSet):
                         {"detail": "Insufficient space is available to accommodate the requested prefix size(s)"},
                         status=status.HTTP_204_NO_CONTENT,
                     )
-
-                # The serializer usage above has mapped "custom_fields" dict to "_custom_field_data".
-                # We need to convert it back to "custom_fields" as we're going to deserialize it a second time below
-                requested_prefix["custom_fields"] = requested_prefix.pop("_custom_field_data", {})
-
                 # Remove the allocated prefix from the list of available prefixes
                 available_prefixes.remove(allocated_prefix)

@@ -275,17 +265,7 @@ class PrefixViewSet(NautobotModelViewSet):
             "nautobot.ipam.api.views.available_ips", blocking_timeout=5, timeout=settings.REDIS_LOCK_TIMEOUT
         ):
             # Normalize to a list of objects
-
-                data=request.data if isinstance(request.data, list) else [request.data],
-                many=True,
-                context={
-                    "request": request,
-                    "prefix": prefix,
-                },
-            )
-            serializer.is_valid(raise_exception=True)
-
-            requested_ips = serializer.validated_data
+            requested_ips = request.data if isinstance(request.data, list) else [request.data]

             # Determine if the requested number of IPs is available
             available_ips = prefix.get_available_ips()
@@ -306,9 +286,6 @@ class PrefixViewSet(NautobotModelViewSet):
             for requested_ip in requested_ips:
                 requested_ip["address"] = f"{next(available_ips)}/{prefix_length}"
                 requested_ip["namespace"] = prefix.namespace
-                # The serializer usage above has mapped "custom_fields" dict to "_custom_field_data".
-                # We need to convert it back to "custom_fields" as we're going to deserialize it a second time below
-                requested_ip["custom_fields"] = requested_ip.pop("_custom_field_data", {})

             # Initialize the serializer with a list or a single object depending on what was requested
             context = {"request": request, "depth": 0}
@@ -442,16 +419,7 @@ class VLANGroupViewSet(NautobotModelViewSet):
             "nautobot.ipam.api.views.available_vlans", blocking_timeout=5, timeout=settings.REDIS_LOCK_TIMEOUT
         ):
             # Normalize to a list of objects
-
-                data=request.data if isinstance(request.data, list) else [request.data],
-                many=True,
-                context={
-                    "request": request,
-                    "vlan_group": vlan_group,
-                },
-            )
-            serializer.is_valid(raise_exception=True)
-            requested_vlans = serializer.validated_data
+            requested_vlans = request.data if isinstance(request.data, list) else [request.data]

             # Determine if the requested number of VLANs is available
             available_vids = vlan_group.available_vids
@@ -487,20 +455,28 @@
                     requested_vlan["vid"] = next(_available_vids)

                 # Check requested `vlan_group`
-                if "vlan_group" in requested_vlan
-
-
-
-
-
-
-
+                if "vlan_group" in requested_vlan:
+                    requested_vlan_group = None
+                    requested_vlan_group_pk = requested_vlan["vlan_group"]
+                    try:
+                        requested_vlan_group = VLANGroup.objects.get(pk=requested_vlan_group_pk)
+                    except VLANGroup.DoesNotExist:
+                        return Response(
+                            {"detail": f"VLAN Group with pk {requested_vlan_group_pk} does not exist."},
+                            status=status.HTTP_204_NO_CONTENT,
+                        )
+
+                    if requested_vlan_group != vlan_group:
+                        return Response(
+                            {
+                                "detail": f"Invalid VLAN Group requested: {requested_vlan_group}. "
+                                f"Only VLAN Group {vlan_group} is permitted."
+                            },
+                            status=status.HTTP_204_NO_CONTENT,
+                        )
                 else:
                     requested_vlan["vlan_group"] = vlan_group.pk

-                # Rewrite custom field data
-                requested_vlan["custom_fields"] = requested_vlan.pop("_custom_field_data", {})
-
                 # Initialize the serializer with a list or a single object depending on what was requested
                 context = {"request": request, "depth": 0}

@@ -604,15 +580,15 @@ class VLANViewSet(NautobotModelViewSet):
             return serializers.VLANLegacySerializer
         return super().get_serializer_class()

-    def retrieve(self, request, pk=None):
+    def retrieve(self, request, *args, pk=None, **kwargs):
         try:
-            return super().retrieve(request, pk)
+            return super().retrieve(request, *args, pk=pk, **kwargs)
         except Location.MultipleObjectsReturned as e:
             raise self.LocationIncompatibleLegacyBehavior from e

-    def list(self, request):
+    def list(self, request, *args, **kwargs):
         try:
-            return super().list(request)
+            return super().list(request, *args, **kwargs)
         except Location.MultipleObjectsReturned as e:
             raise self.LocationIncompatibleLegacyBehavior from e

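The retrieve()/list() signature changes match the standard DRF viewset methods, which accept *args/**kwargs from the URL resolver and pass them through to super(); the other hunks stop pre-validating the raw request payload with a serializer before the existing per-item checks. A minimal sketch of the signature pattern on a plain DRF viewset (names are illustrative):

    from rest_framework import viewsets


    class ExampleViewSet(viewsets.ReadOnlyModelViewSet):
        # queryset and serializer_class would be defined on a real viewset.

        def retrieve(self, request, *args, **kwargs):
            # Forward router-supplied kwargs (e.g. pk) instead of dropping them.
            return super().retrieve(request, *args, **kwargs)

        def list(self, request, *args, **kwargs):
            return super().list(request, *args, **kwargs)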
nautobot/ipam/querysets.py CHANGED
@@ -398,7 +398,7 @@ class IPAddressQuerySet(BaseNetworkQuerySet):
         """
         return super().order_by("host")

-    def get_or_create(self, **kwargs):
+    def get_or_create(self, defaults=None, **kwargs):
         from nautobot.ipam.models import get_default_namespace, Prefix

         parent = kwargs.get("parent")
@@ -421,7 +421,7 @@
                 raise ValidationError(f"{cidr} does not appear to be an IPv4 or IPv6 network.") from err
             parent = Prefix.objects.filter(namespace=namespace).get_closest_parent(cidr=cidr, include_self=True)
             kwargs["parent"] = parent
-        return super().get_or_create(**kwargs)
+        return super().get_or_create(defaults=defaults, **kwargs)

     def string_search(self, search):
         """
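Accepting and forwarding `defaults` restores the standard Django QuerySet.get_or_create(defaults=None, **kwargs) signature, so callers that pass defaults={...} keep working against the custom queryset. A minimal sketch of the pattern (the queryset class is illustrative):

    from django.db import models


    class ExampleQuerySet(models.QuerySet):
        def get_or_create(self, defaults=None, **kwargs):
            # Adjust the lookup kwargs as needed, then forward defaults unchanged so that
            # get_or_create(defaults={"description": "..."}, name="example") still behaves normally.
            return super().get_or_create(defaults=defaults, **kwargs)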
nautobot/ipam/tests/test_api.py CHANGED
@@ -2,6 +2,7 @@ from concurrent.futures.thread import ThreadPoolExecutor
 import json
 from random import shuffle
 from unittest import skip
+import uuid

 from django.contrib.contenttypes.models import ContentType
 from django.db import connection
@@ -506,7 +507,8 @@ class PrefixTest(APIViewTestCases.APIViewTestCase):
             url, {"prefix_length": "hello", "status": self.status.pk}, format="json", **self.header
         )
         self.assertHttpStatus(response, status.HTTP_400_BAD_REQUEST)
-        self.assertIn("prefix_length", response.data
+        self.assertIn("prefix_length", response.data)
+        self.assertEqual(response.data["prefix_length"], "This field must be an integer.")

     def test_create_multiple_available_prefixes(self):
         """
@@ -1482,6 +1484,15 @@ class VLANGroupTest(APIViewTestCases.APIViewTestCase):
             f"Invalid VLAN Group requested: {some_other_vlan_group}. Only VLAN Group {self.vlan_group} is permitted.",
             response.data["detail"],
         )
+        invalid_id = uuid.uuid4()
+        data[0]["vlan_group"] = invalid_id  # Invalid UUID
+        response = self.client.post(url, data, format="json", **self.header)
+        self.assertHttpStatus(response, status.HTTP_204_NO_CONTENT)
+        self.assertIn("detail", response.data)
+        self.assertEqual(
+            f"VLAN Group with pk {invalid_id} does not exist.",
+            response.data["detail"],
+        )

     def test_create_available_vlans_with_permissions_constraint(self):
         url = reverse("ipam-api:vlangroup-available-vlans", kwargs={"pk": self.vlan_group.pk})