nautobot 2.2.1__py3-none-any.whl → 2.2.3__py3-none-any.whl
This diff compares the contents of two publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only and reflects the packages exactly as published.
- nautobot/apps/jobs.py +2 -0
- nautobot/core/api/utils.py +12 -9
- nautobot/core/apps/__init__.py +2 -2
- nautobot/core/celery/__init__.py +79 -68
- nautobot/core/celery/backends.py +9 -1
- nautobot/core/celery/control.py +4 -7
- nautobot/core/celery/schedulers.py +4 -2
- nautobot/core/celery/task.py +78 -5
- nautobot/core/graphql/schema.py +2 -1
- nautobot/core/jobs/__init__.py +2 -1
- nautobot/core/templates/generic/object_list.html +3 -3
- nautobot/core/templatetags/helpers.py +66 -9
- nautobot/core/testing/__init__.py +6 -1
- nautobot/core/testing/api.py +12 -13
- nautobot/core/testing/mixins.py +2 -2
- nautobot/core/testing/views.py +50 -51
- nautobot/core/tests/test_api.py +23 -2
- nautobot/core/tests/test_templatetags_helpers.py +32 -0
- nautobot/core/tests/test_views.py +21 -1
- nautobot/core/tests/test_views_utils.py +22 -1
- nautobot/core/utils/module_loading.py +89 -0
- nautobot/core/views/generic.py +4 -4
- nautobot/core/views/mixins.py +4 -3
- nautobot/core/views/utils.py +3 -2
- nautobot/core/wsgi.py +9 -2
- nautobot/dcim/choices.py +14 -0
- nautobot/dcim/forms.py +59 -4
- nautobot/dcim/models/device_components.py +9 -5
- nautobot/dcim/templates/dcim/device/lldp_neighbors.html +2 -2
- nautobot/dcim/templates/dcim/devicefamily_retrieve.html +1 -1
- nautobot/dcim/templates/dcim/location.html +32 -13
- nautobot/dcim/templates/dcim/location_migrate_data_to_contact.html +102 -0
- nautobot/dcim/tests/test_forms.py +49 -2
- nautobot/dcim/tests/test_views.py +137 -0
- nautobot/dcim/urls.py +5 -0
- nautobot/dcim/views.py +149 -1
- nautobot/extras/api/views.py +21 -10
- nautobot/extras/constants.py +3 -3
- nautobot/extras/context_managers.py +56 -0
- nautobot/extras/datasources/git.py +47 -58
- nautobot/extras/forms/forms.py +3 -1
- nautobot/extras/jobs.py +79 -146
- nautobot/extras/models/datasources.py +0 -2
- nautobot/extras/models/jobs.py +36 -18
- nautobot/extras/plugins/__init__.py +1 -20
- nautobot/extras/signals.py +88 -57
- nautobot/extras/test_jobs/__init__.py +8 -0
- nautobot/extras/test_jobs/dry_run.py +3 -2
- nautobot/extras/test_jobs/fail.py +43 -0
- nautobot/extras/test_jobs/ipaddress_vars.py +40 -1
- nautobot/extras/test_jobs/jobs_module/__init__.py +5 -0
- nautobot/extras/test_jobs/jobs_module/jobs_submodule/__init__.py +1 -0
- nautobot/extras/test_jobs/jobs_module/jobs_submodule/jobs.py +6 -0
- nautobot/extras/test_jobs/pass.py +40 -0
- nautobot/extras/test_jobs/relative_import.py +11 -0
- nautobot/extras/tests/test_api.py +3 -0
- nautobot/extras/tests/test_context_managers.py +98 -1
- nautobot/extras/tests/test_datasources.py +125 -118
- nautobot/extras/tests/test_job_variables.py +57 -15
- nautobot/extras/tests/test_jobs.py +135 -1
- nautobot/extras/tests/test_models.py +26 -19
- nautobot/extras/tests/test_plugins.py +1 -3
- nautobot/extras/tests/test_views.py +2 -4
- nautobot/extras/utils.py +37 -0
- nautobot/extras/views.py +47 -95
- nautobot/ipam/api/views.py +8 -1
- nautobot/ipam/graphql/types.py +11 -0
- nautobot/ipam/mixins.py +32 -0
- nautobot/ipam/models.py +2 -1
- nautobot/ipam/querysets.py +6 -1
- nautobot/ipam/tables.py +1 -1
- nautobot/ipam/tests/test_models.py +82 -0
- nautobot/project-static/docs/assets/extra.css +4 -0
- nautobot/project-static/docs/code-reference/nautobot/apps/api.html +1 -1
- nautobot/project-static/docs/code-reference/nautobot/apps/jobs.html +180 -211
- nautobot/project-static/docs/development/apps/api/platform-features/jobs.html +1 -1
- nautobot/project-static/docs/development/core/application-registry.html +126 -84
- nautobot/project-static/docs/development/core/model-checklist.html +49 -1
- nautobot/project-static/docs/development/core/model-features.html +1 -1
- nautobot/project-static/docs/development/jobs/index.html +334 -58
- nautobot/project-static/docs/development/jobs/migration/from-v1.html +1 -1
- nautobot/project-static/docs/objects.inv +0 -0
- nautobot/project-static/docs/release-notes/version-1.6.html +504 -201
- nautobot/project-static/docs/release-notes/version-2.2.html +392 -43
- nautobot/project-static/docs/search/search_index.json +1 -1
- nautobot/project-static/docs/sitemap.xml +254 -254
- nautobot/project-static/docs/sitemap.xml.gz +0 -0
- nautobot/project-static/docs/user-guide/administration/upgrading/from-v1/upgrading-from-nautobot-v1.html +7 -4
- nautobot/project-static/docs/user-guide/core-data-model/ipam/vlan.html +111 -0
- nautobot/project-static/docs/user-guide/platform-functionality/jobs/index.html +15 -28
- nautobot/project-static/docs/user-guide/platform-functionality/jobs/models.html +4 -4
- nautobot/project-static/js/forms.js +18 -11
- {nautobot-2.2.1.dist-info → nautobot-2.2.3.dist-info}/METADATA +3 -3
- {nautobot-2.2.1.dist-info → nautobot-2.2.3.dist-info}/RECORD +98 -92
- nautobot/extras/test_jobs/job_variables.py +0 -93
- {nautobot-2.2.1.dist-info → nautobot-2.2.3.dist-info}/LICENSE.txt +0 -0
- {nautobot-2.2.1.dist-info → nautobot-2.2.3.dist-info}/NOTICE +0 -0
- {nautobot-2.2.1.dist-info → nautobot-2.2.3.dist-info}/WHEEL +0 -0
- {nautobot-2.2.1.dist-info → nautobot-2.2.3.dist-info}/entry_points.txt +0 -0
nautobot/extras/plugins/__init__.py CHANGED

@@ -31,7 +31,6 @@ logger = getLogger(__name__)
 registry["plugin_banners"] = []
 registry["plugin_custom_validators"] = collections.defaultdict(list)
 registry["plugin_graphql_types"] = []
-registry["plugin_jobs"] = []
 registry["plugin_template_extensions"] = collections.defaultdict(list)
 registry["app_metrics"] = []

@@ -141,9 +140,9 @@ class NautobotAppConfig(NautobotConfig):
         register_graphql_types(graphql_types)

         # Import jobs (if present)
+        # Note that we do *not* auto-call `register_jobs()` - the App is responsible for doing so when imported.
         jobs = import_object(f"{self.__module__}.{self.jobs}")
         if jobs is not None:
-            register_jobs(jobs)
             self.features["jobs"] = jobs

         # Import metrics (if present)
@@ -423,24 +422,6 @@ def register_graphql_types(class_list):
         registry["plugin_graphql_types"].append(item)


-def register_jobs(class_list):
-    """
-    Register a list of Job classes
-    """
-    from nautobot.extras.jobs import Job
-
-    for job in class_list:
-        if not inspect.isclass(job):
-            raise TypeError(f"Job class {job} was passed as an instance!")
-        if not issubclass(job, Job):
-            raise TypeError(f"{job} is not a subclass of extras.jobs.Job!")
-
-        registry["plugin_jobs"].append(job)
-
-    # Note that we do not (and cannot) update the Job records in the Nautobot database at this time.
-    # That is done in response to the `nautobot_database_ready` signal, see nautobot.extras.signals.refresh_job_models
-
-
 def register_metrics(function_list):
     """
     Register a list of metric functions
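
With `register_jobs()` removed from `nautobot.extras.plugins` (along with the `plugin_jobs` registry entry), an App's jobs module is now expected to register its own jobs at import time using `register_jobs` from `nautobot.core.celery`, exactly as the new comment above states and as the test jobs later in this diff do. A minimal sketch of what an App's jobs module might look like under this model; `ExampleJob` and its contents are illustrative, not part of this diff:

# jobs.py of a hypothetical App
from nautobot.core.celery import register_jobs
from nautobot.extras.jobs import get_task_logger, Job

logger = get_task_logger(__name__)


class ExampleJob(Job):
    """A do-nothing Job used only to illustrate self-registration."""

    def run(self):
        logger.info("ExampleJob ran")


# Registration happens when this module is imported; Nautobot no longer calls register_jobs() on the App's behalf.
register_jobs(ExampleJob)
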
nautobot/extras/signals.py CHANGED

@@ -21,7 +21,7 @@ from django.utils import timezone
 from django_prometheus.models import model_deletes, model_inserts, model_updates
 import redis.exceptions

-from nautobot.core.celery import app,
+from nautobot.core.celery import app, import_jobs
 from nautobot.core.models import BaseModel
 from nautobot.core.utils.config import get_settings_or_config
 from nautobot.core.utils.logging import sanitize
@@ -103,7 +103,9 @@ def _handle_changed_object(sender, instance, raw=False, **kwargs):
     if raw:
         return

-
+    change_context = change_context_state.get()
+
+    if change_context is None:
         return

     # Determine the type of change being made
@@ -119,36 +121,49 @@ def _handle_changed_object(sender, instance, raw=False, **kwargs):

     # Record an ObjectChange if applicable
     if hasattr(instance, "to_objectchange"):
-        user =
+        user = change_context.get_user(instance)
         # save a copy of this instance's field cache so it can be restored after serialization
         # to prevent unexpected behavior when chaining multiple signal handlers
         original_cache = instance._state.fields_cache.copy()

+        changed_object_type = ContentType.objects.get_for_model(instance)
+        changed_object_id = instance.id
+
+        # Generate a unique identifier for this change to stash in the change context
+        # This is used for deferred change logging and for looking up related changes without querying the database
+        unique_object_change_id = f"{changed_object_type.pk}__{changed_object_id}__{user.pk}"
+
         # If a change already exists for this change_id, user, and object, update it instead of creating a new one.
         # If the object was deleted then recreated with the same pk (don't do this), change the action to update.
-
-
-
-
-
-
-
-
-
-        if
-
-
-
-
-
+        if unique_object_change_id in change_context.deferred_object_changes:
+            related_changes = ObjectChange.objects.filter(
+                changed_object_type=changed_object_type,
+                changed_object_id=changed_object_id,
+                user=user,
+                request_id=change_context.change_id,
+            )
+
+            # Skip the database check when deferring object changes
+            if not change_context.defer_object_changes and related_changes.exists():
+                objectchange = instance.to_objectchange(action)
+                most_recent_change = related_changes.order_by("-time").first()
+                if most_recent_change.action == ObjectChangeActionChoices.ACTION_DELETE:
+                    most_recent_change.action = ObjectChangeActionChoices.ACTION_UPDATE
+                most_recent_change.object_data = objectchange.object_data
+                most_recent_change.object_data_v2 = objectchange.object_data_v2
+                most_recent_change.save()
+
         else:
-
-
-            objectchange.change_context = change_context_state.get().context
-            objectchange.change_context_detail = change_context_state.get().context_detail[
-                :CHANGELOG_MAX_CHANGE_CONTEXT_DETAIL
+            change_context.deferred_object_changes[unique_object_change_id] = [
+                {"action": action, "instance": instance, "user": user}
            ]
-
+            if not change_context.defer_object_changes:
+                objectchange = instance.to_objectchange(action)
+                objectchange.user = user
+                objectchange.request_id = change_context.change_id
+                objectchange.change_context = change_context.context
+                objectchange.change_context_detail = change_context.context_detail[:CHANGELOG_MAX_CHANGE_CONTEXT_DETAIL]
+                objectchange.save()

         # restore field cache
         instance._state.fields_cache = original_cache
@@ -171,7 +186,9 @@ def _handle_deleted_object(sender, instance, **kwargs):
     """
     Fires when an object is deleted.
     """
-
+    change_context = change_context_state.get()
+
+    if change_context is None:
         return

     if isinstance(instance, BaseModel):
@@ -186,41 +203,58 @@ def _handle_deleted_object(sender, instance, **kwargs):

     # Record an ObjectChange if applicable
     if hasattr(instance, "to_objectchange"):
-        user =
+        user = change_context.get_user(instance)

         # save a copy of this instance's field cache so it can be restored after serialization
         # to prevent unexpected behavior when chaining multiple signal handlers
         original_cache = instance._state.fields_cache.copy()

+        changed_object_type = ContentType.objects.get_for_model(instance)
+        changed_object_id = instance.id
+
+        # Generate a unique identifier for this change to stash in the change context
+        # This is used for deferred change logging and for looking up related changes without querying the database
+        unique_object_change_id = f"{changed_object_type.pk}__{changed_object_id}__{user.pk}"
+        save_new_objectchange = True
+
         # if a change already exists for this change_id, user, and object, update it instead of creating a new one
         # except in the case that the object was created and deleted in the same change_id
         # we don't want to create a delete change for an object that never existed
-
-
-
-
-            request_id=change_context_state.get().change_id,
-        )
-        objectchange = instance.to_objectchange(ObjectChangeActionChoices.ACTION_DELETE)
-        save_new_objectchange = True
-        if related_changes.exists():
-            most_recent_change = related_changes.order_by("-time").first()
-            if most_recent_change.action != ObjectChangeActionChoices.ACTION_CREATE:
-                most_recent_change.action = ObjectChangeActionChoices.ACTION_DELETE
-                most_recent_change.object_data = objectchange.object_data
-                most_recent_change.object_data_v2 = objectchange.object_data_v2
-                most_recent_change.save()
-                objectchange = most_recent_change
+        if unique_object_change_id in change_context.deferred_object_changes:
+            cached_related_change = change_context.deferred_object_changes[unique_object_change_id][-1]
+            if cached_related_change["action"] != ObjectChangeActionChoices.ACTION_CREATE:
+                cached_related_change["action"] = ObjectChangeActionChoices.ACTION_DELETE
                 save_new_objectchange = False

+        related_changes = ObjectChange.objects.filter(
+            changed_object_type=changed_object_type,
+            changed_object_id=changed_object_id,
+            user=user,
+            request_id=change_context.change_id,
+        )
+
+        # Skip the database check when deferring object changes
+        if not change_context.defer_object_changes and related_changes.exists():
+            objectchange = instance.to_objectchange(ObjectChangeActionChoices.ACTION_DELETE)
+            most_recent_change = related_changes.order_by("-time").first()
+            if most_recent_change.action != ObjectChangeActionChoices.ACTION_CREATE:
+                most_recent_change.action = ObjectChangeActionChoices.ACTION_DELETE
+                most_recent_change.object_data = objectchange.object_data
+                most_recent_change.object_data_v2 = objectchange.object_data_v2
+                most_recent_change.save()
+                save_new_objectchange = False
+
         if save_new_objectchange:
-
-
-
-
-
-
-
+            change_context.deferred_object_changes.setdefault(unique_object_change_id, []).append(
+                {"action": ObjectChangeActionChoices.ACTION_DELETE, "instance": instance, "user": user}
+            )
+            if not change_context.defer_object_changes:
+                objectchange = instance.to_objectchange(ObjectChangeActionChoices.ACTION_DELETE)
+                objectchange.user = user
+                objectchange.request_id = change_context.change_id
+                objectchange.change_context = change_context.context
+                objectchange.change_context_detail = change_context.context_detail[:CHANGELOG_MAX_CHANGE_CONTEXT_DETAIL]
+                objectchange.save()

         # restore field cache
         instance._state.fields_cache = original_cache
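
Both handlers above now route their bookkeeping through the active change context instead of querying the database on every save or delete. A minimal standalone sketch of the bookkeeping shape they maintain (illustrative only; the real structure lives on the change context object managed by `nautobot.extras.context_managers`):

# Keys identify one object + user within a request; values accumulate the pending actions.
deferred_object_changes = {}

def record_pending_change(changed_object_type_pk, changed_object_id, user_pk, action, instance, user):
    """Stash a pending change the same way _handle_changed_object / _handle_deleted_object do."""
    unique_object_change_id = f"{changed_object_type_pk}__{changed_object_id}__{user_pk}"
    deferred_object_changes.setdefault(unique_object_change_id, []).append(
        {"action": action, "instance": instance, "user": user}
    )
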
@@ -299,7 +333,7 @@ def git_repository_pre_delete(instance, **kwargs):
     app.control.broadcast("discard_git_repository", repository_slug=instance.slug)
     # But we don't have an equivalent way to broadcast to any other Django instances.
     # For now we just delete the one that we have locally and rely on other methods,
-    # such as the
+    # such as the import_jobs() signal that runs on server startup,
     # to clean up other clones as they're encountered.
     if os.path.isdir(instance.filesystem_path):
         shutil.rmtree(instance.filesystem_path)
@@ -428,7 +462,7 @@ def refresh_job_models(sender, *, apps, **kwargs):
     """
    Callback for the nautobot_database_ready signal; updates Jobs in the database based on Job source file availability.
     """
-    from nautobot.extras.jobs import
+    from nautobot.extras.jobs import get_jobs  # avoid circular import

     Job = apps.get_model("extras", "Job")

@@ -437,15 +471,12 @@
         logger.info("Skipping refresh_job_models() as it appears Job model has not yet been migrated to latest.")
         return

-
+    import_jobs()

     job_models = []
-    for task in app.tasks.values():
-        # Skip Celery tasks that aren't Jobs
-        if not isinstance(task, JobClass):
-            continue

-
+    for job_class in get_jobs().values():
+        job_model, _ = refresh_job_model_from_job_class(Job, job_class)
         if job_model is not None:
             job_models.append(job_model)

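
As the rewritten loop shows, `refresh_job_models()` no longer scans `app.tasks` for Job subclasses; it calls `import_jobs()` and then drives the refresh from `get_jobs()`. A rough sketch of how that registry can be inspected, assuming only what the loop above implies (that `get_jobs()` returns a dict whose values are Job classes):

from nautobot.extras.jobs import get_jobs

# Values are the registered Job classes themselves, which is all
# refresh_job_models() needs to create or update the Job database records.
for key, job_class in get_jobs().items():
    print(key, job_class.__module__, job_class.__name__)
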
nautobot/extras/test_jobs/__init__.py CHANGED

@@ -0,0 +1,8 @@
+def load_tests(*args):
+    """Implement unittest discovery for this submodule as a no-op.
+
+    This prevents unittest from recursively loading all of the modules under this directory to inspect whether they
+    define test cases. This is necessary because otherwise the `jobs_module` submodule will get loaded when tests run,
+    which will in turn call `register_jobs()`, incorrectly/unexpectedly registering the test Job defined in that module
+    as if it were a system Job, which will cause tests to fail due to the unexpected presence of this Job.
+    """
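
For context, `load_tests` is the standard unittest discovery hook: when a package defines it, the test loader hands discovery for that package to the hook instead of recursing into its modules, which is exactly what the no-op above relies on to keep the job modules from being imported. A generic sketch of the non-no-op form of the protocol, for comparison (not Nautobot code):

import unittest

def load_tests(loader, standard_tests, pattern):
    # Discovery calls this hook for the package; whatever TestSuite it returns
    # is what gets run, and the loader does not recurse further on its own.
    suite = unittest.TestSuite()
    suite.addTests(standard_tests)
    return suite
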
nautobot/extras/test_jobs/dry_run.py CHANGED

@@ -1,5 +1,5 @@
 from nautobot.core.celery import register_jobs
-from nautobot.extras.jobs import DryRunVar, get_task_logger, Job
+from nautobot.extras.jobs import DryRunVar, get_task_logger, IntegerVar, Job
 from nautobot.extras.models import Status

 logger = get_task_logger(__name__)
@@ -11,8 +11,9 @@ class TestDryRun(Job):
     """

     dryrun = DryRunVar()
+    value = IntegerVar(required=False)

-    def run(self, dryrun):
+    def run(self, dryrun, value=None):
         """
         Job function.
         """
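
`DryRunVar` is the variable type that marks a Job as supporting dry-run, and the boolean it produces is passed into `run()` so the job can skip its side effects. A minimal sketch of the usual guard pattern (illustrative only; `ExampleDryRunJob` is not part of this diff):

from nautobot.core.celery import register_jobs
from nautobot.extras.jobs import DryRunVar, get_task_logger, Job

logger = get_task_logger(__name__)


class ExampleDryRunJob(Job):
    dryrun = DryRunVar()

    def run(self, dryrun):
        if dryrun:
            logger.info("Dry run: no changes were made")
            return
        # ... real changes would happen here ...


register_jobs(ExampleDryRunJob)
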
nautobot/extras/test_jobs/fail.py CHANGED

@@ -1,4 +1,7 @@
+from billiard.einfo import ExceptionInfo
+
 from nautobot.core.celery import register_jobs
+from nautobot.extras.choices import JobResultStatusChoices
 from nautobot.extras.jobs import get_task_logger, Job, RunJobTaskFailed

 logger = get_task_logger(__name__)
@@ -11,6 +14,15 @@ class TestFail(Job):

     description = "Validate job import"

+    def before_start(self, task_id, args, kwargs):
+        if task_id != self.request.id:
+            raise RuntimeError(f"Expected task_id {task_id} to equal self.request.id {self.request.id}")
+        if args:
+            raise RuntimeError(f"Expected args to be empty, but it was {args!r}")
+        if kwargs:
+            raise RuntimeError(f"Expected kwargs to be empty, but it was {kwargs!r}")
+        logger.info("before_start() was called as expected")
+
     def run(self):
         """
         Job function.
@@ -18,6 +30,37 @@ class TestFail(Job):
         logger.info("I'm a test job that fails!")
         raise RunJobTaskFailed("Test failure")

+    def on_success(self, retval, task_id, args, kwargs):
+        raise RuntimeError("on_success() was unexpectedly called!")
+
+    def on_failure(self, exc, task_id, args, kwargs, einfo):
+        if not isinstance(exc, RunJobTaskFailed):
+            raise RuntimeError(f"Expected exc to be a RunJobTaskFailed, but it was {exc!r}")
+        if task_id != self.request.id:
+            raise RuntimeError(f"Expected task_id {task_id} to equal self.request.id {self.request.id}")
+        if args:
+            raise RuntimeError(f"Expected args to be empty, but it was {args!r}")
+        if kwargs:
+            raise RuntimeError(f"Expected kwargs to be empty, but it was {kwargs!r}")
+        if not isinstance(einfo, ExceptionInfo):
+            raise RuntimeError(f"Expected einfo to be an ExceptionInfo, but it was {einfo!r}")
+        logger.info("on_failure() was called as expected")
+
+    def after_return(self, status, retval, task_id, args, kwargs, einfo):
+        if status is not JobResultStatusChoices.STATUS_FAILURE:
+            raise RuntimeError(f"Expected status to be {JobResultStatusChoices.STATUS_FAILURE}, but it was {status!r}")
+        if not isinstance(retval, RunJobTaskFailed):
+            raise RuntimeError(f"Expected retval to be a RunJobTaskFailed, but it was {retval!r}")
+        if task_id != self.request.id:
+            raise RuntimeError(f"Expected task_id {task_id} to equal self.request.id {self.request.id}")
+        if args:
+            raise RuntimeError(f"Expected args to be empty, but it was {args!r}")
+        if kwargs:
+            raise RuntimeError(f"Expected kwargs to be empty, but it was {kwargs!r}")
+        if not isinstance(einfo, ExceptionInfo):
+            raise RuntimeError(f"Expected einfo to be an ExceptionInfo, but it was {einfo!r}")
+        logger.info("after_return() was called as expected")
+

 class TestFailWithSanitization(Job):
     """
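
The hooks exercised here (`before_start`, `on_success`, `on_failure`, `after_return`) are the standard Celery `Task` lifecycle callbacks, which Nautobot Jobs inherit; these test jobs assert that the worker invokes them with the expected arguments. A condensed sketch of a Job using them for setup/teardown-style logging, reusing the same signatures as above (illustrative only):

from nautobot.core.celery import register_jobs
from nautobot.extras.jobs import get_task_logger, Job

logger = get_task_logger(__name__)


class LifecycleLoggingJob(Job):
    def before_start(self, task_id, args, kwargs):
        logger.info("starting task %s", task_id)

    def run(self):
        return "done"

    def on_success(self, retval, task_id, args, kwargs):
        logger.info("task %s succeeded with %r", task_id, retval)

    def on_failure(self, exc, task_id, args, kwargs, einfo):
        logger.warning("task %s failed: %r", task_id, exc)

    def after_return(self, status, retval, task_id, args, kwargs, einfo):
        logger.info("task %s finished with status %s", task_id, status)


register_jobs(LifecycleLoggingJob)
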
nautobot/extras/test_jobs/ipaddress_vars.py CHANGED

@@ -1,5 +1,7 @@
 import json

+import netaddr
+
 from nautobot.core.celery import register_jobs
 from nautobot.extras.jobs import get_task_logger, IPAddressVar, IPAddressWithMaskVar, Job

@@ -30,7 +32,26 @@ class TestIPAddresses(Job):
         description="IPv6 network",
     )

-    def
+    def before_start(self, task_id, args, kwargs):
+        for expected_kwarg in self._get_vars().keys():
+            if expected_kwarg not in kwargs:
+                raise RuntimeError(f"kwargs should contain {expected_kwarg} but it doesn't!")
+            if kwargs[expected_kwarg] is None:
+                raise RuntimeError(f"kwargs[{expected_kwarg}] is unexpectedly None!")
+
+    def run(self, *, ipv4_address, ipv4_with_mask, ipv4_network, ipv6_address, ipv6_with_mask, ipv6_network):
+        if not isinstance(ipv4_address, netaddr.IPAddress):
+            raise RuntimeError(f"Expected ipv4_address to be a netaddr.IPAddress, but it was {ipv4_address!r}")
+        if not isinstance(ipv4_with_mask, netaddr.IPNetwork):
+            raise RuntimeError(f"Expected ipv4_with_mask to be a netaddr.IPNetwork, but it was {ipv4_with_mask!r}")
+        if not isinstance(ipv4_network, netaddr.IPNetwork):
+            raise RuntimeError(f"Expected ipv4_network to be a netaddr.IPNetwork, but it was {ipv4_network!r}")
+        if not isinstance(ipv6_address, netaddr.IPAddress):
+            raise RuntimeError(f"Expected ipv6_address to be a netaddr.IPAddress, but it was {ipv6_address!r}")
+        if not isinstance(ipv6_with_mask, netaddr.IPNetwork):
+            raise RuntimeError(f"Expected ipv6_with_mask to be a netaddr.IPNetwork, but it was {ipv6_with_mask!r}")
+        if not isinstance(ipv6_network, netaddr.IPNetwork):
+            raise RuntimeError(f"Expected ipv6_network to be a netaddr.IPNetwork, but it was {ipv6_network!r}")
         # Log the data as JSON so we can pull it back out for testing.
         logger.info(
             "IP Address Test",
@@ -59,5 +80,23 @@ class TestIPAddresses(Job):

         return "Nice IPs, bro."

+    def on_success(self, retval, task_id, args, kwargs):
+        if retval != "Nice IPs, bro.":
+            raise RuntimeError(f"retval is unexpected: {retval!r}")
+        for expected_kwarg in self._get_vars().keys():
+            if expected_kwarg not in kwargs:
+                raise RuntimeError(f"kwargs should contain {expected_kwarg} but it doesn't!")
+            if kwargs[expected_kwarg] is None:
+                raise RuntimeError(f"kwargs[{expected_kwarg}] is unexpectedly None!")
+
+    def after_return(self, status, retval, task_id, args, kwargs, einfo):
+        if retval != "Nice IPs, bro.":
+            raise RuntimeError(f"retval is unexpected: {retval!r}")
+        for expected_kwarg in self._get_vars().keys():
+            if expected_kwarg not in kwargs:
+                raise RuntimeError(f"kwargs should contain {expected_kwarg} but it doesn't!")
+            if kwargs[expected_kwarg] is None:
+                raise RuntimeError(f"kwargs[{expected_kwarg}] is unexpectedly None!")
+

 register_jobs(TestIPAddresses)
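
As the assertions above spell out, the `*_address` inputs arrive as `netaddr.IPAddress` objects while the `*_with_mask` and `*_network` inputs arrive as `netaddr.IPNetwork` objects. A quick standalone illustration of the difference between those two netaddr types:

import netaddr

addr = netaddr.IPAddress("192.0.2.1")    # a single address, no prefix length
net = netaddr.IPNetwork("192.0.2.0/24")  # an address plus a mask/prefix

print(addr.version, net.prefixlen)  # 4 24
print(netaddr.IPAddress("2001:db8::1") in netaddr.IPNetwork("2001:db8::/32"))  # True
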
nautobot/extras/test_jobs/jobs_module/jobs_submodule/__init__.py

@@ -0,0 +1 @@
+from .jobs import ChildJob  # noqa: F401
nautobot/extras/test_jobs/pass.py CHANGED

@@ -1,4 +1,5 @@
 from nautobot.core.celery import register_jobs
+from nautobot.extras.choices import JobResultStatusChoices
 from nautobot.extras.jobs import get_task_logger, Job

 logger = get_task_logger(__name__)
@@ -14,11 +15,50 @@ class TestPass(Job):
     class Meta:
         has_sensitive_variables = False

+    def before_start(self, task_id, args, kwargs):
+        if task_id != self.request.id:
+            raise RuntimeError(f"Expected task_id {task_id} to equal self.request.id {self.request.id}")
+        if args:
+            raise RuntimeError(f"Expected args to be empty, but it was {args!r}")
+        if kwargs:
+            raise RuntimeError(f"Expected kwargs to be empty, but it was {kwargs!r}")
+        logger.info("before_start() was called as expected")
+
     def run(self):
         """
         Job function.
         """
         logger.info("Success")
+        return True
+
+    def on_success(self, retval, task_id, args, kwargs):
+        if retval is not True:
+            raise RuntimeError(f"Expected retval to be True, but it was {retval!r}")
+        if task_id != self.request.id:
+            raise RuntimeError(f"Expected task_id {task_id} to equal self.request.id {self.request.id}")
+        if args:
+            raise RuntimeError(f"Expected args to be empty, but it was {args!r}")
+        if kwargs:
+            raise RuntimeError(f"Expected kwargs to be empty, but it was {kwargs!r}")
+        logger.info("on_success() was called as expected")
+
+    def on_failure(self, exc, task_id, args, kwargs, einfo):
+        raise RuntimeError("on_failure() was unexpectedly called!")
+
+    def after_return(self, status, retval, task_id, args, kwargs, einfo):
+        if status is not JobResultStatusChoices.STATUS_SUCCESS:
+            raise RuntimeError(f"Expected status to be {JobResultStatusChoices.STATUS_SUCCESS}, but it was {status!r}")
+        if retval is not True:
+            raise RuntimeError(f"Expected retval to be True, but it was {retval!r}")
+        if task_id != self.request.id:
+            raise RuntimeError(f"Expected task_id {task_id} to equal self.request.id {self.request.id}")
+        if args:
+            raise RuntimeError(f"Expected args to be empty, but it was {args!r}")
+        if kwargs:
+            raise RuntimeError(f"Expected kwargs to be empty, but it was {kwargs!r}")
+        if einfo is not None:
+            raise RuntimeError(f"Expected einfo to be None, but it was {einfo!r}")
+        logger.info("after_return() was called as expected")


 register_jobs(TestPass)
nautobot/extras/tests/test_api.py CHANGED

@@ -2303,6 +2303,7 @@ class JobApprovalTest(APITestCase):
             name="test dryrun",
             task="dry_run.TestDryRun",
             job_model=cls.dryrun_job_model,
+            kwargs={"value": 1},
             interval=JobExecutionType.TYPE_IMMEDIATELY,
             user=cls.additional_user,
             approval_required=True,
@@ -2442,6 +2443,8 @@ class JobApprovalTest(APITestCase):
         url = reverse("extras-api:scheduledjob-dry-run", kwargs={"pk": self.dryrun_scheduled_job.pk})
         response = self.client.post(url, **self.header)
         self.assertHttpStatus(response, status.HTTP_200_OK)
+        # The below fails because JobResult.task_kwargs doesn't get set until *after* the task begins executing.
+        # self.assertEqual(response.data["task_kwargs"], {"dryrun": True, "value": 1}, response.data)

     @override_settings(EXEMPT_VIEW_PERMISSIONS=["*"])
     def test_dry_run_not_supported(self):
nautobot/extras/tests/test_context_managers.py CHANGED

@@ -7,7 +7,10 @@ from nautobot.core.testing import TransactionTestCase
 from nautobot.core.utils.lookup import get_changes_for_model
 from nautobot.dcim.models import Location, LocationType
 from nautobot.extras.choices import ObjectChangeActionChoices, ObjectChangeEventContextChoices
-from nautobot.extras.context_managers import
+from nautobot.extras.context_managers import (
+    deferred_change_logging_for_bulk_operation,
+    web_request_context,
+)
 from nautobot.extras.models import Status, Webhook

 # Use the proper swappable User model
@@ -193,3 +196,97 @@ class WebRequestContextTransactionTestCase(TransactionTestCase):
             Status.objects.create(name="Test Status 2")

         self.assertEqual(get_changes_for_model(Status).count(), 2)
+
+
+class BulkEditDeleteChangeLogging(TestCase):
+    def setUp(self):
+        self.user = User.objects.create_user(
+            username="jacob",
+            email="jacob@example.com",
+            password="top_secret",  # noqa: S106 # hardcoded-password-func-arg -- ok as this is test code only
+        )
+
+    def test_change_log_created(self):
+        location_type = LocationType.objects.get(name="Campus")
+        location_status = Status.objects.get_for_model(Location).first()
+        with web_request_context(self.user):
+            with deferred_change_logging_for_bulk_operation():
+                location = Location(name="Test Location 1", location_type=location_type, status=location_status)
+                location.save()
+
+        location = Location.objects.get(name="Test Location 1")
+        oc_list = get_changes_for_model(location).order_by("pk")
+        self.assertEqual(len(oc_list), 1)
+        self.assertEqual(oc_list[0].changed_object, location)
+        self.assertEqual(oc_list[0].action, ObjectChangeActionChoices.ACTION_CREATE)
+
+    def test_delete(self):
+        """Test that deletes raise an exception"""
+        location_type = LocationType.objects.get(name="Campus")
+        location_status = Status.objects.get_for_model(Location).first()
+        with self.assertRaises(ValueError):
+            with web_request_context(self.user):
+                with deferred_change_logging_for_bulk_operation():
+                    location = Location(name="Test Location 1", location_type=location_type, status=location_status)
+                    location.save()
+                    location.delete()
+
+    def test_create_then_update(self):
+        """Test that a create followed by an update is logged as a single create"""
+        location_type = LocationType.objects.get(name="Campus")
+        location_status = Status.objects.get_for_model(Location).first()
+        with web_request_context(self.user):
+            with deferred_change_logging_for_bulk_operation():
+                location = Location(name="Test Location 1", location_type=location_type, status=location_status)
+                location.save()
+                location.description = "changed"
+                location.save()
+
+        oc_list = get_changes_for_model(location)
+        self.assertEqual(len(oc_list), 1)
+        self.assertEqual(oc_list[0].action, ObjectChangeActionChoices.ACTION_CREATE)
+        snapshots = oc_list[0].get_snapshots()
+        self.assertIsNone(snapshots["prechange"])
+        self.assertIsNotNone(snapshots["postchange"])
+        self.assertIsNone(snapshots["differences"]["removed"])
+        self.assertEqual(snapshots["differences"]["added"]["description"], "changed")
+
+    def test_bulk_edit(self):
+        """Test that edits to multiple objects are correctly logged"""
+        location_type = LocationType.objects.get(name="Campus")
+        location_status = Status.objects.get_for_model(Location).first()
+        locations = [
+            Location(name=f"Test Location {i}", location_type=location_type, status=location_status)
+            for i in range(1, 4)
+        ]
+        Location.objects.bulk_create(locations)
+        with web_request_context(self.user):
+            with deferred_change_logging_for_bulk_operation():
+                for location in locations:
+                    location.description = "changed"
+                    location.save()
+
+        oc_list = get_changes_for_model(Location)
+        self.assertEqual(len(oc_list), 3)
+        for oc in oc_list:
+            self.assertEqual(oc.action, ObjectChangeActionChoices.ACTION_UPDATE)
+            snapshots = oc.get_snapshots()
+            self.assertIsNone(snapshots["prechange"])
+            self.assertIsNotNone(snapshots["postchange"])
+            self.assertIsNone(snapshots["differences"]["removed"])
+            self.assertEqual(snapshots["differences"]["added"]["description"], "changed")
+
+    def test_change_log_context(self):
+        location_type = LocationType.objects.get(name="Campus")
+        location_status = Status.objects.get_for_model(Location).first()
+        with web_request_context(self.user, context_detail="test_change_log_context"):
+            with deferred_change_logging_for_bulk_operation():
+                location = Location(name="Test Location 1", location_type=location_type, status=location_status)
+                location.save()
+
+        location = Location.objects.get(name="Test Location 1")
+        oc_list = get_changes_for_model(location)
+        with self.subTest():
+            self.assertEqual(oc_list[0].change_context, ObjectChangeEventContextChoices.CONTEXT_ORM)
+        with self.subTest():
+            self.assertEqual(oc_list[0].change_context_detail, "test_change_log_context")