nautobot 2.2.1__py3-none-any.whl → 2.2.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. nautobot/apps/jobs.py +2 -0
  2. nautobot/core/api/utils.py +12 -9
  3. nautobot/core/apps/__init__.py +2 -2
  4. nautobot/core/celery/__init__.py +79 -68
  5. nautobot/core/celery/backends.py +9 -1
  6. nautobot/core/celery/control.py +4 -7
  7. nautobot/core/celery/schedulers.py +4 -2
  8. nautobot/core/celery/task.py +78 -5
  9. nautobot/core/graphql/schema.py +2 -1
  10. nautobot/core/jobs/__init__.py +2 -1
  11. nautobot/core/templates/generic/object_list.html +3 -3
  12. nautobot/core/templatetags/helpers.py +66 -9
  13. nautobot/core/testing/__init__.py +6 -1
  14. nautobot/core/testing/api.py +12 -13
  15. nautobot/core/testing/mixins.py +2 -2
  16. nautobot/core/testing/views.py +50 -51
  17. nautobot/core/tests/test_api.py +23 -2
  18. nautobot/core/tests/test_templatetags_helpers.py +32 -0
  19. nautobot/core/tests/test_views.py +21 -1
  20. nautobot/core/tests/test_views_utils.py +22 -1
  21. nautobot/core/utils/module_loading.py +89 -0
  22. nautobot/core/views/generic.py +4 -4
  23. nautobot/core/views/mixins.py +4 -3
  24. nautobot/core/views/utils.py +3 -2
  25. nautobot/core/wsgi.py +9 -2
  26. nautobot/dcim/choices.py +14 -0
  27. nautobot/dcim/forms.py +59 -4
  28. nautobot/dcim/models/device_components.py +9 -5
  29. nautobot/dcim/templates/dcim/device/lldp_neighbors.html +2 -2
  30. nautobot/dcim/templates/dcim/devicefamily_retrieve.html +1 -1
  31. nautobot/dcim/templates/dcim/location.html +32 -13
  32. nautobot/dcim/templates/dcim/location_migrate_data_to_contact.html +102 -0
  33. nautobot/dcim/tests/test_forms.py +49 -2
  34. nautobot/dcim/tests/test_views.py +137 -0
  35. nautobot/dcim/urls.py +5 -0
  36. nautobot/dcim/views.py +149 -1
  37. nautobot/extras/api/views.py +21 -10
  38. nautobot/extras/constants.py +3 -3
  39. nautobot/extras/context_managers.py +56 -0
  40. nautobot/extras/datasources/git.py +47 -58
  41. nautobot/extras/forms/forms.py +3 -1
  42. nautobot/extras/jobs.py +79 -146
  43. nautobot/extras/models/datasources.py +0 -2
  44. nautobot/extras/models/jobs.py +36 -18
  45. nautobot/extras/plugins/__init__.py +1 -20
  46. nautobot/extras/signals.py +88 -57
  47. nautobot/extras/test_jobs/__init__.py +8 -0
  48. nautobot/extras/test_jobs/dry_run.py +3 -2
  49. nautobot/extras/test_jobs/fail.py +43 -0
  50. nautobot/extras/test_jobs/ipaddress_vars.py +40 -1
  51. nautobot/extras/test_jobs/jobs_module/__init__.py +5 -0
  52. nautobot/extras/test_jobs/jobs_module/jobs_submodule/__init__.py +1 -0
  53. nautobot/extras/test_jobs/jobs_module/jobs_submodule/jobs.py +6 -0
  54. nautobot/extras/test_jobs/pass.py +40 -0
  55. nautobot/extras/test_jobs/relative_import.py +11 -0
  56. nautobot/extras/tests/test_api.py +3 -0
  57. nautobot/extras/tests/test_context_managers.py +98 -1
  58. nautobot/extras/tests/test_datasources.py +125 -118
  59. nautobot/extras/tests/test_job_variables.py +57 -15
  60. nautobot/extras/tests/test_jobs.py +135 -1
  61. nautobot/extras/tests/test_models.py +26 -19
  62. nautobot/extras/tests/test_plugins.py +1 -3
  63. nautobot/extras/tests/test_views.py +2 -4
  64. nautobot/extras/utils.py +37 -0
  65. nautobot/extras/views.py +47 -95
  66. nautobot/ipam/api/views.py +8 -1
  67. nautobot/ipam/graphql/types.py +11 -0
  68. nautobot/ipam/mixins.py +32 -0
  69. nautobot/ipam/models.py +2 -1
  70. nautobot/ipam/querysets.py +6 -1
  71. nautobot/ipam/tables.py +1 -1
  72. nautobot/ipam/tests/test_models.py +82 -0
  73. nautobot/project-static/docs/assets/extra.css +4 -0
  74. nautobot/project-static/docs/code-reference/nautobot/apps/api.html +1 -1
  75. nautobot/project-static/docs/code-reference/nautobot/apps/jobs.html +180 -211
  76. nautobot/project-static/docs/development/apps/api/platform-features/jobs.html +1 -1
  77. nautobot/project-static/docs/development/core/application-registry.html +126 -84
  78. nautobot/project-static/docs/development/core/model-checklist.html +49 -1
  79. nautobot/project-static/docs/development/core/model-features.html +1 -1
  80. nautobot/project-static/docs/development/jobs/index.html +334 -58
  81. nautobot/project-static/docs/development/jobs/migration/from-v1.html +1 -1
  82. nautobot/project-static/docs/objects.inv +0 -0
  83. nautobot/project-static/docs/release-notes/version-1.6.html +504 -201
  84. nautobot/project-static/docs/release-notes/version-2.2.html +392 -43
  85. nautobot/project-static/docs/search/search_index.json +1 -1
  86. nautobot/project-static/docs/sitemap.xml +254 -254
  87. nautobot/project-static/docs/sitemap.xml.gz +0 -0
  88. nautobot/project-static/docs/user-guide/administration/upgrading/from-v1/upgrading-from-nautobot-v1.html +7 -4
  89. nautobot/project-static/docs/user-guide/core-data-model/ipam/vlan.html +111 -0
  90. nautobot/project-static/docs/user-guide/platform-functionality/jobs/index.html +15 -28
  91. nautobot/project-static/docs/user-guide/platform-functionality/jobs/models.html +4 -4
  92. nautobot/project-static/js/forms.js +18 -11
  93. {nautobot-2.2.1.dist-info → nautobot-2.2.3.dist-info}/METADATA +3 -3
  94. {nautobot-2.2.1.dist-info → nautobot-2.2.3.dist-info}/RECORD +98 -92
  95. nautobot/extras/test_jobs/job_variables.py +0 -93
  96. {nautobot-2.2.1.dist-info → nautobot-2.2.3.dist-info}/LICENSE.txt +0 -0
  97. {nautobot-2.2.1.dist-info → nautobot-2.2.3.dist-info}/NOTICE +0 -0
  98. {nautobot-2.2.1.dist-info → nautobot-2.2.3.dist-info}/WHEEL +0 -0
  99. {nautobot-2.2.1.dist-info → nautobot-2.2.3.dist-info}/entry_points.txt +0 -0
nautobot/extras/datasources/git.py CHANGED
@@ -7,7 +7,6 @@ import mimetypes
  import os
  from pathlib import Path
  import re
- import sys
  from urllib.parse import quote

  from django.conf import settings
@@ -17,9 +16,9 @@ from django.db import transaction
  from git import InvalidGitRepositoryError, Repo
  import yaml

- from nautobot.core.celery import app as celery_app
  from nautobot.core.utils.git import GitRepo
- from nautobot.dcim.models import Device, DeviceType, Location, Platform
+ from nautobot.core.utils.module_loading import import_modules_privately
+ from nautobot.dcim.models import Device, DeviceRedundancyGroup, DeviceType, Location, Platform
  from nautobot.extras.choices import (
  LogLevelChoices,
  SecretsGroupAccessTypeChoices,
@@ -36,7 +35,7 @@ from nautobot.extras.models import (
  Role,
  Tag,
  )
- from nautobot.extras.registry import DatasourceContent, register_datasource_contents
+ from nautobot.extras.registry import DatasourceContent, register_datasource_contents, registry
  from nautobot.extras.utils import refresh_job_model_from_job_class
  from nautobot.tenancy.models import Tenant, TenantGroup
  from nautobot.virtualization.models import Cluster, ClusterGroup, VirtualMachine
@@ -186,7 +185,7 @@ def ensure_git_repository(repository_record, logger=None, head=None): # pylint:
  def git_repository_dry_run(repository_record, logger): # pylint: disable=redefined-outer-name
  """Log the difference between local branch and remote branch files.
  Args:
- repository_record (GitRepository): The GitRepostiory instance to diff.
+ repository_record (GitRepository): The GitRepository instance to diff.
  logger (logging.Logger): Logger to log results to.
  """
  from_url, to_path, from_branch = get_repo_from_url_to_path_and_from_branch(repository_record)
@@ -272,6 +271,7 @@ def update_git_config_contexts(repository_record, job_result):
  "tenants",
  "tags",
  "dynamic_groups",
+ "device_redundancy_groups",
  ):
  if os.path.isdir(os.path.join(repository_record.filesystem_path, filter_type)):
  msg = (
@@ -402,6 +402,7 @@ def import_config_context(context_data, repository_record, job_result):
  ("tenants", Tenant),
  ("tags", Tag),
  ("dynamic_groups", DynamicGroup),
+ ("device_redundancy_groups", DeviceRedundancyGroup),
  ]:
  relations[key] = []
  for object_data in context_metadata.get(key, ()):
@@ -714,56 +715,41 @@ def delete_git_config_context_schemas(repository_record, job_result, preserve=()
  #


- def refresh_code_from_repository(repository_slug, consumer=None, skip_reimport=False):
+ def refresh_job_code_from_repository(repository_slug, skip_reimport=False, ignore_import_errors=True):
  """
- After cloning/updating a GitRepository on disk, call this function to reload and reregister the repo's Python code.
+ After cloning/updating/deleting a GitRepository on disk, call this function to reload and reregister its Python.

  Args:
- repository_slug (str): Repository directory in GIT_ROOT that was refreshed.
- consumer (celery.worker.Consumer): Celery Consumer to update as well
+ repository_slug (str): Repository directory in GIT_ROOT that was updated or deleted.
  skip_reimport (bool): If True, unload existing code from this repository but do not re-import it.
+ ignore_import_errors (bool): If True, any exceptions raised in the import will be caught and logged.
+ If False, exceptions will be re-raised after logging.
  """
- if settings.GIT_ROOT not in sys.path:
- sys.path.append(settings.GIT_ROOT)
-
- app = consumer.app if consumer is not None else celery_app
- # TODO: This is ugly, but when app.use_fast_trace_task is set (true by default), Celery calls
- # celery.app.trace.fast_trace_task(...) which assumes that all tasks are cached and have a valid `__trace__()`
- # function defined. In theory consumer.update_strategies() (below) should ensure this, but it doesn't
- # go far enough (possibly a discrepancy between the main worker process and the prefork executors?)
- # as we can and do still encounter errors where `task.__trace__` is unexpectedly None.
- # For now, simply disabling use_fast_trace_task forces the task trace function to be rebuilt each time,
- # which avoids the issue at the cost of very slight overhead.
- app.use_fast_trace_task = False
-
  # Unload any previous version of this module and its submodules if present
- for module_name in list(sys.modules):
- if module_name == repository_slug or module_name.startswith(f"{repository_slug}."):
- logger.debug("Unloading module %s", module_name)
- if module_name in app.loader.task_modules:
- app.loader.task_modules.remove(module_name)
- if module_name in sys.modules:
- del sys.modules[module_name]
-
- # Unregister any previous Celery tasks from this module
- for task_name in list(app.tasks):
- if task_name.startswith(f"{repository_slug}."):
- logger.debug("Unregistering Celery task %s", task_name)
- app.tasks.unregister(task_name)
- if consumer is not None and task_name in consumer.strategies:
- del consumer.strategies[task_name]
-
- if not skip_reimport:
- try:
- repository = GitRepository.objects.get(slug=repository_slug)
- if "extras.job" in repository.provided_contents:
- # Re-import Celery tasks from this module
- logger.debug("Importing Jobs from %s.jobs in GIT_ROOT", repository_slug)
- app.loader.import_task_module(f"{repository_slug}.jobs")
- if consumer is not None:
- consumer.update_strategies()
- except GitRepository.DoesNotExist as exc:
- logger.error("Unable to reload Jobs from %s.jobs: %s", repository_slug, exc)
+ for job_class_path in list(registry["jobs"]):
+ if job_class_path.startswith(f"{repository_slug}."):
+ del registry["jobs"][job_class_path]
+
+ if skip_reimport:
+ return
+
+ try:
+ repository = GitRepository.objects.get(slug=repository_slug)
+ if "extras.job" in repository.provided_contents:
+ if not (
+ os.path.isdir(os.path.join(repository.filesystem_path, "jobs"))
+ or os.path.isfile(os.path.join(repository.filesystem_path, "jobs.py"))
+ ):
+ logger.error("No `jobs` submodule found in Git repository %s", repository)
+ if not ignore_import_errors:
+ raise FileNotFoundError(f"No `jobs` submodule found in Git repository {repository}")
+ else:
+ import_modules_privately(
+ settings.GIT_ROOT, module_path=[repository_slug, "jobs"], ignore_import_errors=ignore_import_errors
+ )
+ except GitRepository.DoesNotExist as exc:
+ logger.error("Unable to reload Jobs from %s.jobs: %s", repository_slug, exc)
+ if not ignore_import_errors:
  raise

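For orientation, the rewritten loader above swaps the Celery task-registry bookkeeping for Nautobot's own `registry["jobs"]` dictionary. A minimal usage sketch, assuming the function lives in `nautobot.extras.datasources.git` as this diff indicates, and using a hypothetical repository slug of `demo_git_repo`:

```python
from nautobot.extras.datasources.git import refresh_job_code_from_repository

# After syncing a Git repository: drop any stale job classes registered under
# "demo_git_repo." and re-import <GIT_ROOT>/demo_git_repo/jobs, raising on
# import problems instead of merely logging them.
refresh_job_code_from_repository("demo_git_repo", ignore_import_errors=False)

# When the repository is being deleted: unload its job classes without re-importing.
refresh_job_code_from_repository("demo_git_repo", skip_reimport=True)
```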
@@ -773,13 +759,13 @@ def refresh_git_jobs(repository_record, job_result, delete=False):
  if "extras.job" in repository_record.provided_contents and not delete:
  found_jobs = False
  try:
- refresh_code_from_repository(repository_record.slug)
+ refresh_job_code_from_repository(repository_record.slug, ignore_import_errors=False)

- for task_name, task in celery_app.tasks.items():
- if not task_name.startswith(f"{repository_record.slug}."):
+ for job_class_path, job_class in registry["jobs"].items():
+ if not job_class_path.startswith(f"{repository_record.slug}.jobs."):
  continue
  found_jobs = True
- job_model, created = refresh_job_model_from_job_class(Job, task.__class__)
+ job_model, created = refresh_job_model_from_job_class(Job, job_class)

  if job_model is None:
  msg = "Failed to create Job record; check Nautobot logs for details"
@@ -788,15 +774,18 @@
  continue

  if created:
- message = "Created Job record"
+ message = f"Created Job record for {job_class_path}"
  else:
- message = "Refreshed Job record"
+ message = f"Refreshed Job record for {job_class_path}"
  logger.info(message)
  job_result.log(message=message, obj=job_model, grouping="jobs", level_choice=LogLevelChoices.LOG_INFO)
  installed_jobs.append(job_model)

  if not found_jobs:
- msg = "No jobs were registered on loading the `jobs` submodule. Did you miss a `register_jobs()` call?"
+ msg = (
+ f"No jobs were registered on loading the `{repository_record.slug}.jobs` submodule. "
+ "Did you miss a `register_jobs()` call? Or was there a syntax error or similar in your code?"
+ )
  logger.warning(msg)
  job_result.log(msg, grouping="jobs", level_choice=LogLevelChoices.LOG_WARNING)
  except Exception as exc:
@@ -804,8 +793,8 @@
  logger.error(msg)
  job_result.log(msg, grouping="jobs", level_choice=LogLevelChoices.LOG_ERROR)
  else:
- # Unload code from this repository, do not reimport it
- refresh_code_from_repository(repository_record.slug, skip_reimport=True)
+ # Flush this repository's job classes
+ refresh_job_code_from_repository(repository_record.slug, skip_reimport=True)

  for job_model in Job.objects.filter(module_name__startswith=f"{repository_record.slug}."):
  if job_model.installed and job_model not in installed_jobs:
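The refreshed `refresh_git_jobs()` only discovers classes that end up in `registry["jobs"]` under `<slug>.jobs.`, which is what a repository's `jobs` module does when it calls `register_jobs()`. A minimal sketch of such a module, assuming a hypothetical repository slug of `demo_git_repo` and the `register_jobs` helper exposed via `nautobot.apps.jobs`:

```python
# <GIT_ROOT>/demo_git_repo/jobs/__init__.py  (hypothetical repository layout)
from nautobot.apps.jobs import Job, register_jobs


class HelloWorld(Job):
    """Trivial Job used only to illustrate registration."""

    class Meta:
        name = "Hello World"

    def run(self):
        self.logger.info("Hello from a Git-provided Job")


# Without this call, refresh_git_jobs() emits the
# "No jobs were registered on loading the `<slug>.jobs` submodule" warning above.
register_jobs(HelloWorld)
```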
nautobot/extras/forms/forms.py CHANGED
@@ -844,8 +844,10 @@ class JobEditForm(NautobotModelForm):
  """
  For all overridable fields, if they aren't marked as overridden, revert them to the underlying value if known.
  """
+ from nautobot.extras.jobs import get_job # avoid circular import
+
  cleaned_data = super().clean() or self.cleaned_data
- job_class = self.instance.job_class
+ job_class = get_job(self.instance.class_path, reload=True)
  if job_class is not None:
  for field_name in JOB_OVERRIDABLE_FIELDS:
  if not cleaned_data.get(f"{field_name}_override", False):
nautobot/extras/jobs.py CHANGED
@@ -6,18 +6,14 @@ import inspect
  import json
  import logging
  import os
+ import sys
  import tempfile
  from textwrap import dedent
  from typing import final
  import warnings

- from billiard.einfo import ExceptionInfo, ExceptionWithTraceback
- from celery import states
- from celery.exceptions import NotRegistered, Retry
- from celery.result import EagerResult
- from celery.utils.functional import maybe_list
+ from billiard.einfo import ExceptionInfo
  from celery.utils.log import get_task_logger
- from celery.utils.nodenames import gethostname
  from db_file_storage.form_widgets import DBClearableFileInput
  from django import forms
  from django.conf import settings
@@ -30,12 +26,10 @@ from django.db.models import Model
  from django.db.models.query import QuerySet
  from django.forms import ValidationError
  from django.utils.functional import classproperty
- from kombu.utils.uuid import uuid
  import netaddr
  import yaml

- from nautobot.core.celery import app as celery_app
- from nautobot.core.celery.task import Task
+ from nautobot.core.celery import import_jobs, nautobot_task
  from nautobot.core.forms import (
  DynamicModelChoiceField,
  DynamicModelMultipleChoiceField,
@@ -53,6 +47,7 @@ from nautobot.extras.models import (
  JobResult,
  ObjectChange,
  )
+ from nautobot.extras.registry import registry
  from nautobot.extras.utils import change_logged_models_queryset, task_queues_as_choices
  from nautobot.ipam.formfields import IPAddressFormField, IPNetworkFormField
  from nautobot.ipam.validators import (
@@ -88,7 +83,7 @@ class RunJobTaskFailed(Exception):
  """Celery task failed for some reason."""


- class BaseJob(Task):
+ class BaseJob:
  """Base model for jobs.

  Users can subclass this directly if they want to provide their own base class for implementing multiple jobs
@@ -158,38 +153,6 @@ class BaseJob(Task):

  # See https://github.com/PyCQA/pylint-django/issues/240 for why we have a pylint disable on each classproperty below

- # TODO(jathan): Could be interesting for custom stuff when the Job is
- # enabled in the database and then therefore registered in Celery
- @classmethod
- def on_bound(cls, app):
- """Called when the task is bound to an app.
-
- Note:
- This class method can be defined to do additional actions when
- the task class is bound to an app.
- """
-
- # TODO(jathan): Could be interesting for showing the Job's class path as the
- # shadow name vs. the Celery task_name?
- def shadow_name(self, args, kwargs, options):
- """Override for custom task name in worker logs/monitoring.
-
- Example:
- from celery.utils.imports import qualname
-
- def shadow_name(task, args, kwargs, options):
- return qualname(args[0])
-
- @app.task(shadow_name=shadow_name, serializer='pickle')
- def apply_function_async(fun, *args, **kwargs):
- return fun(*args, **kwargs)
-
- Arguments:
- args (Tuple): Task positional arguments.
- kwargs (Dict): Task keyword arguments.
- options (Dict): Task execution options.
- """
-
  def before_start(self, task_id, args, kwargs):
  """Handler called before the task starts.

@@ -201,8 +164,6 @@ class BaseJob(Task):
  Returns:
  (None): The return value of this handler is ignored.
  """
- self.clear_cache()
-
  try:
  self.job_result
  except ObjectDoesNotExist as err:
@@ -234,7 +195,7 @@ class BaseJob(Task):
  extra={"grouping": "initialization"},
  )

- self.logger.info("Running job", extra={"grouping": "initialization"})
+ self.logger.info("Running job", extra={"grouping": "initialization", "object": self.job_model})

  def run(self, *args, **kwargs):
  """
@@ -314,84 +275,10 @@ class BaseJob(Task):
  if status == JobResultStatusChoices.STATUS_SUCCESS:
  self.logger.info("Job completed", extra={"grouping": "post_run"})

- # TODO(gary): document this in job author docs
- # Super.after_return must be called for chords to function properly
- super().after_return(status, retval, task_id, args, kwargs, einfo=einfo)
-
- def apply(
- self,
- args=None,
- kwargs=None,
- link=None,
- link_error=None,
- task_id=None,
- retries=None,
- throw=None,
- logfile=None,
- loglevel=None,
- headers=None,
- **options,
- ):
- """Fix celery's apply method to propagate options to the task result"""
- # trace imports Task, so need to import inline.
- from celery.app.trace import build_tracer
-
- app = self._get_app()
- args = args or ()
- kwargs = kwargs or {}
- task_id = task_id or uuid()
- retries = retries or 0
- if throw is None:
- throw = app.conf.task_eager_propagates
-
- # Make sure we get the task instance, not class.
- task = app._tasks[self.name]
-
- request = {
- "id": task_id,
- "retries": retries,
- "is_eager": True,
- "logfile": logfile,
- "loglevel": loglevel or 0,
- "hostname": gethostname(),
- "callbacks": maybe_list(link),
- "errbacks": maybe_list(link_error),
- "headers": headers,
- "ignore_result": options.get("ignore_result", False),
- "delivery_info": {
- "is_eager": True,
- "exchange": options.get("exchange"),
- "routing_key": options.get("routing_key"),
- "priority": options.get("priority"),
- },
- "properties": options, # one line fix to overloaded method
- }
- if "stamped_headers" in options:
- request["stamped_headers"] = maybe_list(options["stamped_headers"])
- request["stamps"] = {header: maybe_list(options.get(header, [])) for header in request["stamped_headers"]}
-
- tb = None
- tracer = build_tracer(
- task.name,
- task,
- eager=True,
- propagate=throw,
- app=self._get_app(),
- )
- ret = tracer(task_id, args, kwargs, request)
- retval = ret.retval
- if isinstance(retval, ExceptionInfo):
- retval, tb = retval.exception, retval.traceback
- if isinstance(retval, ExceptionWithTraceback):
- retval = retval.exc
- if isinstance(retval, Retry) and retval.sig is not None:
- return retval.sig.apply(retries=retries + 1)
- state = states.SUCCESS if ret.info is None else ret.info.state
- return EagerResult(task_id, retval, state, traceback=tb)
-
  @final
  @classproperty
  def file_path(cls) -> str: # pylint: disable=no-self-argument
+ """Deprecated as of Nautobot 2.2.3."""
  return inspect.getfile(cls)

  @final
@@ -430,7 +317,7 @@ class BaseJob(Task):
  @classproperty
  def grouping(cls) -> str: # pylint: disable=no-self-argument
  module = inspect.getmodule(cls)
- return getattr(module, "name", module.__name__)
+ return getattr(module, "name", cls.__module__)

  @final
  @classmethod
@@ -530,6 +417,7 @@ class BaseJob(Task):
  @final
  @classproperty
  def registered_name(cls) -> str: # pylint: disable=no-self-argument
+ """Deprecated - use class_path classproperty instead."""
  return f"{cls.__module__}.{cls.__name__}"

  @classmethod
@@ -545,7 +433,10 @@ class BaseJob(Task):
  base_classes = reversed(inspect.getmro(cls))
  attr_names = [name for base in base_classes for name in base.__dict__.keys()]
  for name in attr_names:
- attr_class = getattr(cls, name, None).__class__
+ try:
+ attr_class = getattr(cls, name, None).__class__
+ except TypeError:
+ pass
  if name not in cls_vars and issubclass(attr_class, ScriptVariable):
  cls_vars[name] = getattr(cls, name)

@@ -612,27 +503,9 @@ class BaseJob(Task):

  return form

- def clear_cache(self):
- """
- Clear all cached properties on this instance without accessing them. This is required because
- celery reuses task instances for multiple runs.
- """
- try:
- del self.celery_kwargs
- except AttributeError:
- pass
- try:
- del self.job_result
- except AttributeError:
- pass
- try:
- del self.job_model
- except AttributeError:
- pass
-
  @functools.cached_property
  def job_model(self):
- return JobModel.objects.get(module_name=self.__module__, job_class_name=self.__name__)
+ return JobModel.objects.get(module_name=self.__module__, job_class_name=self.__class__.__name__)

  @functools.cached_property
  def job_result(self):
@@ -1199,16 +1072,76 @@ def is_variable(obj):
  return isinstance(obj, ScriptVariable)


- def get_job(class_path):
+ def get_jobs(*, reload=False):
+ """
+ Compile a dictionary of all Job classes available at this time.
+
+ Args:
+ reload (bool): If True, reimport Jobs from `JOBS_ROOT` and all applicable GitRepositories.
+
+ Returns:
+ (dict): `{"class_path.Job1": <job_class>, "class_path.Job2": <job_class>, ...}`
+ """
+ if reload:
+ import_jobs()
+
+ return registry["jobs"]
+
+
+ def get_job(class_path, reload=False):
  """
  Retrieve a specific job class by its class_path (`<module_name>.<JobClassName>`).

- May return None if the job isn't properly registered with Celery at this time.
+ May return None if the job can't be imported.
+
+ Args:
+ reload (bool): If True, **and** the given class_path describes a JOBS_ROOT or GitRepository Job,
+ then refresh **all** such Jobs before retrieving the job class.
+ """
+ if reload:
+ if class_path.startswith("nautobot."):
+ # System job - not reloadable
+ reload = False
+ if any(class_path.startswith(f"{app_name}.") for app_name in settings.PLUGINS):
+ # App provided job - not reloadable
+ reload = False
+ jobs = get_jobs(reload=reload)
+ return jobs.get(class_path, None)
+
+
+ @nautobot_task(bind=True)
+ def run_job(self, job_class_path, *args, **kwargs):
+ """
+ "Runner" function for execution of any Job class by a worker.
+
+ This calls the following Job APIs in the following order:
+
+ - `__init__()`
+ - `before_start()`
+ - `__call__()` (which calls `run()`)
+ - If no exceptions have been raised, `on_success()`, else `on_failure()`
+ - `after_return()`
+
+ Finally, it either returns the data returned from `run()` or re-raises any exception encountered.
  """
+ logger.debug("Running job %s", job_class_path)
+
+ job_class = get_job(job_class_path, reload=True)
+ if job_class is None:
+ raise KeyError(f"Job class not found for class path {job_class_path}")
+ job = job_class()
+ job.request = self.request
  try:
- return celery_app.tasks[class_path].__class__
- except NotRegistered:
- return None
+ job.before_start(self.request.id, args, kwargs)
+ result = job(*args, **kwargs)
+ job.on_success(result, self.request.id, args, kwargs)
+ job.after_return(JobResultStatusChoices.STATUS_SUCCESS, result, self.request.id, args, kwargs, None)
+ return result
+ except Exception as exc:
+ einfo = ExceptionInfo(sys.exc_info())
+ job.on_failure(exc, self.request.id, args, kwargs, einfo)
+ job.after_return(JobResultStatusChoices.STATUS_FAILURE, exc, self.request.id, args, kwargs, einfo)
+ raise


  def enqueue_job_hooks(object_change):
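The new `get_jobs()` / `get_job()` helpers replace the old lookup in `celery_app.tasks`; the `reload` flag only has an effect for JOBS_ROOT and Git-repository Jobs, never for system or App-provided ones. A short sketch of the lookup side (the class path `demo_git_repo.jobs.HelloWorld` is illustrative, not taken from this diff):

```python
from nautobot.extras.jobs import get_job, get_jobs

# All currently registered Job classes, keyed by class_path.
all_jobs = get_jobs()

# Fetch a single class, re-importing JOBS_ROOT / Git repository Jobs first so that
# edits on disk are picked up; returns None if the Job cannot be imported.
job_class = get_job("demo_git_repo.jobs.HelloWorld", reload=True)
if job_class is None:
    raise RuntimeError("Job could not be imported; check the Nautobot logs")
```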
nautobot/extras/models/datasources.py CHANGED
@@ -97,8 +97,6 @@ class GitRepository(PrimaryModel):
  if not self.present_in_database:
  check_if_key_is_graphql_safe(self.__class__.__name__, self.slug, "slug")
  # Check on create whether the proposed slug conflicts with a module name already in the Python environment.
- # Because we add GIT_ROOT to the end of sys.path, trying to import this repository will instead
- # import the earlier-found Python module in its place, which would be undesirable.
  if find_spec(self.slug) is not None:
  raise ValidationError(
  f'Please choose a different slug, as "{self.slug}" is an installed Python package or module.'
nautobot/extras/models/jobs.py CHANGED
@@ -5,6 +5,7 @@ from datetime import timedelta
  import logging

  from celery import schedules
+ from celery.exceptions import NotRegistered
  from celery.utils.log import get_logger, LoggingProxy
  from django.conf import settings
  from django.contrib.contenttypes.models import ContentType
@@ -22,7 +23,6 @@ from nautobot.core.celery import (
  NautobotKombuJSONEncoder,
  setup_nautobot_job_logging,
  )
- from nautobot.core.celery.control import refresh_git_repository
  from nautobot.core.constants import CHARFIELD_MAX_LENGTH
  from nautobot.core.models import BaseManager, BaseModel
  from nautobot.core.models.fields import JSONArrayField
@@ -233,13 +233,20 @@ class Job(PrimaryModel):
  def __str__(self):
  return self.name

- @cached_property
+ @property
  def job_class(self):
- """Get the Job class (source code) associated with this Job model."""
+ """
+ Get the Job class (source code) associated with this Job model.
+
+ CAUTION: if the Job is provided by a Git Repository or is installed in JOBS_ROOT, you may need or wish to
+ call `get_job(self.class_path, reload=True)` to ensure that you have the latest Job code...
+ """
+ from nautobot.extras.jobs import get_job
+
  if not self.installed:
  return None
  try:
- return self.job_task.__class__
+ return get_job(self.class_path)
  except Exception as exc:
  logger.error(str(exc))
  return None
@@ -278,20 +285,23 @@

  @property
  def job_task(self):
- """Get the registered Celery task, refreshing it if necessary."""
- if self.git_repository is not None:
- # If this Job comes from a Git repository, make sure we have the correct version of said code.
- refresh_git_repository(
- state=None, repository_pk=self.git_repository.pk, head=self.git_repository.current_head
- )
- return app.tasks[f"{self.module_name}.{self.job_class_name}"]
+ """Get an instance of the associated Job class, refreshing it if necessary."""
+ from nautobot.extras.jobs import get_job
+
+ try:
+ return get_job(self.class_path, reload=True)()
+ except TypeError as err: # keep 2.0-2.2.2 exception behavior
+ raise NotRegistered from err

  def clean(self):
  """For any non-overridden fields, make sure they get reset to the actual underlying class value if known."""
- if self.job_class is not None:
+ from nautobot.extras.jobs import get_job
+
+ job_class = get_job(self.class_path, reload=True)
+ if job_class is not None:
  for field_name in JOB_OVERRIDABLE_FIELDS:
  if not getattr(self, f"{field_name}_override", False):
- setattr(self, field_name, getattr(self.job_class, field_name))
+ setattr(self, field_name, getattr(job_class, field_name))

  # Protect against invalid input when auto-creating Job records
  if len(self.module_name) > JOB_MAX_NAME_LENGTH:
@@ -617,12 +627,14 @@ class JobResult(BaseModel, CustomFieldModel):
  schedule (ScheduledJob, optional): ScheduledJob instance to link to the JobResult. Cannot be used with synchronous=True.
  task_queue (str, optional): The celery queue to send the job to. If not set, use the default celery queue.
  synchronous (bool, optional): If True, run the job in the current process, blocking until the job completes.
- *job_args: positional args passed to the job task
+ *job_args: positional args passed to the job task (UNUSED)
  **job_kwargs: keyword args passed to the job task

  Returns:
  JobResult instance
  """
+ from nautobot.extras.jobs import run_job # TODO circular import
+
  if schedule is not None and synchronous:
  raise ValueError("Scheduled jobs cannot be run synchronously")

@@ -666,8 +678,11 @@
  redirect_logger = get_logger("celery.redirected")
  proxy = LoggingProxy(redirect_logger, app.conf.worker_redirect_stdouts_level)
  with contextlib.redirect_stdout(proxy), contextlib.redirect_stderr(proxy):
- eager_result = job_model.job_task.apply(
- args=job_args, kwargs=job_kwargs, task_id=str(job_result.id), **job_celery_kwargs
+ eager_result = run_job.apply(
+ args=[job_model.class_path, *job_args],
+ kwargs=job_kwargs,
+ task_id=str(job_result.id),
+ **job_celery_kwargs,
  )

  # copy fields from eager result to job result
@@ -687,8 +702,11 @@
  else:
  # Jobs queued inside of a transaction need to run after the transaction completes and the JobResult is saved to the database
  transaction.on_commit(
- lambda: job_model.job_task.apply_async(
- args=job_args, kwargs=job_kwargs, task_id=str(job_result.id), **job_celery_kwargs
+ lambda: run_job.apply_async(
+ args=[job_model.class_path, *job_args],
+ kwargs=job_kwargs,
+ task_id=str(job_result.id),
+ **job_celery_kwargs,
  )
  )
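With the two hunks above, both the synchronous (`apply`) and asynchronous (`apply_async`) paths now dispatch the shared `run_job` task and identify the Job by its `class_path`, rather than treating each Job class as its own Celery task. A condensed sketch of the asynchronous path, assuming the surrounding local variables (`job_model`, `job_result`, `job_kwargs`, `job_celery_kwargs`) are in scope as in the original method:

```python
from nautobot.extras.jobs import run_job

run_job.apply_async(
    args=[job_model.class_path],  # the Job is identified by its class_path
    kwargs=job_kwargs,            # user-supplied inputs travel as task kwargs
    task_id=str(job_result.id),   # the JobResult primary key doubles as the Celery task id
    **job_celery_kwargs,          # additional Celery options computed by Nautobot
)
```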