prefect 3.6.6__py3-none-any.whl → 3.6.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. prefect/__init__.py +8 -5
  2. prefect/_build_info.py +3 -3
  3. prefect/_experimental/bundles/__init__.py +8 -4
  4. prefect/_experimental/plugins/spec.py +20 -1
  5. prefect/blocks/notifications.py +1 -1
  6. prefect/cli/server.py +18 -2
  7. prefect/client/orchestration/_deployments/client.py +12 -0
  8. prefect/events/clients.py +24 -12
  9. prefect/flow_runs.py +31 -10
  10. prefect/logging/logging.yml +2 -0
  11. prefect/main.py +12 -6
  12. prefect/runner/storage.py +30 -1
  13. prefect/serializers.py +17 -1
  14. prefect/server/api/background_workers.py +5 -2
  15. prefect/server/api/server.py +1 -0
  16. prefect/server/database/configurations.py +34 -0
  17. prefect/server/events/services/triggers.py +17 -21
  18. prefect/server/models/events.py +67 -0
  19. prefect/server/models/work_queues.py +74 -11
  20. prefect/server/models/workers.py +107 -10
  21. prefect/server/orchestration/core_policy.py +111 -7
  22. prefect/server/schemas/responses.py +0 -8
  23. prefect/server/services/base.py +1 -218
  24. prefect/server/services/foreman.py +175 -201
  25. prefect/server/services/late_runs.py +56 -113
  26. prefect/server/services/perpetual_services.py +1 -1
  27. prefect/server/services/repossessor.py +66 -49
  28. prefect/server/services/scheduler.py +276 -326
  29. prefect/server/services/task_run_recorder.py +28 -4
  30. prefect/server/services/telemetry.py +86 -115
  31. prefect/settings/models/_defaults.py +0 -1
  32. prefect/tasks.py +18 -18
  33. prefect/testing/utilities.py +22 -3
  34. prefect/variables.py +59 -6
  35. prefect/workers/base.py +0 -8
  36. {prefect-3.6.6.dist-info → prefect-3.6.7.dist-info}/METADATA +3 -2
  37. {prefect-3.6.6.dist-info → prefect-3.6.7.dist-info}/RECORD +40 -40
  38. {prefect-3.6.6.dist-info → prefect-3.6.7.dist-info}/WHEEL +0 -0
  39. {prefect-3.6.6.dist-info → prefect-3.6.7.dist-info}/entry_points.txt +0 -0
  40. {prefect-3.6.6.dist-info → prefect-3.6.7.dist-info}/licenses/LICENSE +0 -0
prefect/__init__.py CHANGED
@@ -12,21 +12,22 @@ if TYPE_CHECKING:
12
12
  from importlib.machinery import ModuleSpec
13
13
  from .main import (
14
14
  allow_failure,
15
+ aresume_flow_run,
16
+ aserve,
15
17
  flow,
16
18
  Flow,
17
19
  get_client,
18
20
  get_run_logger,
21
+ pause_flow_run,
22
+ resume_flow_run,
23
+ serve,
19
24
  State,
25
+ suspend_flow_run,
20
26
  tags,
21
27
  task,
22
28
  Task,
23
29
  Transaction,
24
30
  unmapped,
25
- serve,
26
- aserve,
27
- pause_flow_run,
28
- resume_flow_run,
29
- suspend_flow_run,
30
31
  )
31
32
  from prefect.deployments.runner import deploy
32
33
 
@@ -118,6 +119,7 @@ _initialize_plugins()
118
119
 
119
120
  _public_api: dict[str, tuple[Optional[str], str]] = {
120
121
  "allow_failure": (__spec__.parent, ".main"),
122
+ "aresume_flow_run": (__spec__.parent, ".main"),
121
123
  "aserve": (__spec__.parent, ".main"),
122
124
  "deploy": (__spec__.parent, ".deployments.runner"),
123
125
  "flow": (__spec__.parent, ".main"),
@@ -140,6 +142,7 @@ _public_api: dict[str, tuple[Optional[str], str]] = {
140
142
  __all__ = [
141
143
  "__version__",
142
144
  "allow_failure",
145
+ "aresume_flow_run",
143
146
  "aserve",
144
147
  "deploy",
145
148
  "flow",
prefect/_build_info.py CHANGED
@@ -1,5 +1,5 @@
1
1
  # Generated by versioningit
2
- __version__ = "3.6.6"
3
- __build_date__ = "2025-12-11 20:20:23.556858+00:00"
4
- __git_commit__ = "f7d4baf4e04fc31b7a7949b9bc6f2e59d848272b"
2
+ __version__ = "3.6.7"
3
+ __build_date__ = "2025-12-18 19:54:23.981675+00:00"
4
+ __git_commit__ = "ebfef643e5d55b20a7b0b6a80e9756987ac415cb"
5
5
  __dirty__ = False
@@ -38,14 +38,18 @@ logger: logging.Logger = get_logger(__name__)
38
38
 
39
39
 
40
40
  def _get_uv_path() -> str:
41
+ """
42
+ Get the path to the uv binary.
43
+
44
+ First tries to use the uv Python package to find the binary.
45
+ Falls back to "uv" string (assumes uv is in PATH).
46
+ """
41
47
  try:
42
48
  import uv
43
49
 
44
- uv_path = uv.find_uv_bin()
50
+ return uv.find_uv_bin()
45
51
  except (ImportError, ModuleNotFoundError, FileNotFoundError):
46
- uv_path = "uv"
47
-
48
- return uv_path
52
+ return "uv"
49
53
 
50
54
 
51
55
  class SerializedBundle(TypedDict):
@@ -9,7 +9,7 @@ from __future__ import annotations
9
9
 
10
10
  import logging
11
11
  from dataclasses import dataclass
12
- from typing import Callable, Mapping, Optional
12
+ from typing import Any, Callable, Mapping, Optional
13
13
 
14
14
  import pluggy
15
15
 
@@ -83,3 +83,22 @@ class HookSpec:
83
83
  - May be async or sync
84
84
  - Exceptions are caught and logged unless required=True in strict mode
85
85
  """
86
+
87
+ @hookspec
88
+ def set_database_connection_params(
89
+ self, connection_url: str, settings: Any
90
+ ) -> Mapping[str, Any]:
91
+ """
92
+ Set additional database connection parameters.
93
+
94
+ This hook is called when creating a database engine. It allows plugins
95
+ to provide additional connection parameters, such as authentication
96
+ tokens or SSL configuration.
97
+
98
+ Args:
99
+ connection_url: The database connection URL
100
+ settings: The current Prefect settings
101
+
102
+ Returns:
103
+ Dictionary of connection parameters to merge into connect_args
104
+ """
@@ -679,7 +679,7 @@ class MattermostWebhook(AbstractAppriseNotificationBlock):
679
679
  token=self.token.get_secret_value(),
680
680
  fullpath=self.path,
681
681
  host=self.hostname,
682
- botname=self.botname,
682
+ user=self.botname,
683
683
  channels=self.channels,
684
684
  include_image=self.include_image,
685
685
  port=self.port,
prefect/cli/server.py CHANGED
@@ -709,13 +709,29 @@ def run_manager_process():
709
709
 
710
710
  logger.debug("Manager process started. Starting services...")
711
711
  try:
712
- asyncio.run(Service.run_services())
712
+ asyncio.run(_run_all_services())
713
713
  except KeyboardInterrupt:
714
714
  pass
715
715
  finally:
716
716
  logger.debug("Manager process has exited.")
717
717
 
718
718
 
719
+ async def _run_all_services() -> None:
720
+ """Run Service-based services and docket-based perpetual services."""
721
+ from docket import Docket
722
+
723
+ from prefect.server.api.background_workers import background_worker
724
+ from prefect.server.services.base import Service
725
+ from prefect.settings.context import get_current_settings
726
+
727
+ docket_url = get_current_settings().server.docket.url
728
+
729
+ async with Docket(name="prefect", url=docket_url) as docket:
730
+ async with background_worker(docket, ephemeral=False, webserver_only=False):
731
+ # Run Service-based services (will block until shutdown)
732
+ await Service.run_services()
733
+
734
+
719
735
  # public, user-facing `prefect server services` commands
720
736
  @services_app.command(aliases=["ls"])
721
737
  def list_services():
@@ -772,7 +788,7 @@ def start_services(
772
788
  if not background:
773
789
  app.console.print("\n[blue]Starting services... Press CTRL+C to stop[/]\n")
774
790
  try:
775
- asyncio.run(Service.run_services())
791
+ asyncio.run(_run_all_services())
776
792
  except KeyboardInterrupt:
777
793
  pass
778
794
  app.console.print("\n[green]All services stopped.[/]")
@@ -157,6 +157,12 @@ class DeploymentClient(BaseClient):
157
157
  payload["version_info"] = deployment_create.version_info.model_dump(
158
158
  mode="json"
159
159
  )
160
+ if deployment_create.concurrency_options:
161
+ payload["concurrency_options"] = (
162
+ deployment_create.concurrency_options.model_dump(
163
+ mode="json", exclude_unset=True
164
+ )
165
+ )
160
166
 
161
167
  try:
162
168
  response = self.request("POST", "/deployments/", json=payload)
@@ -823,6 +829,12 @@ class DeploymentAsyncClient(BaseAsyncClient):
823
829
  payload["version_info"] = deployment_create.version_info.model_dump(
824
830
  mode="json"
825
831
  )
832
+ if deployment_create.concurrency_options:
833
+ payload["concurrency_options"] = (
834
+ deployment_create.concurrency_options.model_dump(
835
+ mode="json", exclude_unset=True
836
+ )
837
+ )
826
838
 
827
839
  try:
828
840
  response = await self.request("POST", "/deployments/", json=payload)
prefect/events/clients.py CHANGED
@@ -281,7 +281,11 @@ class PrefectEventsClient(EventsClient):
281
281
  # Don't handle any errors in the initial connection, because these are most
282
282
  # likely a permission or configuration issue that should propagate
283
283
  await super().__aenter__()
284
- await self._reconnect()
284
+ try:
285
+ await self._reconnect()
286
+ except Exception as e:
287
+ self._log_connection_error(e)
288
+ raise
285
289
  return self
286
290
 
287
291
  async def __aexit__(
@@ -298,6 +302,18 @@ class PrefectEventsClient(EventsClient):
298
302
  message = f"EventsClient(id={id(self)}): " + message
299
303
  logger.debug(message, *args, **kwargs)
300
304
 
305
+ def _log_connection_error(self, error: Exception) -> None:
306
+ logger.warning(
307
+ "Unable to connect to %r. "
308
+ "Please check your network settings to ensure websocket connections "
309
+ "to the API are allowed. Otherwise event data (including task run data) may be lost. "
310
+ "Reason: %s. "
311
+ "Set PREFECT_DEBUG_MODE=1 to see the full error.",
312
+ self._events_socket_url,
313
+ str(error),
314
+ exc_info=PREFECT_DEBUG_MODE.value(),
315
+ )
316
+
301
317
  async def _reconnect(self) -> None:
302
318
  logger.debug("Reconnecting websocket connection.")
303
319
 
@@ -315,15 +331,10 @@ class PrefectEventsClient(EventsClient):
315
331
  await pong
316
332
  logger.debug("Pong received. Websocket connected.")
317
333
  except Exception as e:
318
- # The client is frequently run in a background thread
319
- # so we log an additional warning to ensure
320
- # surfacing the error to the user.
321
- logger.warning(
322
- "Unable to connect to %r. "
323
- "Please check your network settings to ensure websocket connections "
324
- "to the API are allowed. Otherwise event data (including task run data) may be lost. "
325
- "Reason: %s. "
326
- "Set PREFECT_DEBUG_MODE=1 to see the full error.",
334
+ # Log at debug level during reconnection attempts - the warning will
335
+ # only be logged if all reconnection attempts fail (in _emit)
336
+ logger.debug(
337
+ "Unable to connect to %r, will retry. Reason: %s",
327
338
  self._events_socket_url,
328
339
  str(e),
329
340
  exc_info=PREFECT_DEBUG_MODE.value(),
@@ -391,10 +402,11 @@ class PrefectEventsClient(EventsClient):
391
402
  await self._checkpoint()
392
403
 
393
404
  return
394
- except ConnectionClosed:
405
+ except ConnectionClosed as e:
395
406
  self._log_debug("Got ConnectionClosed error.")
396
407
  if i == self._reconnection_attempts:
397
- # this was our final chance, raise the most recent error
408
+ # this was our final chance, log warning and raise
409
+ self._log_connection_error(e)
398
410
  raise
399
411
 
400
412
  if i > 2:
prefect/flow_runs.py CHANGED
@@ -11,6 +11,7 @@ from uuid import UUID, uuid4
11
11
 
12
12
  import anyio
13
13
 
14
+ from prefect._internal.compatibility.async_dispatch import async_dispatch
14
15
  from prefect.client.orchestration import PrefectClient, get_client
15
16
  from prefect.client.schemas import FlowRun
16
17
  from prefect.client.schemas.objects import (
@@ -42,9 +43,7 @@ from prefect.states import (
42
43
  Paused,
43
44
  Suspended,
44
45
  )
45
- from prefect.utilities.asyncutils import (
46
- sync_compatible,
47
- )
46
+ from prefect.utilities.asyncutils import sync_compatible
48
47
  from prefect.utilities.engine import (
49
48
  propose_state,
50
49
  )
@@ -459,28 +458,50 @@ async def suspend_flow_run(
459
458
  raise Pause(state=state)
460
459
 
461
460
 
462
- @sync_compatible
463
- async def resume_flow_run(
461
+ async def aresume_flow_run(
464
462
  flow_run_id: UUID, run_input: dict[str, Any] | None = None
465
463
  ) -> None:
466
464
  """
467
- Resumes a paused flow.
465
+ Resumes a paused flow asynchronously.
468
466
 
469
467
  Args:
470
468
  flow_run_id: the flow_run_id to resume
471
469
  run_input: a dictionary of inputs to provide to the flow run.
472
470
  """
473
- client = get_client()
474
- async with client:
471
+ async with get_client() as client:
475
472
  flow_run = await client.read_flow_run(flow_run_id)
476
473
 
477
- if not flow_run.state.is_paused():
474
+ if not flow_run.state or not flow_run.state.is_paused():
478
475
  raise NotPausedError("Cannot resume a run that isn't paused!")
479
476
 
480
477
  response = await client.resume_flow_run(flow_run_id, run_input=run_input)
481
478
 
482
479
  if response.status == SetStateStatus.REJECT:
483
- if response.state.type == StateType.FAILED:
480
+ if response.state and response.state.type == StateType.FAILED:
481
+ raise FlowPauseTimeout("Flow run can no longer be resumed.")
482
+ else:
483
+ raise RuntimeError(f"Cannot resume this run: {response.details.reason}")
484
+
485
+
486
+ @async_dispatch(aresume_flow_run)
487
+ def resume_flow_run(flow_run_id: UUID, run_input: dict[str, Any] | None = None) -> None:
488
+ """
489
+ Resumes a paused flow.
490
+
491
+ Args:
492
+ flow_run_id: the flow_run_id to resume
493
+ run_input: a dictionary of inputs to provide to the flow run.
494
+ """
495
+ with get_client(sync_client=True) as client:
496
+ flow_run = client.read_flow_run(flow_run_id)
497
+
498
+ if not flow_run.state or not flow_run.state.is_paused():
499
+ raise NotPausedError("Cannot resume a run that isn't paused!")
500
+
501
+ response = client.resume_flow_run(flow_run_id, run_input=run_input)
502
+
503
+ if response.status == SetStateStatus.REJECT:
504
+ if response.state and response.state.type == StateType.FAILED:
484
505
  raise FlowPauseTimeout("Flow run can no longer be resumed.")
485
506
  else:
486
507
  raise RuntimeError(f"Cannot resume this run: {response.details.reason}")
@@ -43,6 +43,7 @@ handlers:
43
43
  level: 0
44
44
  class: prefect.logging.handlers.PrefectConsoleHandler
45
45
  formatter: standard
46
+ stream: ext://sys.stderr
46
47
  styles:
47
48
  log.web_url: bright_blue
48
49
  log.local_url: bright_blue
@@ -69,6 +70,7 @@ handlers:
69
70
  level: 0
70
71
  class: logging.StreamHandler
71
72
  formatter: debug
73
+ stream: ext://sys.stderr
72
74
 
73
75
  worker_api:
74
76
  level: 0
prefect/main.py CHANGED
@@ -8,7 +8,12 @@ from prefect.tasks import task, Task
8
8
  from prefect.context import tags
9
9
  from prefect.utilities.annotations import unmapped, allow_failure
10
10
  from prefect._result_records import ResultRecordMetadata
11
- from prefect.flow_runs import pause_flow_run, resume_flow_run, suspend_flow_run
11
+ from prefect.flow_runs import (
12
+ aresume_flow_run,
13
+ pause_flow_run,
14
+ resume_flow_run,
15
+ suspend_flow_run,
16
+ )
12
17
  from prefect.client.orchestration import get_client
13
18
  from prefect.client.cloud import get_cloud_client
14
19
  import prefect.variables # pyright: ignore[reportUnusedImport] # TODO: Does this need to be imported here?
@@ -58,20 +63,21 @@ flow: FlowDecorator
58
63
  # Declare API for type-checkers
59
64
  __all__ = [
60
65
  "allow_failure",
66
+ "aresume_flow_run",
67
+ "aserve",
61
68
  "flow",
62
69
  "Flow",
63
70
  "get_client",
64
71
  "get_cloud_client",
65
72
  "get_run_logger",
73
+ "pause_flow_run",
74
+ "resume_flow_run",
75
+ "serve",
66
76
  "State",
77
+ "suspend_flow_run",
67
78
  "tags",
68
79
  "task",
69
80
  "Task",
70
81
  "Transaction",
71
82
  "unmapped",
72
- "serve",
73
- "aserve",
74
- "pause_flow_run",
75
- "resume_flow_run",
76
- "suspend_flow_run",
77
83
  ]
prefect/runner/storage.py CHANGED
@@ -919,7 +919,36 @@ def _format_token_from_credentials(
919
919
  if username:
920
920
  return f"{username}:{user_provided_token}"
921
921
 
922
- # Fallback for plain dict credentials without a block
922
+ # Netloc-based provider detection for dict credentials (e.g., from YAML block references).
923
+ # When credentials come from deployment YAML like:
924
+ # credentials: "{{ prefect.blocks.gitlab-credentials.my-block }}"
925
+ # they resolve to dicts, not Block instances, so the protocol check above doesn't apply.
926
+ # This provides sensible defaults for common git providers.
927
+ if "bitbucketserver" in netloc:
928
+ if ":" not in user_provided_token:
929
+ raise ValueError(
930
+ "Please provide a `username` and a `password` or `token` in your"
931
+ " BitBucketCredentials block to clone a repo from BitBucket Server."
932
+ )
933
+ return user_provided_token
934
+
935
+ elif "bitbucket" in netloc:
936
+ if (
937
+ user_provided_token.startswith("x-token-auth:")
938
+ or ":" in user_provided_token
939
+ ):
940
+ return user_provided_token
941
+ return f"x-token-auth:{user_provided_token}"
942
+
943
+ elif "gitlab" in netloc:
944
+ if user_provided_token.startswith("oauth2:"):
945
+ return user_provided_token
946
+ # Deploy tokens contain ":" (username:token format) and should not get oauth2: prefix
947
+ if ":" in user_provided_token:
948
+ return user_provided_token
949
+ return f"oauth2:{user_provided_token}"
950
+
951
+ # GitHub and other providers: plain token
923
952
  return user_provided_token
924
953
 
925
954
 
prefect/serializers.py CHANGED
@@ -41,6 +41,21 @@ D = TypeVar("D", default=Any)
41
41
  _TYPE_ADAPTER_CACHE: dict[str, TypeAdapter[Any]] = {}
42
42
 
43
43
 
44
+ def _get_importable_class(cls: type) -> type:
45
+ """
46
+ Get an importable class from a potentially parameterized generic.
47
+
48
+ For Pydantic generic models like `APIResult[str]`, the class name includes
49
+ type parameters (e.g., `APIResult[str]`) which cannot be imported. This
50
+ function extracts the origin class (e.g., `APIResult`) which can be imported.
51
+ """
52
+ if hasattr(cls, "__pydantic_generic_metadata__"):
53
+ origin = cls.__pydantic_generic_metadata__.get("origin")
54
+ if origin is not None:
55
+ return origin
56
+ return cls
57
+
58
+
44
59
  def prefect_json_object_encoder(obj: Any) -> Any:
45
60
  """
46
61
  `JSONEncoder.default` for encoding objects into JSON with extended type support.
@@ -58,8 +73,9 @@ def prefect_json_object_encoder(obj: Any) -> Any:
58
73
  ),
59
74
  }
60
75
  else:
76
+ importable_class = _get_importable_class(obj.__class__)
61
77
  return {
62
- "__class__": to_qualified_name(obj.__class__),
78
+ "__class__": to_qualified_name(importable_class),
63
79
  "data": custom_pydantic_encoder({}, obj),
64
80
  }
65
81
 
@@ -6,18 +6,19 @@ from docket import Docket, Worker
6
6
 
7
7
  from prefect.server.api.flow_runs import delete_flow_run_logs
8
8
  from prefect.server.api.task_runs import delete_task_run_logs
9
+ from prefect.server.events.services import triggers as _triggers_module # noqa: F401
9
10
  from prefect.server.models.deployments import mark_deployments_ready
10
11
  from prefect.server.models.work_queues import mark_work_queues_ready
11
-
12
- # Import task functions that need to be registered with docket
13
12
  from prefect.server.services.cancellation_cleanup import (
14
13
  cancel_child_task_runs,
15
14
  cancel_subflow_run,
16
15
  )
16
+ from prefect.server.services.late_runs import mark_flow_run_late
17
17
  from prefect.server.services.pause_expirations import fail_expired_pause
18
18
  from prefect.server.services.perpetual_services import (
19
19
  register_and_schedule_perpetual_services,
20
20
  )
21
+ from prefect.server.services.repossessor import revoke_expired_lease
21
22
 
22
23
  # Task functions to register with docket for background processing
23
24
  task_functions: list[Callable[..., Any]] = [
@@ -30,6 +31,8 @@ task_functions: list[Callable[..., Any]] = [
30
31
  cancel_child_task_runs,
31
32
  cancel_subflow_run,
32
33
  fail_expired_pause,
34
+ mark_flow_run_late,
35
+ revoke_expired_lease,
33
36
  ]
34
37
 
35
38
 
@@ -156,6 +156,7 @@ def _install_sqlite_locked_log_filter() -> None:
156
156
 
157
157
  filter_ = _SQLiteLockedOperationalErrorFilter()
158
158
  logging.getLogger("uvicorn.error").addFilter(filter_)
159
+ logging.getLogger("docket.worker").addFilter(filter_)
159
160
  _SQLITE_LOCKED_LOG_FILTER = filter_
160
161
 
161
162
 
@@ -1,5 +1,6 @@
1
1
  from __future__ import annotations
2
2
 
3
+ import logging
3
4
  import sqlite3
4
5
  import ssl
5
6
  import traceback
@@ -25,6 +26,12 @@ from sqlalchemy.ext.asyncio import (
25
26
  from sqlalchemy.pool import ConnectionPoolEntry
26
27
  from typing_extensions import TypeAlias
27
28
 
29
+ from prefect._experimental.plugins import (
30
+ HookSpec,
31
+ build_manager,
32
+ call_async_hook,
33
+ load_entry_point_plugins,
34
+ )
28
35
  from prefect._internal.observability import configure_logfire
29
36
  from prefect.settings import (
30
37
  PREFECT_API_DATABASE_CONNECTION_TIMEOUT,
@@ -279,6 +286,33 @@ class AsyncPostgresConfiguration(BaseDatabaseConfiguration):
279
286
  pg_ctx.verify_mode = ssl.CERT_REQUIRED
280
287
  connect_args["ssl"] = pg_ctx
281
288
 
289
+ # Initialize plugin manager
290
+ if get_current_settings().experiments.plugins.enabled:
291
+ pm = build_manager(HookSpec)
292
+ load_entry_point_plugins(
293
+ pm,
294
+ allow=get_current_settings().experiments.plugins.allow,
295
+ deny=get_current_settings().experiments.plugins.deny,
296
+ logger=logging.getLogger("prefect.plugins"),
297
+ )
298
+
299
+ # Call set_database_connection_params hook
300
+ results = await call_async_hook(
301
+ pm,
302
+ "set_database_connection_params",
303
+ connection_url=self.connection_url,
304
+ settings=get_current_settings(),
305
+ )
306
+
307
+ for _, params, error in results:
308
+ if error:
309
+ # Log error but don't fail, other plugins might succeed
310
+ logging.getLogger("prefect.server.database").warning(
311
+ "Plugin failed to set database connection params: %s", error
312
+ )
313
+ elif params:
314
+ connect_args.update(params)
315
+
282
316
  if connect_args:
283
317
  kwargs["connect_args"] = connect_args
284
318
 
@@ -1,16 +1,18 @@
1
1
  from __future__ import annotations
2
2
 
3
3
  import asyncio
4
- from typing import TYPE_CHECKING, Any, NoReturn, Optional
4
+ from typing import TYPE_CHECKING, NoReturn
5
+
6
+ from docket import Perpetual
5
7
 
6
8
  from prefect.logging import get_logger
7
9
  from prefect.server.events import triggers
8
- from prefect.server.services.base import LoopService, RunInEphemeralServers, Service
10
+ from prefect.server.services.base import RunInEphemeralServers, Service
11
+ from prefect.server.services.perpetual_services import perpetual_service
9
12
  from prefect.server.utilities.messaging import Consumer, create_consumer
10
13
  from prefect.server.utilities.messaging._consumer_names import (
11
14
  generate_unique_consumer_name,
12
15
  )
13
- from prefect.settings import PREFECT_EVENTS_PROACTIVE_GRANULARITY
14
16
  from prefect.settings.context import get_current_settings
15
17
  from prefect.settings.models.server.services import ServicesBaseSetting
16
18
 
@@ -65,21 +67,15 @@ class ReactiveTriggers(RunInEphemeralServers, Service):
65
67
  logger.debug("Reactive triggers stopped")
66
68
 
67
69
 
68
- class ProactiveTriggers(RunInEphemeralServers, LoopService):
69
- """Evaluates proactive automation triggers"""
70
-
71
- @classmethod
72
- def service_settings(cls) -> ServicesBaseSetting:
73
- return get_current_settings().server.services.triggers
74
-
75
- def __init__(self, loop_seconds: Optional[float] = None, **kwargs: Any):
76
- super().__init__(
77
- loop_seconds=(
78
- loop_seconds
79
- or PREFECT_EVENTS_PROACTIVE_GRANULARITY.value().total_seconds()
80
- ),
81
- **kwargs,
82
- )
83
-
84
- async def run_once(self) -> None:
85
- await triggers.evaluate_proactive_triggers()
70
+ @perpetual_service(
71
+ enabled_getter=lambda: get_current_settings().server.services.triggers.enabled,
72
+ run_in_ephemeral=True,
73
+ )
74
+ async def evaluate_proactive_triggers_periodic(
75
+ perpetual: Perpetual = Perpetual(
76
+ automatic=True,
77
+ every=get_current_settings().server.events.proactive_granularity,
78
+ ),
79
+ ) -> None:
80
+ """Evaluate proactive automation triggers on a periodic schedule."""
81
+ await triggers.evaluate_proactive_triggers()