prefect-client 3.4.5.dev4__py3-none-any.whl → 3.4.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
prefect/_build_info.py CHANGED
@@ -1,5 +1,5 @@
  # Generated by versioningit
- __version__ = "3.4.5.dev4"
- __build_date__ = "2025-06-05 08:09:20.805818+00:00"
- __git_commit__ = "92cd0a62e663cace1fdbb654f3533c368622d8c6"
+ __version__ = "3.4.6"
+ __build_date__ = "2025-06-11 20:00:26.503556+00:00"
+ __git_commit__ = "d10c6e6ac5de769628173c86ad32f9a3c3171d31"
  __dirty__ = False
prefect/assets/__init__.py ADDED
@@ -0,0 +1,4 @@
+ from prefect.assets.core import Asset, AssetProperties, add_asset_metadata
+ from prefect.assets.materialize import materialize
+
+ __all__ = ["Asset", "AssetProperties", "materialize", "add_asset_metadata"]
prefect/assets/core.py ADDED
@@ -0,0 +1,75 @@
+ from __future__ import annotations
+
+ from typing import Any, ClassVar, Optional
+
+ from pydantic import ConfigDict, Field
+
+ from prefect._internal.schemas.bases import PrefectBaseModel
+ from prefect.types import URILike
+
+
+ class AssetProperties(PrefectBaseModel):
+     """
+     Metadata properties to configure on an Asset
+     """
+
+     model_config: ClassVar[ConfigDict] = ConfigDict(frozen=True)
+
+     name: Optional[str] = Field(
+         default=None, description="Human readable name of the Asset."
+     )
+     url: Optional[str] = Field(
+         default=None, description="Visitable url to view the Asset."
+     )
+     description: Optional[str] = Field(
+         default=None, description="Description of the Asset."
+     )
+     owners: Optional[list[str]] = Field(
+         default=None, description="Owners of the Asset."
+     )
+
+
+ class Asset(PrefectBaseModel):
+     """
+     Assets are objects that represent materialized data,
+     providing a way to track lineage and dependencies.
+     """
+
+     model_config: ClassVar[ConfigDict] = ConfigDict(frozen=True)
+
+     key: URILike
+     properties: Optional[AssetProperties] = Field(
+         default=None,
+         description="Properties of the asset. "
+         "Setting this will overwrite properties of a known asset.",
+     )
+
+     def __repr__(self) -> str:
+         return f"Asset(key={self.key!r})"
+
+     def __hash__(self) -> int:
+         return hash(self.key)
+
+     def add_metadata(self, metadata: dict[str, Any]) -> None:
+         from prefect.context import AssetContext
+
+         asset_ctx = AssetContext.get()
+         if not asset_ctx:
+             raise RuntimeError(
+                 "Unable add Asset metadata when not inside of an AssetContext"
+             )
+
+         asset_ctx.add_asset_metadata(self.key, metadata)
+
+
+ def add_asset_metadata(asset: str | Asset, metadata: dict[str, Any]) -> None:
+     from prefect.context import AssetContext
+
+     asset_ctx = AssetContext.get()
+     if not asset_ctx:
+         raise RuntimeError(
+             "Unable to call `add_asset_metadata` when not inside of an AssetContext"
+         )
+
+     asset_key = asset if isinstance(asset, str) else asset.key
+     asset_ctx.add_asset_metadata(asset_key, metadata)
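
For orientation, here is a minimal usage sketch of the new `Asset` and `AssetProperties` models added above; the key and property values are illustrative placeholders, not taken from this release.

```python
# Illustrative sketch of the prefect.assets models added in 3.4.6.
from prefect.assets import Asset, AssetProperties

# AssetProperties is a frozen metadata model attached to an Asset.
properties = AssetProperties(
    name="Daily orders table",            # placeholder value
    description="Aggregated order rows",  # placeholder value
    owners=["data-eng"],                  # placeholder value
)

# The key must be URI-like (URILike); __hash__ above is based on the key alone.
orders = Asset(key="s3://warehouse/orders", properties=properties)

print(repr(orders))  # Asset(key='s3://warehouse/orders')
```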
prefect/assets/materialize.py ADDED
@@ -0,0 +1,42 @@
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Callable, TypeVar, Union
+
+ from typing_extensions import ParamSpec, Unpack
+
+ from .core import Asset
+
+ T = TypeVar("T")
+ P = ParamSpec("P")
+ R = TypeVar("R")
+
+ if TYPE_CHECKING:
+     from prefect.tasks import MaterializingTask, TaskOptions
+
+
+ def materialize(
+     *assets: Union[str, Asset],
+     by: str | None = None,
+     **task_kwargs: Unpack[TaskOptions],
+ ) -> Callable[[Callable[P, R]], MaterializingTask[P, R]]:
+     """
+     Decorator for materializing assets.
+
+     Args:
+         *assets: Assets to materialize
+         by: An optional tool that is ultimately responsible for materializing the asset e.g. "dbt" or "spark"
+         **task_kwargs: Additional task configuration
+     """
+     if not assets:
+         raise TypeError(
+             "materialize requires at least one asset argument, e.g. `@materialize(asset)`"
+         )
+
+     from prefect.tasks import MaterializingTask
+
+     def decorator(fn: Callable[P, R]) -> MaterializingTask[P, R]:
+         return MaterializingTask(
+             fn=fn, assets=assets, materialized_by=by, **task_kwargs
+         )
+
+     return decorator
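
A hedged sketch of how the `materialize` decorator and the `add_asset_metadata` helper introduced above might be combined in a flow; the asset key, metadata, and flow are illustrative assumptions, and `by="dbt"` simply populates `materialized_by` as in the decorator signature.

```python
# Illustrative sketch only; the asset key and metadata are made up.
from prefect import flow
from prefect.assets import Asset, add_asset_metadata, materialize

CLEAN_ORDERS = Asset(key="s3://lake/clean/orders")  # hypothetical asset


@materialize(CLEAN_ORDERS, by="dbt")
def build_clean_orders():
    # ... produce and write the data somewhere ...
    # Metadata can only be attached to assets passed to @materialize,
    # and only while the materializing task run's AssetContext is active.
    add_asset_metadata(CLEAN_ORDERS, {"row_count": 1000})


@flow
def pipeline():
    build_clean_orders()
```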
prefect/context.py CHANGED
@@ -7,13 +7,23 @@ For more user-accessible information about the current run, see [`prefect.runtim
  """
 
  import asyncio
+ import json
  import os
  import sys
  import warnings
  from collections.abc import AsyncGenerator, Generator, Mapping
  from contextlib import ExitStack, asynccontextmanager, contextmanager
  from contextvars import ContextVar, Token
- from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, TypeVar, Union
+ from typing import (
+     TYPE_CHECKING,
+     Any,
+     Callable,
+     ClassVar,
+     Optional,
+     TypeVar,
+     Union,
+ )
+ from uuid import UUID
 
  from pydantic import BaseModel, ConfigDict, Field, PrivateAttr
  from typing_extensions import Self
@@ -21,6 +31,7 @@ from typing_extensions import Self
  import prefect.settings
  import prefect.types._datetime
  from prefect._internal.compatibility.migration import getattr_migration
+ from prefect.assets import Asset
  from prefect.client.orchestration import PrefectClient, SyncPrefectClient, get_client
  from prefect.client.schemas import FlowRun, TaskRun
  from prefect.events.worker import EventsWorker
@@ -48,9 +59,15 @@ if TYPE_CHECKING:
      from prefect.tasks import Task
 
 
- def serialize_context() -> dict[str, Any]:
+ def serialize_context(
+     asset_ctx_kwargs: Union[dict[str, Any], None] = None,
+ ) -> dict[str, Any]:
      """
      Serialize the current context for use in a remote execution environment.
+
+     Optionally provide asset_ctx_kwargs to create new AssetContext, that will be used
+     in the remote execution environment. This is useful for TaskRunners, who rely on creating the
+     task run in the remote environment.
      """
      flow_run_context = EngineContext.get()
      task_run_context = TaskRunContext.get()
@@ -62,6 +79,11 @@ def serialize_context() -> dict[str, Any]:
          "task_run_context": task_run_context.serialize() if task_run_context else {},
          "tags_context": tags_context.serialize() if tags_context else {},
          "settings_context": settings_context.serialize() if settings_context else {},
+         "asset_context": AssetContext.from_task_and_inputs(
+             **asset_ctx_kwargs
+         ).serialize()
+         if asset_ctx_kwargs
+         else {},
      }
 
 
@@ -112,6 +134,9 @@ def hydrated_context(
          # Set up tags context
          if tags_context := serialized_context.get("tags_context"):
              stack.enter_context(tags(*tags_context["current_tags"]))
+         # Set up asset context
+         if asset_context := serialized_context.get("asset_context"):
+             stack.enter_context(AssetContext(**asset_context))
          yield
 
 
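
Below is a hedged sketch of how the new `asset_ctx_kwargs` parameter threads through `serialize_context` and `hydrated_context`, based on the signatures above; the placeholder task and task run id are assumptions for illustration.

```python
# Sketch: asset_ctx_kwargs are forwarded to AssetContext.from_task_and_inputs
# during serialization, and hydrated_context re-enters the resulting
# "asset_context" entry on the remote side. The task and id are placeholders.
from uuid import uuid4

from prefect import task
from prefect.context import hydrated_context, serialize_context


@task
def example_task():  # placeholder task for illustration
    ...


serialized = serialize_context(
    asset_ctx_kwargs={
        "task": example_task,
        "task_run_id": uuid4(),  # placeholder task run id
        "copy_to_child_ctx": True,
    }
)

# In the remote execution environment:
with hydrated_context(serialized):
    ...  # an AssetContext is active here because asset_ctx_kwargs were given
```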
@@ -373,6 +398,10 @@ class EngineContext(RunContext):
      # Holds the ID of the object returned by the task run and task run state
      task_run_results: dict[int, State] = Field(default_factory=dict)
 
+     # Tracking information needed to track asset linage between
+     # tasks and materialization
+     task_run_assets: dict[UUID, set[Asset]] = Field(default_factory=dict)
+
      # Events worker to emit events
      events: Optional[EventsWorker] = None
 
@@ -443,6 +472,221 @@ class TaskRunContext(RunContext):
          )
 
 
+ class AssetContext(ContextModel):
+     """
+     The asset context for a materializing task run. Contains all asset-related information needed
+     for asset event emission and downstream asset dependency propagation.
+
+     Attributes:
+         direct_asset_dependencies: Assets that this task directly depends on (from task.asset_deps)
+         downstream_assets: Assets that this task will create/materialize (from MaterializingTask.assets)
+         upstream_assets: Assets from upstream task dependencies
+         materialized_by: Tool that materialized the assets (from MaterializingTask.materialized_by)
+         task_run_id: ID of the associated task run
+         materialization_metadata: Metadata for materialized assets
+     """
+
+     direct_asset_dependencies: set[Asset] = Field(default_factory=set)
+     downstream_assets: set[Asset] = Field(default_factory=set)
+     upstream_assets: set[Asset] = Field(default_factory=set)
+     materialized_by: Optional[str] = None
+     task_run_id: Optional[UUID] = None
+     materialization_metadata: dict[str, dict[str, Any]] = Field(default_factory=dict)
+     copy_to_child_ctx: bool = False
+
+     __var__: ClassVar[ContextVar[Self]] = ContextVar("asset_context")
+
+     @classmethod
+     def from_task_and_inputs(
+         cls,
+         task: "Task[Any, Any]",
+         task_run_id: UUID,
+         task_inputs: Optional[dict[str, set[Any]]] = None,
+         copy_to_child_ctx: bool = False,
+     ) -> "AssetContext":
+         """
+         Create an AssetContext from a task and its resolved inputs.
+
+         Args:
+             task: The task instance
+             task_run_id: The task run ID
+             task_inputs: The resolved task inputs (TaskRunResult objects)
+             copy_to_child_ctx: Whether this context should be copied on a child AssetContext
+
+         Returns:
+             Configured AssetContext
+         """
+         from prefect.client.schemas import TaskRunResult
+         from prefect.tasks import MaterializingTask
+
+         upstream_assets: set[Asset] = set()
+
+         flow_ctx = FlowRunContext.get()
+         if task_inputs and flow_ctx:
+             for name, inputs in task_inputs.items():
+                 # Parent task runs are not dependencies
+                 # that we want to track
+                 if name == "__parents__":
+                     continue
+
+                 for task_input in inputs:
+                     if isinstance(task_input, TaskRunResult):
+                         task_assets = flow_ctx.task_run_assets.get(task_input.id)
+                         if task_assets:
+                             upstream_assets.update(task_assets)
+
+         ctx = cls(
+             direct_asset_dependencies=set(task.asset_deps)
+             if task.asset_deps
+             else set(),
+             downstream_assets=set(task.assets)
+             if isinstance(task, MaterializingTask) and task.assets
+             else set(),
+             upstream_assets=upstream_assets,
+             materialized_by=task.materialized_by
+             if isinstance(task, MaterializingTask)
+             else None,
+             task_run_id=task_run_id,
+             copy_to_child_ctx=copy_to_child_ctx,
+         )
+         ctx.update_tracked_assets()
+
+         return ctx
+
+     def add_asset_metadata(self, asset_key: str, metadata: dict[str, Any]) -> None:
+         """
+         Add metadata for a materialized asset.
+
+         Args:
+             asset_key: The asset key
+             metadata: Metadata dictionary to add
+
+         Raises:
+             ValueError: If asset_key is not in downstream_assets
+         """
+         downstream_keys = {asset.key for asset in self.downstream_assets}
+         if asset_key not in downstream_keys:
+             raise ValueError(
+                 "Can only add metadata to assets that are arguments to @materialize"
+             )
+
+         existing = self.materialization_metadata.get(asset_key, {})
+         self.materialization_metadata[asset_key] = existing | metadata
+
+     @staticmethod
+     def asset_as_resource(asset: Asset) -> dict[str, str]:
+         """Convert Asset to event resource format."""
+         resource = {"prefect.resource.id": asset.key}
+
+         if asset.properties:
+             properties_dict = asset.properties.model_dump(exclude_unset=True)
+
+             if "name" in properties_dict:
+                 resource["prefect.resource.name"] = properties_dict["name"]
+
+             if "description" in properties_dict:
+                 resource["prefect.asset.description"] = properties_dict["description"]
+
+             if "url" in properties_dict:
+                 resource["prefect.asset.url"] = properties_dict["url"]
+
+             if "owners" in properties_dict:
+                 resource["prefect.asset.owners"] = json.dumps(properties_dict["owners"])
+
+         return resource
+
+     @staticmethod
+     def asset_as_related(asset: Asset) -> dict[str, str]:
+         """Convert Asset to event related format."""
+         return {
+             "prefect.resource.id": asset.key,
+             "prefect.resource.role": "asset",
+         }
+
+     @staticmethod
+     def related_materialized_by(by: str) -> dict[str, str]:
+         """Create a related resource for the tool that performed the materialization"""
+         return {
+             "prefect.resource.id": by,
+             "prefect.resource.role": "asset-materialized-by",
+         }
+
+     def emit_events(self, state: State) -> None:
+         """
+         Emit asset events
+         """
+
+         from prefect.events import emit_event
+
+         if state.name == "Cached":
+             return
+         elif state.is_failed():
+             event_status = "failed"
+         elif state.is_completed():
+             event_status = "succeeded"
+         else:
+             return
+
+         # If we have no downstream assets, this not a materialization
+         if not self.downstream_assets:
+             return
+
+         # Emit reference events for all upstream assets (direct + inherited)
+         all_upstream_assets = self.upstream_assets | self.direct_asset_dependencies
+         for asset in all_upstream_assets:
+             emit_event(
+                 event="prefect.asset.referenced",
+                 resource=self.asset_as_resource(asset),
+                 related=[],
+             )
+
+         # Emit materialization events for downstream assets
+         upstream_related = [self.asset_as_related(a) for a in all_upstream_assets]
+
+         if self.materialized_by:
+             upstream_related.append(self.related_materialized_by(self.materialized_by))
+
+         for asset in self.downstream_assets:
+             emit_event(
+                 event=f"prefect.asset.materialization.{event_status}",
+                 resource=self.asset_as_resource(asset),
+                 related=upstream_related,
+                 payload=self.materialization_metadata.get(asset.key),
+             )
+
+     def update_tracked_assets(self) -> None:
+         """
+         Update the flow run context with assets that should be propagated downstream.
+         """
+         if not (flow_run_context := FlowRunContext.get()):
+             return
+
+         if not self.task_run_id:
+             return
+
+         if self.downstream_assets:
+             # MaterializingTask: propagate the downstream assets (what we create)
+             assets_for_downstream = set(self.downstream_assets)
+         else:
+             # Regular task: propagate upstream assets + direct dependencies
+             assets_for_downstream = set(
+                 self.upstream_assets | self.direct_asset_dependencies
+             )
+
+         flow_run_context.task_run_assets[self.task_run_id] = assets_for_downstream
+
+     def serialize(self: Self, include_secrets: bool = True) -> dict[str, Any]:
+         """Serialize the AssetContext for distributed execution."""
+         return self.model_dump(
+             # use json serialization so fields that are
+             # sets of pydantic models are serialized
+             mode="json",
+             exclude_unset=True,
+             serialize_as_any=True,
+             context={"include_secrets": include_secrets},
+         )
+
+
  class TagsContext(ContextModel):
      """
      The context for `prefect.tags` management.
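
To make the event shapes built by `AssetContext` above concrete, here is a hedged sketch of the resource dictionary `asset_as_resource` produces for an asset with a couple of properties set; the asset values are illustrative.

```python
# Sketch of the resource dict built by AssetContext.asset_as_resource above.
from prefect.assets import Asset, AssetProperties
from prefect.context import AssetContext

asset = Asset(
    key="s3://warehouse/orders",  # placeholder key
    properties=AssetProperties(name="Orders", owners=["data-eng"]),
)

resource = AssetContext.asset_as_resource(asset)
# Per the code above, with only name/owners set this yields:
# {
#     "prefect.resource.id": "s3://warehouse/orders",
#     "prefect.resource.name": "Orders",
#     "prefect.asset.owners": '["data-eng"]',
# }
```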
@@ -1222,14 +1222,14 @@ async def deploy(
              " or specify a remote storage location for the flow with `.from_source`."
              " If you are attempting to deploy a flow to a local process work pool,"
              " consider using `flow.serve` instead. See the documentation for more"
-             " information: https://docs.prefect.io/latest/deploy/run-flows-in-local-processes"
+             " information: https://docs.prefect.io/latest/how-to-guides/deployments/run-flows-in-local-processes"
          )
      elif work_pool.type == "process" and not ignore_warnings:
          console.print(
              "Looks like you're deploying to a process work pool. If you're creating a"
              " deployment for local development, calling `.serve` on your flow is a great"
              " way to get started. See the documentation for more information:"
-             " https://docs.prefect.io/latest/deploy/run-flows-in-local-processes "
+             " https://docs.prefect.io/latest/how-to-guides/deployments/run-flows-in-local-processes "
              " Set `ignore_warnings=True` to suppress this message.",
              style="yellow",
          )
prefect/events/clients.py CHANGED
@@ -628,7 +628,7 @@ class PrefectEventSubscriber:
          try:
              await self._reconnect()
          finally:
-             EVENT_WEBSOCKET_CONNECTIONS.labels(self.client_name, "out", "initial")
+             EVENT_WEBSOCKET_CONNECTIONS.labels(self.client_name, "out", "initial").inc()
          return self
 
      async def _reconnect(self) -> None:
@@ -709,7 +709,7 @@ class PrefectEventSubscriber:
          finally:
              EVENT_WEBSOCKET_CONNECTIONS.labels(
                  self.client_name, "out", "reconnect"
-             )
+             ).inc()
          assert self._websocket
 
          while True:
prefect/runner/server.py CHANGED
@@ -257,7 +257,7 @@ def _build_generic_endpoint_for_flows(
  @deprecated_callable(
      start_date=datetime(2025, 4, 1),
      end_date=datetime(2025, 10, 1),
-     help="Use background tasks (https://docs.prefect.io/v3/develop/deferred-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
+     help="Use background tasks (https://docs.prefect.io/v3/concepts/tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
  )
  async def build_server(runner: "Runner") -> FastAPI:
      """
@@ -306,7 +306,7 @@ async def build_server(runner: "Runner") -> FastAPI:
  @deprecated_callable(
      start_date=datetime(2025, 4, 1),
      end_date=datetime(2025, 10, 1),
-     help="Use background tasks (https://docs.prefect.io/v3/develop/deferred-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
+     help="Use background tasks (https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
  )
  def start_webserver(runner: "Runner", log_level: str | None = None) -> None:
      """
prefect/runner/submit.py CHANGED
@@ -124,7 +124,7 @@ def submit_to_runner(
  @deprecated_callable(
      start_date=datetime(2025, 4, 1),
      end_date=datetime(2025, 10, 1),
-     help="Use background tasks (https://docs.prefect.io/v3/develop/deferred-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
+     help="Use background tasks (https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
  )
  @sync_compatible
  async def submit_to_runner(
@@ -196,7 +196,7 @@ async def submit_to_runner(
  @deprecated_callable(
      start_date=datetime(2025, 4, 1),
      end_date=datetime(2025, 10, 1),
-     help="Use background tasks (https://docs.prefect.io/v3/develop/deferred-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
+     help="Use background tasks (https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
  )
  @sync_compatible
  async def wait_for_submitted_runs(
@@ -51,7 +51,7 @@ async def create_events(
      """
      Record a batch of Events.
 
-     For more information, see https://docs.prefect.io/v3/automate/events/events.
+     For more information, see https://docs.prefect.io/v3/concepts/events.
      """
      if ephemeral_request:
          await EventsPipeline().process_events(events)
@@ -23,7 +23,7 @@ async def read_task_workers(
      """
      Read active task workers. Optionally filter by task keys.
 
-     For more information, see https://docs.prefect.io/v3/develop/deferred-tasks.
+     For more information, see https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks.
      """
 
      if task_worker_filter and task_worker_filter.task_keys:
@@ -8,11 +8,17 @@ from prefect.server.utilities.user_templates import (
      validate_user_template,
  )
 
- router: PrefectRouter = PrefectRouter(prefix="/templates", tags=["Automations"])
+ router: PrefectRouter = PrefectRouter(tags=["Automations"])
 
 
+ # deprecated and can be removed after the ui removes its dependency on it
+ # use /templates/validate instead
  @router.post(
-     "/validate",
+     "/automations/templates/validate",
+     response_class=Response,
+ )
+ @router.post(
+     "/templates/validate",
      response_class=Response,
  )
  def validate_template(template: str = Body(default="")) -> Response:
@@ -448,6 +448,32 @@ class ServerServicesTriggersSettings(ServicesBaseSetting):
          ),
      )
 
+     pg_notify_reconnect_interval_seconds: int = Field(
+         default=10,
+         description="""
+         The number of seconds to wait before reconnecting to the PostgreSQL NOTIFY/LISTEN
+         connection after an error. Only used when using PostgreSQL as the database.
+         Defaults to `10`.
+         """,
+         validation_alias=AliasChoices(
+             AliasPath("pg_notify_reconnect_interval_seconds"),
+             "prefect_server_services_triggers_pg_notify_reconnect_interval_seconds",
+         ),
+     )
+
+     pg_notify_heartbeat_interval_seconds: int = Field(
+         default=5,
+         description="""
+         The number of seconds between heartbeat checks for the PostgreSQL NOTIFY/LISTEN
+         connection to ensure it's still alive. Only used when using PostgreSQL as the database.
+         Defaults to `5`.
+         """,
+         validation_alias=AliasChoices(
+             AliasPath("pg_notify_heartbeat_interval_seconds"),
+             "prefect_server_services_triggers_pg_notify_heartbeat_interval_seconds",
+         ),
+     )
+
 
  class ServerServicesSettings(PrefectBaseSettings):
      """