prefect-client 3.1.6__py3-none-any.whl → 3.1.8__py3-none-any.whl

This diff shows the changes between publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (55)
  1. prefect/_experimental/__init__.py +0 -0
  2. prefect/_experimental/lineage.py +181 -0
  3. prefect/_internal/compatibility/async_dispatch.py +38 -9
  4. prefect/_internal/pydantic/v2_validated_func.py +15 -10
  5. prefect/_internal/retries.py +15 -6
  6. prefect/_internal/schemas/bases.py +2 -1
  7. prefect/_internal/schemas/validators.py +5 -4
  8. prefect/_version.py +3 -3
  9. prefect/blocks/core.py +144 -17
  10. prefect/blocks/system.py +2 -1
  11. prefect/client/orchestration.py +106 -0
  12. prefect/client/schemas/actions.py +5 -5
  13. prefect/client/schemas/filters.py +1 -1
  14. prefect/client/schemas/objects.py +5 -5
  15. prefect/client/schemas/responses.py +1 -2
  16. prefect/client/schemas/schedules.py +1 -1
  17. prefect/client/subscriptions.py +2 -1
  18. prefect/client/utilities.py +15 -1
  19. prefect/context.py +1 -1
  20. prefect/deployments/flow_runs.py +3 -3
  21. prefect/deployments/runner.py +14 -14
  22. prefect/deployments/steps/core.py +3 -1
  23. prefect/deployments/steps/pull.py +60 -12
  24. prefect/events/clients.py +55 -4
  25. prefect/events/filters.py +1 -1
  26. prefect/events/related.py +2 -1
  27. prefect/events/schemas/events.py +1 -1
  28. prefect/events/utilities.py +2 -0
  29. prefect/events/worker.py +8 -0
  30. prefect/flow_engine.py +41 -81
  31. prefect/flow_runs.py +4 -2
  32. prefect/flows.py +4 -6
  33. prefect/results.py +43 -22
  34. prefect/runner/runner.py +129 -18
  35. prefect/runner/storage.py +3 -3
  36. prefect/serializers.py +28 -24
  37. prefect/settings/__init__.py +1 -0
  38. prefect/settings/base.py +3 -2
  39. prefect/settings/models/api.py +4 -0
  40. prefect/settings/models/experiments.py +5 -0
  41. prefect/settings/models/runner.py +8 -0
  42. prefect/settings/models/server/api.py +7 -1
  43. prefect/task_engine.py +34 -26
  44. prefect/task_worker.py +43 -25
  45. prefect/tasks.py +118 -125
  46. prefect/telemetry/instrumentation.py +1 -1
  47. prefect/telemetry/processors.py +10 -7
  48. prefect/telemetry/run_telemetry.py +157 -33
  49. prefect/types/__init__.py +4 -1
  50. prefect/variables.py +127 -19
  51. {prefect_client-3.1.6.dist-info → prefect_client-3.1.8.dist-info}/METADATA +2 -1
  52. {prefect_client-3.1.6.dist-info → prefect_client-3.1.8.dist-info}/RECORD +55 -53
  53. {prefect_client-3.1.6.dist-info → prefect_client-3.1.8.dist-info}/LICENSE +0 -0
  54. {prefect_client-3.1.6.dist-info → prefect_client-3.1.8.dist-info}/WHEEL +0 -0
  55. {prefect_client-3.1.6.dist-info → prefect_client-3.1.8.dist-info}/top_level.txt +0 -0
prefect/runner/runner.py CHANGED
@@ -47,11 +47,12 @@ from typing import (
     TYPE_CHECKING,
     Any,
     Callable,
+    Coroutine,
    Dict,
    Iterable,
    List,
    Optional,
-    Set,
+    TypedDict,
    Union,
 )
 from uuid import UUID, uuid4
@@ -59,6 +60,7 @@ from uuid import UUID, uuid4
 import anyio
 import anyio.abc
 import pendulum
+from cachetools import LRUCache
 
 from prefect._internal.concurrency.api import (
     create_call,
@@ -94,8 +96,6 @@ from prefect.logging.loggers import PrefectLogAdapter, flow_run_logger, get_logg
 from prefect.runner.storage import RunnerStorage
 from prefect.settings import (
     PREFECT_API_URL,
-    PREFECT_RUNNER_POLL_FREQUENCY,
-    PREFECT_RUNNER_PROCESS_LIMIT,
     PREFECT_RUNNER_SERVER_ENABLE,
     get_current_settings,
 )
@@ -123,19 +123,25 @@ from prefect.utilities.services import (
 from prefect.utilities.slugify import slugify
 
 if TYPE_CHECKING:
-    from prefect.client.schemas.objects import Deployment
+    from prefect.client.schemas.responses import DeploymentResponse
     from prefect.client.types.flexible_schedule_list import FlexibleScheduleList
     from prefect.deployments.runner import RunnerDeployment
 
 __all__ = ["Runner"]
 
 
+class ProcessMapEntry(TypedDict):
+    flow_run: FlowRun
+    pid: int
+
+
 class Runner:
     def __init__(
         self,
         name: Optional[str] = None,
         query_seconds: Optional[float] = None,
         prefetch_seconds: float = 10,
+        heartbeat_seconds: Optional[float] = None,
         limit: Optional[int] = None,
         pause_on_shutdown: bool = True,
         webserver: bool = False,
@@ -149,6 +155,9 @@ class Runner:
             query_seconds: The number of seconds to wait between querying for
                 scheduled flow runs; defaults to `PREFECT_RUNNER_POLL_FREQUENCY`
             prefetch_seconds: The number of seconds to prefetch flow runs for.
+            heartbeat_seconds: The number of seconds to wait between emitting
+                flow run heartbeats. The runner will not emit heartbeats if the value is None.
+                Defaults to `PREFECT_RUNNER_HEARTBEAT_FREQUENCY`.
             limit: The maximum number of flow runs this runner should be running at
             pause_on_shutdown: A boolean for whether or not to automatically pause
                 deployment schedules on shutdown; defaults to `True`
@@ -180,6 +189,8 @@ class Runner:
             asyncio.run(runner.start())
             ```
         """
+        settings = get_current_settings()
+
         if name and ("/" in name or "%" in name):
             raise ValueError("Runner name cannot contain '/' or '%'")
         self.name = Path(name).stem if name is not None else f"runner-{uuid4()}"
@@ -188,19 +199,24 @@
         self.started = False
         self.stopping = False
         self.pause_on_shutdown = pause_on_shutdown
-        self.limit = limit or PREFECT_RUNNER_PROCESS_LIMIT.value()
+        self.limit = limit or settings.runner.process_limit
         self.webserver = webserver
 
-        self.query_seconds = query_seconds or PREFECT_RUNNER_POLL_FREQUENCY.value()
+        self.query_seconds = query_seconds or settings.runner.poll_frequency
         self._prefetch_seconds = prefetch_seconds
+        self.heartbeat_seconds = (
+            heartbeat_seconds or settings.runner.heartbeat_frequency
+        )
+        if self.heartbeat_seconds is not None and self.heartbeat_seconds < 30:
+            raise ValueError("Heartbeat must be 30 seconds or greater.")
 
         self._limiter: Optional[anyio.CapacityLimiter] = None
         self._client = get_client()
-        self._submitting_flow_run_ids = set()
-        self._cancelling_flow_run_ids = set()
-        self._scheduled_task_scopes = set()
-        self._deployment_ids: Set[UUID] = set()
-        self._flow_run_process_map: dict[UUID, dict[str, Any]] = dict()
+        self._submitting_flow_run_ids: set[UUID] = set()
+        self._cancelling_flow_run_ids: set[UUID] = set()
+        self._scheduled_task_scopes: set[UUID] = set()
+        self._deployment_ids: set[UUID] = set()
+        self._flow_run_process_map: dict[UUID, ProcessMapEntry] = dict()
 
         self._tmp_dir: Path = (
             Path(tempfile.gettempdir()) / "runner_storage" / str(uuid4())
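Note: the constructor changes above make heartbeats strictly opt-in — `heartbeat_seconds` falls back to `settings.runner.heartbeat_frequency`, and `None` (the default) disables emission entirely. A minimal sketch of the new validation, assuming `Runner` is importable from `prefect.runner` as in the class docstring's example:

```python
from prefect.runner import Runner

# Heartbeats stay off unless a frequency is supplied here or via settings.
runner = Runner(name="my-runner", heartbeat_seconds=30)

# Anything under the 30-second floor is rejected at construction time:
try:
    Runner(name="too-chatty", heartbeat_seconds=10)
except ValueError as exc:
    print(exc)  # Heartbeat must be 30 seconds or greater.
```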
@@ -210,6 +226,12 @@
 
         self._loop: Optional[asyncio.AbstractEventLoop] = None
 
+        # Caching
+        self._deployment_cache: LRUCache[UUID, "DeploymentResponse"] = LRUCache(
+            maxsize=100
+        )
+        self._flow_cache: LRUCache[UUID, "APIFlow"] = LRUCache(maxsize=100)
+
     @sync_compatible
     async def add_deployment(
         self,
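Note: the two `LRUCache` instances bound memory while letting the runner skip repeated `read_deployment`/`read_flow` API calls for runs it is already tracking. A small, self-contained sketch of the `cachetools` eviction behavior being relied on here:

```python
from uuid import uuid4

from cachetools import LRUCache

cache: "LRUCache[object, str]" = LRUCache(maxsize=2)
a, b, c = uuid4(), uuid4(), uuid4()

cache[a] = "deployment-a"
cache[b] = "deployment-b"
cache[c] = "deployment-c"  # exceeds maxsize, evicting the least recently used key (a)

assert cache.get(a) is None             # evicted
assert cache.get(b) == "deployment-b"   # still cached
```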
@@ -234,7 +256,7 @@
     @sync_compatible
     async def add_flow(
         self,
-        flow: Flow,
+        flow: Flow[Any, Any],
         name: Optional[str] = None,
         interval: Optional[
             Union[
@@ -249,7 +271,7 @@
         paused: Optional[bool] = None,
         schedules: Optional["FlexibleScheduleList"] = None,
         concurrency_limit: Optional[Union[int, ConcurrencyLimitConfig, None]] = None,
-        parameters: Optional[dict] = None,
+        parameters: Optional[dict[str, Any]] = None,
         triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
@@ -336,7 +358,7 @@
         else:
             return next(s for s in self._storage_objs if s == storage)
 
-    def handle_sigterm(self, signum, frame):
+    def handle_sigterm(self, **kwargs: Any) -> None:
         """
         Gracefully shuts down the runner when a SIGTERM is received.
         """
@@ -441,6 +463,16 @@
                     jitter_range=0.3,
                 )
             )
+            if self.heartbeat_seconds is not None:
+                loops_task_group.start_soon(
+                    partial(
+                        critical_service_loop,
+                        workload=runner._emit_flow_run_heartbeats,
+                        interval=self.heartbeat_seconds,
+                        run_once=run_once,
+                        jitter_range=0.3,
+                    )
+                )
 
     def execute_in_background(
         self, func: Callable[..., Any], *args: Any, **kwargs: Any
@@ -538,6 +570,15 @@
                     jitter_range=0.3,
                 )
             )
+            if self.heartbeat_seconds is not None:
+                tg.start_soon(
+                    partial(
+                        critical_service_loop,
+                        workload=self._emit_flow_run_heartbeats,
+                        interval=self.heartbeat_seconds,
+                        jitter_range=0.3,
+                    )
+                )
 
     def _get_flow_run_logger(self, flow_run: "FlowRun") -> PrefectLogAdapter:
         return flow_run_logger(flow_run=flow_run).getChild(
@@ -850,18 +891,84 @@
                 "message": state_msg or "Flow run was cancelled successfully."
             },
         )
+
+        flow, deployment = await self._get_flow_and_deployment(flow_run)
+        self._emit_flow_run_cancelled_event(
+            flow_run=flow_run, flow=flow, deployment=deployment
+        )
+        run_logger.info(f"Cancelled flow run '{flow_run.name}'!")
+
+    async def _get_flow_and_deployment(
+        self, flow_run: "FlowRun"
+    ) -> tuple[Optional["APIFlow"], Optional["DeploymentResponse"]]:
+        deployment: Optional["DeploymentResponse"] = (
+            self._deployment_cache.get(flow_run.deployment_id)
+            if flow_run.deployment_id
+            else None
+        )
+        flow: Optional["APIFlow"] = self._flow_cache.get(flow_run.flow_id)
+        if not deployment and flow_run.deployment_id is not None:
             try:
                 deployment = await self._client.read_deployment(flow_run.deployment_id)
+                self._deployment_cache[flow_run.deployment_id] = deployment
             except ObjectNotFound:
                 deployment = None
+        if not flow:
             try:
                 flow = await self._client.read_flow(flow_run.flow_id)
+                self._flow_cache[flow_run.flow_id] = flow
             except ObjectNotFound:
                 flow = None
-        self._emit_flow_run_cancelled_event(
-            flow_run=flow_run, flow=flow, deployment=deployment
-        )
-        run_logger.info(f"Cancelled flow run '{flow_run.name}'!")
+        return flow, deployment
+
+    async def _emit_flow_run_heartbeats(self):
+        coros: list[Coroutine[Any, Any, Any]] = []
+        for entry in self._flow_run_process_map.values():
+            coros.append(self._emit_flow_run_heartbeat(entry["flow_run"]))
+        await asyncio.gather(*coros)
+
+    async def _emit_flow_run_heartbeat(self, flow_run: "FlowRun"):
+        from prefect import __version__
+
+        related: list[RelatedResource] = []
+        tags: list[str] = []
+
+        flow, deployment = await self._get_flow_and_deployment(flow_run)
+        if deployment:
+            related.append(
+                RelatedResource(
+                    {
+                        "prefect.resource.id": f"prefect.deployment.{deployment.id}",
+                        "prefect.resource.role": "deployment",
+                        "prefect.resource.name": deployment.name,
+                    }
+                )
+            )
+            tags.extend(deployment.tags)
+        if flow:
+            related.append(
+                RelatedResource(
+                    {
+                        "prefect.resource.id": f"prefect.flow.{flow.id}",
+                        "prefect.resource.role": "flow",
+                        "prefect.resource.name": flow.name,
+                    }
+                )
+            )
+        tags.extend(flow_run.tags)
+
+        related = [RelatedResource.model_validate(r) for r in related]
+        related += tags_as_related_resources(set(tags))
+
+        emit_event(
+            event="prefect.flow-run.heartbeat",
+            resource={
+                "prefect.resource.id": f"prefect.flow-run.{flow_run.id}",
+                "prefect.resource.name": flow_run.name,
+                "prefect.version": __version__,
+            },
+            related=related,
+        )
 
     def _event_resource(self):
         from prefect import __version__
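Note: `_emit_flow_run_heartbeat` reuses the cached flow and deployment to decorate each `prefect.flow-run.heartbeat` event with related resources. For reference, a hedged sketch of an equivalent standalone emission (assuming `emit_event` is re-exported at `prefect.events`, with placeholder IDs and values):

```python
from uuid import uuid4

from prefect.events import emit_event

flow_run_id = uuid4()  # placeholder for a real flow run ID

emit_event(
    event="prefect.flow-run.heartbeat",
    resource={
        "prefect.resource.id": f"prefect.flow-run.{flow_run_id}",
        "prefect.resource.name": "my-flow-run",
        "prefect.version": "3.1.8",
    },
    # The runner also attaches deployment, flow, and tag resources here.
    related=[],
)
```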
@@ -876,7 +983,7 @@
         self,
         flow_run: "FlowRun",
         flow: "Optional[APIFlow]",
-        deployment: "Optional[Deployment]",
+        deployment: "Optional[DeploymentResponse]",
     ):
         related: list[RelatedResource] = []
         tags: list[str] = []
@@ -920,6 +1027,7 @@
             resource=self._event_resource(),
             related=related,
         )
+        self._logger.debug(f"Emitted flow run heartbeat event for {flow_run.id}")
 
     async def _get_scheduled_flow_runs(
         self,
@@ -1052,6 +1160,9 @@
             self._flow_run_process_map[flow_run.id] = dict(
                 pid=readiness_result, flow_run=flow_run
             )
+            # Heartbeats are opt-in and only emitted if a heartbeat frequency is set
+            if self.heartbeat_seconds is not None:
+                await self._emit_flow_run_heartbeat(flow_run)
 
             run_logger.info(f"Completed submission of flow run '{flow_run.id}'")
         else:
prefect/runner/storage.py CHANGED
@@ -53,14 +53,14 @@ class RunnerStorage(Protocol):
         """
         ...
 
-    def to_pull_step(self) -> dict:
+    def to_pull_step(self) -> dict[str, Any]:
         """
         Returns a dictionary representation of the storage object that can be
         used as a deployment pull step.
         """
         ...
 
-    def __eq__(self, __value) -> bool:
+    def __eq__(self, __value: Any) -> bool:
         """
         Equality check for runner storage objects.
         """
@@ -69,7 +69,7 @@
 
 class GitCredentials(TypedDict, total=False):
     username: str
-    access_token: Union[str, Secret]
+    access_token: Union[str, Secret[str]]
 
 
 class GitRepository:
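Note: with this change a `GitCredentials` mapping can carry the token either as a plain string or wrapped in a parametrized secret (this appears to be pydantic's generic `Secret` type, which masks the value in reprs and logs). A hedged usage sketch with placeholder values, assuming `GitRepository` accepts `url` and `credentials` keyword arguments:

```python
from pydantic import Secret

from prefect.runner.storage import GitCredentials, GitRepository

credentials: GitCredentials = {
    "username": "octocat",                        # placeholder username
    "access_token": Secret("ghp_example_token"),  # masked in reprs and logs
}

repo = GitRepository(
    url="https://github.com/example/repo.git",  # placeholder repository
    credentials=credentials,
)
```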
prefect/serializers.py CHANGED
@@ -13,7 +13,7 @@ bytes to an object respectively.
 
 import abc
 import base64
-from typing import Any, Dict, Generic, Optional, Type
+from typing import Any, Generic, Optional, Type, Union
 
 from pydantic import (
     BaseModel,
@@ -23,7 +23,7 @@ from pydantic import (
     ValidationError,
     field_validator,
 )
-from typing_extensions import Literal, Self, TypeVar
+from typing_extensions import Self, TypeVar
 
 from prefect._internal.schemas.validators import (
     cast_type_names_to_serializers,
@@ -54,7 +54,7 @@ def prefect_json_object_encoder(obj: Any) -> Any:
     }
 
 
-def prefect_json_object_decoder(result: dict):
+def prefect_json_object_decoder(result: dict[str, Any]):
     """
     `JSONDecoder.object_hook` for decoding objects from JSON when previously encoded
     with `prefect_json_object_encoder`
@@ -80,12 +80,16 @@ class Serializer(BaseModel, Generic[D], abc.ABC):
         data.setdefault("type", type_string)
         super().__init__(**data)
 
-    def __new__(cls: Type[Self], **kwargs) -> Self:
+    def __new__(cls: Type[Self], **kwargs: Any) -> Self:
         if "type" in kwargs:
             try:
                 subcls = lookup_type(cls, dispatch_key=kwargs["type"])
             except KeyError as exc:
-                raise ValidationError(errors=[exc], model=cls)
+                raise ValidationError.from_exception_data(
+                    title=cls.__name__,
+                    line_errors=[{"type": str(exc), "input": kwargs["type"]}],
+                    input_type="python",
+                )
 
             return super().__new__(subcls)
         else:
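Note: `ValidationError(errors=..., model=...)` is a pydantic v1 constructor signature that no longer exists in v2, so an unknown dispatch key previously crashed while trying to build the error itself; `ValidationError.from_exception_data` is the supported v2 way to construct one. The dispatch path for registered keys is unchanged, e.g.:

```python
from prefect.serializers import JSONSerializer, Serializer

# Passing a registered dispatch key to the base class yields the subclass:
s = Serializer(type="json")
assert isinstance(s, JSONSerializer)
```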
@@ -104,7 +108,7 @@ class Serializer(BaseModel, Generic[D], abc.ABC):
     model_config = ConfigDict(extra="forbid")
 
     @classmethod
-    def __dispatch_key__(cls) -> str:
+    def __dispatch_key__(cls) -> Optional[str]:
        type_str = cls.model_fields["type"].default
        return type_str if isinstance(type_str, str) else None
@@ -119,19 +123,15 @@ class PickleSerializer(Serializer):
     - Wraps pickles in base64 for safe transmission.
     """
 
-    type: Literal["pickle"] = "pickle"
+    type: str = Field(default="pickle", frozen=True)
 
     picklelib: str = "cloudpickle"
     picklelib_version: Optional[str] = None
 
     @field_validator("picklelib")
-    def check_picklelib(cls, value):
+    def check_picklelib(cls, value: str) -> str:
         return validate_picklelib(value)
 
-    # @model_validator(mode="before")
-    # def check_picklelib_version(cls, values):
-    #     return validate_picklelib_version(values)
-
     def dumps(self, obj: Any) -> bytes:
         pickler = from_qualified_name(self.picklelib)
         blob = pickler.dumps(obj)
@@ -151,7 +151,7 @@ class JSONSerializer(Serializer):
     Wraps the `json` library to serialize to UTF-8 bytes instead of string types.
     """
 
-    type: Literal["json"] = "json"
+    type: str = Field(default="json", frozen=True)
 
     jsonlib: str = "json"
     object_encoder: Optional[str] = Field(
@@ -171,23 +171,27 @@
             "by our default `object_encoder`."
         ),
     )
-    dumps_kwargs: Dict[str, Any] = Field(default_factory=dict)
-    loads_kwargs: Dict[str, Any] = Field(default_factory=dict)
+    dumps_kwargs: dict[str, Any] = Field(default_factory=dict)
+    loads_kwargs: dict[str, Any] = Field(default_factory=dict)
 
     @field_validator("dumps_kwargs")
-    def dumps_kwargs_cannot_contain_default(cls, value):
+    def dumps_kwargs_cannot_contain_default(
+        cls, value: dict[str, Any]
+    ) -> dict[str, Any]:
         return validate_dump_kwargs(value)
 
     @field_validator("loads_kwargs")
-    def loads_kwargs_cannot_contain_object_hook(cls, value):
+    def loads_kwargs_cannot_contain_object_hook(
+        cls, value: dict[str, Any]
+    ) -> dict[str, Any]:
         return validate_load_kwargs(value)
 
-    def dumps(self, data: Any) -> bytes:
+    def dumps(self, obj: Any) -> bytes:
         json = from_qualified_name(self.jsonlib)
         kwargs = self.dumps_kwargs.copy()
         if self.object_encoder:
             kwargs["default"] = from_qualified_name(self.object_encoder)
-        result = json.dumps(data, **kwargs)
+        result = json.dumps(obj, **kwargs)
         if isinstance(result, str):
             # The standard library returns str but others may return bytes directly
             result = result.encode()
213
217
  level: If not null, the level of compression to pass to `compress`.
214
218
  """
215
219
 
216
- type: Literal["compressed"] = "compressed"
220
+ type: str = Field(default="compressed", frozen=True)
217
221
 
218
222
  serializer: Serializer
219
223
  compressionlib: str = "lzma"
220
224
 
221
225
  @field_validator("serializer", mode="before")
222
- def validate_serializer(cls, value):
226
+ def validate_serializer(cls, value: Union[str, Serializer]) -> Serializer:
223
227
  return cast_type_names_to_serializers(value)
224
228
 
225
229
  @field_validator("compressionlib")
226
- def check_compressionlib(cls, value):
230
+ def check_compressionlib(cls, value: str) -> str:
227
231
  return validate_compressionlib(value)
228
232
 
229
233
  def dumps(self, obj: Any) -> bytes:
@@ -242,7 +246,7 @@ class CompressedPickleSerializer(CompressedSerializer):
242
246
  A compressed serializer preconfigured to use the pickle serializer.
243
247
  """
244
248
 
245
- type: Literal["compressed/pickle"] = "compressed/pickle"
249
+ type: str = Field(default="compressed/pickle", frozen=True)
246
250
 
247
251
  serializer: Serializer = Field(default_factory=PickleSerializer)
248
252
 
@@ -252,6 +256,6 @@ class CompressedJSONSerializer(CompressedSerializer):
252
256
  A compressed serializer preconfigured to use the json serializer.
253
257
  """
254
258
 
255
- type: Literal["compressed/json"] = "compressed/json"
259
+ type: str = Field(default="compressed/json", frozen=True)
256
260
 
257
261
  serializer: Serializer = Field(default_factory=JSONSerializer)
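Note: moving the `type` discriminators from `Literal[...]` annotations to `str` fields with `frozen=True` keeps them immutable at runtime while loosening the declared type, which is friendlier to subclassing and the dispatch registry. Round-trip behavior is unchanged; a quick check:

```python
from prefect.serializers import CompressedJSONSerializer, JSONSerializer

payload = {"nested": [1, 2, 3]}

s = JSONSerializer()
assert s.loads(s.dumps(payload)) == payload

# Compressed variants wrap another serializer behind an lzma-compressed blob:
cs = CompressedJSONSerializer()
assert cs.loads(cs.dumps(payload)) == payload
```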
prefect/settings/__init__.py CHANGED
@@ -53,6 +53,7 @@ __all__ = [ # noqa: F822
     "temporary_settings",
     "DEFAULT_PROFILES_PATH",
     # add public settings here for auto-completion
+    "PREFECT_API_AUTH_STRING",  # type: ignore
     "PREFECT_API_KEY",  # type: ignore
     "PREFECT_API_URL",  # type: ignore
     "PREFECT_UI_URL",  # type: ignore
prefect/settings/base.py CHANGED
@@ -192,7 +192,7 @@ def _add_environment_variables(
 
 
 def _build_settings_config(
-    path: Tuple[str, ...] = tuple(),
+    path: Tuple[str, ...] = tuple(), frozen: bool = False
 ) -> PrefectSettingsConfigDict:
     env_prefix = f"PREFECT_{'_'.join(path).upper()}_" if path else "PREFECT_"
     return PrefectSettingsConfigDict(
@@ -202,7 +202,8 @@
         toml_file="prefect.toml",
         prefect_toml_table_header=path,
         pyproject_toml_table_header=("tool", "prefect", *path),
-        json_schema_extra=_add_environment_variables,
+        json_schema_extra=_add_environment_variables,  # type: ignore
+        frozen=frozen,
     )
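Note: the `env_prefix` expression above determines the environment variable name for every nested settings field — join the model's path with underscores, uppercase it, and prepend `PREFECT_`. A self-contained illustration of that rule:

```python
def env_prefix(path: tuple[str, ...]) -> str:
    # Mirrors the expression in _build_settings_config above.
    return f"PREFECT_{'_'.join(path).upper()}_" if path else "PREFECT_"

assert env_prefix(()) == "PREFECT_"                            # top-level settings
assert env_prefix(("runner",)) == "PREFECT_RUNNER_"            # e.g. PREFECT_RUNNER_HEARTBEAT_FREQUENCY
assert env_prefix(("server", "api")) == "PREFECT_SERVER_API_"  # e.g. PREFECT_SERVER_API_AUTH_STRING
```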
prefect/settings/models/api.py CHANGED
@@ -19,6 +19,10 @@ class APISettings(PrefectBaseSettings):
         default=None,
         description="The URL of the Prefect API. If not set, the client will attempt to infer it.",
     )
+    auth_string: Optional[SecretStr] = Field(
+        default=None,
+        description="The auth string used for basic authentication with a self-hosted Prefect API. Should be kept secret.",
+    )
     key: Optional[SecretStr] = Field(
         default=None,
         description="The API key used for authentication with the Prefect API. Should be kept secret.",
prefect/settings/models/experiments.py CHANGED
@@ -22,3 +22,8 @@ class ExperimentsSettings(PrefectBaseSettings):
         default=False,
         description="Enables sending telemetry to Prefect Cloud.",
     )
+
+    lineage_events_enabled: bool = Field(
+        default=False,
+        description="If `True`, enables emitting lineage events. Set to `False` to disable lineage event emission.",
+    )
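Note: this flag gates the new `prefect/_experimental/lineage.py` module (file 2 in the list above). By the env-prefix rule it should map to `PREFECT_EXPERIMENTS_LINEAGE_EVENTS_ENABLED` (name derived from that rule, not confirmed in this diff):

```python
import os

# Opt in to experimental lineage event emission; set before Prefect loads settings.
os.environ["PREFECT_EXPERIMENTS_LINEAGE_EVENTS_ENABLED"] = "true"
```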
prefect/settings/models/runner.py CHANGED
@@ -1,3 +1,5 @@
+from typing import Optional
+
 from pydantic import Field
 
 from prefect.settings.base import PrefectBaseSettings, _build_settings_config
@@ -54,6 +56,12 @@ class RunnerSettings(PrefectBaseSettings):
         description="Number of seconds a runner should wait between queries for scheduled work.",
     )
 
+    heartbeat_frequency: Optional[int] = Field(
+        default=None,
+        description="Number of seconds a runner should wait between heartbeats for flow runs.",
+        ge=30,
+    )
+
     server: RunnerServerSettings = Field(
         default_factory=RunnerServerSettings,
         description="Settings for controlling runner server behavior",
prefect/settings/models/server/api.py CHANGED
@@ -1,6 +1,7 @@
 from datetime import timedelta
+from typing import Optional
 
-from pydantic import AliasChoices, AliasPath, Field
+from pydantic import AliasChoices, AliasPath, Field, SecretStr
 
 from prefect.settings.base import PrefectBaseSettings, _build_settings_config
 
@@ -12,6 +13,11 @@ class ServerAPISettings(PrefectBaseSettings):
 
     model_config = _build_settings_config(("server", "api"))
 
+    auth_string: Optional[SecretStr] = Field(
+        default=None,
+        description="A string to use for basic authentication with the API; typically in the form 'user:password' but can be any string.",
+    )
+
     host: str = Field(
         default="127.0.0.1",
         description="The API's host address (defaults to `127.0.0.1`).",