prefect-client 3.1.5__py3-none-any.whl → 3.1.7__py3-none-any.whl

This diff compares the contents of two publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registry.
Files changed (114)
  1. prefect/__init__.py +3 -0
  2. prefect/_experimental/__init__.py +0 -0
  3. prefect/_experimental/lineage.py +181 -0
  4. prefect/_internal/compatibility/async_dispatch.py +38 -9
  5. prefect/_internal/compatibility/migration.py +1 -1
  6. prefect/_internal/concurrency/api.py +52 -52
  7. prefect/_internal/concurrency/calls.py +59 -35
  8. prefect/_internal/concurrency/cancellation.py +34 -18
  9. prefect/_internal/concurrency/event_loop.py +7 -6
  10. prefect/_internal/concurrency/threads.py +41 -33
  11. prefect/_internal/concurrency/waiters.py +28 -21
  12. prefect/_internal/pydantic/v1_schema.py +2 -2
  13. prefect/_internal/pydantic/v2_schema.py +10 -9
  14. prefect/_internal/pydantic/v2_validated_func.py +15 -10
  15. prefect/_internal/retries.py +15 -6
  16. prefect/_internal/schemas/bases.py +11 -8
  17. prefect/_internal/schemas/validators.py +7 -5
  18. prefect/_version.py +3 -3
  19. prefect/automations.py +53 -47
  20. prefect/blocks/abstract.py +12 -10
  21. prefect/blocks/core.py +148 -19
  22. prefect/blocks/system.py +2 -1
  23. prefect/cache_policies.py +11 -11
  24. prefect/client/__init__.py +3 -1
  25. prefect/client/base.py +36 -37
  26. prefect/client/cloud.py +26 -19
  27. prefect/client/collections.py +2 -2
  28. prefect/client/orchestration.py +430 -273
  29. prefect/client/schemas/__init__.py +24 -0
  30. prefect/client/schemas/actions.py +128 -121
  31. prefect/client/schemas/filters.py +1 -1
  32. prefect/client/schemas/objects.py +114 -85
  33. prefect/client/schemas/responses.py +19 -20
  34. prefect/client/schemas/schedules.py +136 -93
  35. prefect/client/subscriptions.py +30 -15
  36. prefect/client/utilities.py +46 -36
  37. prefect/concurrency/asyncio.py +6 -9
  38. prefect/concurrency/sync.py +35 -5
  39. prefect/context.py +40 -32
  40. prefect/deployments/flow_runs.py +6 -8
  41. prefect/deployments/runner.py +14 -14
  42. prefect/deployments/steps/core.py +3 -1
  43. prefect/deployments/steps/pull.py +60 -12
  44. prefect/docker/__init__.py +1 -1
  45. prefect/events/clients.py +55 -4
  46. prefect/events/filters.py +1 -1
  47. prefect/events/related.py +2 -1
  48. prefect/events/schemas/events.py +26 -21
  49. prefect/events/utilities.py +3 -2
  50. prefect/events/worker.py +8 -0
  51. prefect/filesystems.py +3 -3
  52. prefect/flow_engine.py +87 -87
  53. prefect/flow_runs.py +7 -5
  54. prefect/flows.py +218 -176
  55. prefect/logging/configuration.py +1 -1
  56. prefect/logging/highlighters.py +1 -2
  57. prefect/logging/loggers.py +30 -20
  58. prefect/main.py +17 -24
  59. prefect/results.py +43 -22
  60. prefect/runner/runner.py +43 -21
  61. prefect/runner/server.py +30 -32
  62. prefect/runner/storage.py +3 -3
  63. prefect/runner/submit.py +3 -6
  64. prefect/runner/utils.py +6 -6
  65. prefect/runtime/flow_run.py +7 -0
  66. prefect/serializers.py +28 -24
  67. prefect/settings/constants.py +2 -2
  68. prefect/settings/legacy.py +1 -1
  69. prefect/settings/models/experiments.py +5 -0
  70. prefect/settings/models/server/events.py +10 -0
  71. prefect/task_engine.py +87 -26
  72. prefect/task_runners.py +2 -2
  73. prefect/task_worker.py +43 -25
  74. prefect/tasks.py +148 -142
  75. prefect/telemetry/bootstrap.py +15 -2
  76. prefect/telemetry/instrumentation.py +1 -1
  77. prefect/telemetry/processors.py +10 -7
  78. prefect/telemetry/run_telemetry.py +231 -0
  79. prefect/transactions.py +14 -14
  80. prefect/types/__init__.py +5 -5
  81. prefect/utilities/_engine.py +96 -0
  82. prefect/utilities/annotations.py +25 -18
  83. prefect/utilities/asyncutils.py +126 -140
  84. prefect/utilities/callables.py +87 -78
  85. prefect/utilities/collections.py +278 -117
  86. prefect/utilities/compat.py +13 -21
  87. prefect/utilities/context.py +6 -5
  88. prefect/utilities/dispatch.py +23 -12
  89. prefect/utilities/dockerutils.py +33 -32
  90. prefect/utilities/engine.py +126 -239
  91. prefect/utilities/filesystem.py +18 -15
  92. prefect/utilities/hashing.py +10 -11
  93. prefect/utilities/importtools.py +40 -27
  94. prefect/utilities/math.py +9 -5
  95. prefect/utilities/names.py +3 -3
  96. prefect/utilities/processutils.py +121 -57
  97. prefect/utilities/pydantic.py +41 -36
  98. prefect/utilities/render_swagger.py +22 -12
  99. prefect/utilities/schema_tools/__init__.py +2 -1
  100. prefect/utilities/schema_tools/hydration.py +50 -43
  101. prefect/utilities/schema_tools/validation.py +52 -42
  102. prefect/utilities/services.py +13 -12
  103. prefect/utilities/templating.py +45 -45
  104. prefect/utilities/text.py +2 -1
  105. prefect/utilities/timeout.py +4 -4
  106. prefect/utilities/urls.py +9 -4
  107. prefect/utilities/visualization.py +46 -24
  108. prefect/variables.py +136 -27
  109. prefect/workers/base.py +15 -8
  110. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/METADATA +5 -2
  111. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/RECORD +114 -110
  112. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/LICENSE +0 -0
  113. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/WHEEL +0 -0
  114. {prefect_client-3.1.5.dist-info → prefect_client-3.1.7.dist-info}/top_level.txt +0 -0
prefect/logging/configuration.py CHANGED
@@ -58,7 +58,7 @@ def load_logging_config(path: Path) -> dict:
     return flatdict_to_dict(flat_config)
 
 
-def setup_logging(incremental: Optional[bool] = None) -> dict:
+def setup_logging(incremental: Optional[bool] = None) -> dict[str, Any]:
    """
    Sets up logging.
 
prefect/logging/highlighters.py CHANGED
@@ -45,8 +45,7 @@ class StateHighlighter(RegexHighlighter):
 
     base_style = "state."
     highlights = [
-        rf"(?P<{state.value.lower()}_state>{state.value.title()})"
-        for state in StateType
+        rf"(?P<{state.lower()}_state>{state.title()})" for state in StateType
     ] + [
         r"(?P<cached_state>Cached)(?=\(type=COMPLETED\))"  # Highlight only "Cached"
     ]
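
Note: the one-line comprehension above works because `StateType` is a string-backed enum, so each member inherits `str` methods directly and the `.value` hop is redundant. A standalone sketch of the behavior (this is not Prefect's actual enum definition):

from enum import Enum


class StateType(str, Enum):
    COMPLETED = "COMPLETED"
    FAILED = "FAILED"


# str-backed members answer str methods directly, so these are equivalent:
assert StateType.COMPLETED.lower() == StateType.COMPLETED.value.lower() == "completed"
assert StateType.FAILED.title() == StateType.FAILED.value.title() == "Failed"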
prefect/logging/loggers.py CHANGED
@@ -4,12 +4,11 @@ import sys
 from builtins import print
 from contextlib import contextmanager
 from functools import lru_cache
-from logging import LoggerAdapter, LogRecord
-from typing import TYPE_CHECKING, Dict, List, Optional, Union
+from logging import LogRecord
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
 
 from typing_extensions import Self
 
-import prefect
 from prefect.exceptions import MissingContextError
 from prefect.logging.filters import ObfuscateApiKeyFilter
 from prefect.telemetry.logging import add_telemetry_log_handler
@@ -22,8 +21,13 @@ if TYPE_CHECKING:
     from prefect.tasks import Task
     from prefect.workers.base import BaseWorker
 
+if sys.version_info >= (3, 12):
+    LoggingAdapter = logging.LoggerAdapter[logging.Logger]
+else:
+    LoggingAdapter = logging.LoggerAdapter
 
-class PrefectLogAdapter(logging.LoggerAdapter):
+
+class PrefectLogAdapter(LoggingAdapter):
    """
    Adapter that ensures extra kwargs are passed through correctly; without this
    the `extra` fields set on the adapter would overshadow any provided on a
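
Note: the version-gated alias exists because subscripting `logging.LoggerAdapter` at runtime raises `TypeError` on older interpreters; only newer ones accept `LoggerAdapter[...]`. The same pattern in isolation (the adapter name and its `process` behavior are illustrative, not Prefect's implementation):

import logging
import sys

# Subscript only where the runtime supports it; older Pythons get the plain class.
if sys.version_info >= (3, 12):
    _Adapter = logging.LoggerAdapter[logging.Logger]
else:
    _Adapter = logging.LoggerAdapter


class ExtraMergingAdapter(_Adapter):
    # Merge adapter-level `extra` with call-site `extra` instead of replacing it.
    def process(self, msg, kwargs):
        kwargs["extra"] = {**(self.extra or {}), **(kwargs.get("extra") or {})}
        return msg, kwargs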
@@ -83,8 +87,8 @@ def get_logger(name: Optional[str] = None) -> logging.Logger:
 
 
 def get_run_logger(
-    context: Optional["RunContext"] = None, **kwargs: str
-) -> Union[logging.Logger, logging.LoggerAdapter]:
+    context: Optional["RunContext"] = None, **kwargs: Any
+) -> Union[logging.Logger, LoggingAdapter]:
    """
    Get a Prefect logger for the current task run or flow run.
 
@@ -103,15 +107,17 @@ def get_run_logger(
     Raises:
         MissingContextError: If no context can be found
     """
+    from prefect.context import FlowRunContext, TaskRunContext
+
     # Check for existing contexts
-    task_run_context = prefect.context.TaskRunContext.get()
-    flow_run_context = prefect.context.FlowRunContext.get()
+    task_run_context = TaskRunContext.get()
+    flow_run_context = FlowRunContext.get()
 
     # Apply the context override
     if context:
-        if isinstance(context, prefect.context.FlowRunContext):
+        if isinstance(context, FlowRunContext):
             flow_run_context = context
-        elif isinstance(context, prefect.context.TaskRunContext):
+        elif isinstance(context, TaskRunContext):
             task_run_context = context
         else:
             raise TypeError(
@@ -130,7 +136,9 @@ def get_run_logger(
         )
     elif flow_run_context:
         logger = flow_run_logger(
-            flow_run=flow_run_context.flow_run, flow=flow_run_context.flow, **kwargs
+            flow_run=flow_run_context.flow_run,  # type: ignore
+            flow=flow_run_context.flow,
+            **kwargs,
         )
     elif (
         get_logger("prefect.flow_run").disabled
@@ -151,9 +159,9 @@ def get_run_logger(
 
 def flow_run_logger(
     flow_run: Union["FlowRun", "ClientFlowRun"],
-    flow: Optional["Flow"] = None,
+    flow: Optional["Flow[Any, Any]"] = None,
     **kwargs: str,
-) -> LoggerAdapter:
+) -> LoggingAdapter:
    """
    Create a flow run logger with the run's metadata attached.
 
@@ -177,10 +185,10 @@ def flow_run_logger(
 
 def task_run_logger(
     task_run: "TaskRun",
-    task: "Task" = None,
-    flow_run: "FlowRun" = None,
-    flow: "Flow" = None,
-    **kwargs: str,
+    task: Optional["Task[Any, Any]"] = None,
+    flow_run: Optional["FlowRun"] = None,
+    flow: Optional["Flow[Any, Any]"] = None,
+    **kwargs: Any,
 ):
    """
    Create a task run logger with the run's metadata attached.
@@ -193,8 +201,10 @@ def task_run_logger(
     If only the flow run context is available, it will be used for default values
     of `flow_run` and `flow`.
     """
+    from prefect.context import FlowRunContext
+
     if not flow_run or not flow:
-        flow_run_context = prefect.context.FlowRunContext.get()
+        flow_run_context = FlowRunContext.get()
         if flow_run_context:
             flow_run = flow_run or flow_run_context.flow_run
             flow = flow or flow_run_context.flow
@@ -269,7 +279,7 @@ def disable_run_logger():
         yield
 
 
-def print_as_log(*args, **kwargs):
+def print_as_log(*args: Any, **kwargs: Any) -> None:
    """
    A patch for `print` to send printed messages to the Prefect run logger.
 
@@ -333,7 +343,7 @@ class LogEavesdropper(logging.Handler):
         # Outputs: "Hello, world!\nAnother one!"
     """
 
-    _target_logger: logging.Logger
+    _target_logger: Optional[logging.Logger]
     _lines: List[str]
 
     def __init__(self, eavesdrop_on: str, level: int = logging.NOTSET):
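
Note: for context on the `Union[logging.Logger, LoggingAdapter]` return type above: inside a run, `get_run_logger` hands back an adapter carrying run metadata; outside any run context it raises `MissingContextError`. Typical usage, as a quick sketch:

from prefect import flow
from prefect.logging import get_run_logger


@flow
def greet() -> None:
    logger = get_run_logger()  # a LoggingAdapter here; raises outside a run
    logger.info("Hello from a flow run")


if __name__ == "__main__":
    greet()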
prefect/main.py CHANGED
@@ -1,16 +1,18 @@
 # Import user-facing API
+from typing import Any
+
 from prefect.deployments import deploy
 from prefect.states import State
 from prefect.logging import get_run_logger
-from prefect.flows import flow, Flow, serve
+from prefect.flows import flow, Flow, serve, aserve
 from prefect.transactions import Transaction
 from prefect.tasks import task, Task
 from prefect.context import tags
 from prefect.utilities.annotations import unmapped, allow_failure
 from prefect.results import BaseResult, ResultRecordMetadata
 from prefect.flow_runs import pause_flow_run, resume_flow_run, suspend_flow_run
-from prefect.client.orchestration import get_client, PrefectClient
-from prefect.client.cloud import get_cloud_client, CloudClient
+from prefect.client.orchestration import get_client
+from prefect.client.cloud import get_cloud_client
 import prefect.variables
 import prefect.runtime
 
@@ -25,28 +27,17 @@ import prefect.context
 # Perform any forward-ref updates needed for Pydantic models
 import prefect.client.schemas
 
-prefect.context.FlowRunContext.model_rebuild(
-    _types_namespace={
-        "Flow": Flow,
-        "BaseResult": BaseResult,
-        "ResultRecordMetadata": ResultRecordMetadata,
-    }
-)
-prefect.context.TaskRunContext.model_rebuild(
-    _types_namespace={"Task": Task, "BaseResult": BaseResult}
-)
-prefect.client.schemas.State.model_rebuild(
-    _types_namespace={
-        "BaseResult": BaseResult,
-        "ResultRecordMetadata": ResultRecordMetadata,
-    }
-)
-prefect.client.schemas.StateCreate.model_rebuild(
-    _types_namespace={
-        "BaseResult": BaseResult,
-        "ResultRecordMetadata": ResultRecordMetadata,
-    }
+_types: dict[str, Any] = dict(
+    Task=Task,
+    Flow=Flow,
+    BaseResult=BaseResult,
+    ResultRecordMetadata=ResultRecordMetadata,
 )
+prefect.context.FlowRunContext.model_rebuild(_types_namespace=_types)
+prefect.context.TaskRunContext.model_rebuild(_types_namespace=_types)
+prefect.client.schemas.State.model_rebuild(_types_namespace=_types)
+prefect.client.schemas.StateCreate.model_rebuild(_types_namespace=_types)
+prefect.client.schemas.OrchestrationResult.model_rebuild(_types_namespace=_types)
 Transaction.model_rebuild()
 
 # Configure logging
@@ -76,6 +67,7 @@ __all__ = [
     "flow",
     "Flow",
     "get_client",
+    "get_cloud_client",
     "get_run_logger",
     "State",
     "tags",
@@ -84,6 +76,7 @@ __all__ = [
     "Transaction",
     "unmapped",
     "serve",
+    "aserve",
     "deploy",
     "pause_flow_run",
     "resume_flow_run",
prefect/results.py CHANGED
@@ -35,10 +35,13 @@ from pydantic import (
     model_validator,
 )
 from pydantic_core import PydanticUndefinedType
-from pydantic_extra_types.pendulum_dt import DateTime
 from typing_extensions import ParamSpec, Self
 
 import prefect
+from prefect._experimental.lineage import (
+    emit_result_read_event,
+    emit_result_write_event,
+)
 from prefect._internal.compatibility import deprecated
 from prefect._internal.compatibility.deprecated import deprecated_field
 from prefect.blocks.core import Block
@@ -57,6 +60,7 @@ from prefect.locking.protocol import LockManager
 from prefect.logging import get_logger
 from prefect.serializers import PickleSerializer, Serializer
 from prefect.settings.context import get_current_settings
+from prefect.types import DateTime
 from prefect.utilities.annotations import NotSet
 from prefect.utilities.asyncutils import sync_compatible
 from prefect.utilities.pydantic import get_dispatch_key, lookup_type, register_base_type
@@ -129,7 +133,7 @@ async def resolve_result_storage(
     elif isinstance(result_storage, Path):
         storage_block = LocalFileSystem(basepath=str(result_storage))
     elif isinstance(result_storage, str):
-        storage_block = await Block.load(result_storage, client=client)
+        storage_block = await Block.aload(result_storage, client=client)
         storage_block_id = storage_block._block_document_id
         assert storage_block_id is not None, "Loaded storage blocks must have ids"
     elif isinstance(result_storage, UUID):
@@ -168,7 +172,7 @@ async def get_or_create_default_task_scheduling_storage() -> ResultStorage:
     default_block = settings.tasks.scheduling.default_storage_block
 
     if default_block is not None:
-        return await Block.load(default_block)
+        return await Block.aload(default_block)
 
     # otherwise, use the local file system
     basepath = settings.results.local_storage_path
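
Note: both call sites above sit inside `async def` functions, so they switch from the sync-compatible `Block.load` to the explicit async `Block.aload` used throughout this release. Roughly, for callers (the block document name is made up):

from prefect.blocks.system import Secret


def from_sync_code() -> None:
    secret = Secret.load("my-secret")  # sync-compatible entry point


async def from_async_code() -> None:
    secret = await Secret.aload("my-secret")  # explicit async variant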
@@ -232,6 +236,10 @@ def _format_user_supplied_storage_key(key: str) -> str:
 T = TypeVar("T")
 
 
+def default_cache() -> LRUCache[str, "ResultRecord[Any]"]:
+    return LRUCache(maxsize=1000)
+
+
 def result_storage_discriminator(x: Any) -> str:
     if isinstance(x, dict):
         if "block_type_slug" in x:
@@ -284,7 +292,7 @@ class ResultStore(BaseModel):
     cache_result_in_memory: bool = Field(default=True)
     serializer: Serializer = Field(default_factory=get_default_result_serializer)
     storage_key_fn: Callable[[], str] = Field(default=DEFAULT_STORAGE_KEY_FN)
-    cache: LRUCache = Field(default_factory=lambda: LRUCache(maxsize=1000))
+    cache: LRUCache[str, "ResultRecord[Any]"] = Field(default_factory=default_cache)
 
     # Deprecated fields
     persist_result: Optional[bool] = Field(default=None)
@@ -319,7 +327,7 @@ class ResultStore(BaseModel):
         return self.model_copy(update=update)
 
     @sync_compatible
-    async def update_for_task(self: Self, task: "Task") -> Self:
+    async def update_for_task(self: Self, task: "Task[P, R]") -> Self:
        """
        Create a new result store for a task.
 
@@ -446,8 +454,15 @@ class ResultStore(BaseModel):
        """
        return await self._exists(key=key, _sync=False)
 
+    def _resolved_key_path(self, key: str) -> str:
+        if self.result_storage_block_id is None and hasattr(
+            self.result_storage, "_resolve_path"
+        ):
+            return str(self.result_storage._resolve_path(key))
+        return key
+
     @sync_compatible
-    async def _read(self, key: str, holder: str) -> "ResultRecord":
+    async def _read(self, key: str, holder: str) -> "ResultRecord[Any]":
        """
        Read a result record from storage.
 
@@ -465,8 +480,12 @@ class ResultStore(BaseModel):
         if self.lock_manager is not None and not self.is_lock_holder(key, holder):
             await self.await_for_lock(key)
 
-        if key in self.cache:
-            return self.cache[key]
+        resolved_key_path = self._resolved_key_path(key)
+
+        if resolved_key_path in self.cache:
+            cached_result = self.cache[resolved_key_path]
+            await emit_result_read_event(self, resolved_key_path, cached=True)
+            return cached_result
 
         if self.result_storage is None:
             self.result_storage = await get_default_result_storage()
@@ -478,31 +497,28 @@ class ResultStore(BaseModel):
                 metadata.storage_key is not None
             ), "Did not find storage key in metadata"
             result_content = await self.result_storage.read_path(metadata.storage_key)
-            result_record = ResultRecord.deserialize_from_result_and_metadata(
+            result_record: ResultRecord[
+                Any
+            ] = ResultRecord.deserialize_from_result_and_metadata(
                 result=result_content, metadata=metadata_content
             )
+            await emit_result_read_event(self, resolved_key_path)
         else:
             content = await self.result_storage.read_path(key)
-            result_record = ResultRecord.deserialize(
+            result_record: ResultRecord[Any] = ResultRecord.deserialize(
                 content, backup_serializer=self.serializer
             )
+            await emit_result_read_event(self, resolved_key_path)
 
         if self.cache_result_in_memory:
-            if self.result_storage_block_id is None and hasattr(
-                self.result_storage, "_resolve_path"
-            ):
-                cache_key = str(self.result_storage._resolve_path(key))
-            else:
-                cache_key = key
-
-            self.cache[cache_key] = result_record
+            self.cache[resolved_key_path] = result_record
         return result_record
 
     def read(
         self,
         key: str,
         holder: Optional[str] = None,
-    ) -> "ResultRecord":
+    ) -> "ResultRecord[Any]":
        """
        Read a result record from storage.
 
@@ -520,7 +536,7 @@ class ResultStore(BaseModel):
         self,
         key: str,
         holder: Optional[str] = None,
-    ) -> "ResultRecord":
+    ) -> "ResultRecord[Any]":
        """
        Read a result record from storage.
 
@@ -663,12 +679,13 @@ class ResultStore(BaseModel):
                 base_key,
                 content=result_record.serialize_metadata(),
             )
+            await emit_result_write_event(self, result_record.metadata.storage_key)
         # Otherwise, write the result metadata and result together
         else:
             await self.result_storage.write_path(
                 result_record.metadata.storage_key, content=result_record.serialize()
             )
-
+            await emit_result_write_event(self, result_record.metadata.storage_key)
         if self.cache_result_in_memory:
             self.cache[key] = result_record
 
@@ -898,7 +915,11 @@ class ResultStore(BaseModel):
         )
 
     @sync_compatible
-    async def read_parameters(self, identifier: UUID) -> Dict[str, Any]:
+    async def read_parameters(self, identifier: UUID) -> dict[str, Any]:
+        if self.result_storage is None:
+            raise ValueError(
+                "Result store is not configured - must have a result storage block to read parameters"
+            )
         record = ResultRecord.deserialize(
             await self.result_storage.read_path(f"parameters/{identifier}")
         )
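
Note: in `_read` above, computing `resolved_key_path` once up front means the cache lookup, the cache write, and the new lineage events all agree on a single identifier, so a relative key and its absolute form can no longer produce duplicate cache entries. The idea in isolation (the helper name and base path are illustrative):

from pathlib import Path


def resolved_key_path(basepath: str, key: str) -> str:
    # Normalize the key against the storage base so every spelling of the
    # same result maps to a single cache slot.
    return str((Path(basepath) / key).resolve())


assert resolved_key_path("/results", "foo") == resolved_key_path("/results", "./foo")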
prefect/runner/runner.py CHANGED
@@ -43,7 +43,17 @@ import threading
 from copy import deepcopy
 from functools import partial
 from pathlib import Path
-from typing import TYPE_CHECKING, Callable, Dict, Iterable, List, Optional, Set, Union
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Dict,
+    Iterable,
+    List,
+    Optional,
+    Set,
+    Union,
+)
 from uuid import UUID, uuid4
 
 import anyio
@@ -190,7 +200,7 @@ class Runner:
         self._cancelling_flow_run_ids = set()
         self._scheduled_task_scopes = set()
         self._deployment_ids: Set[UUID] = set()
-        self._flow_run_process_map: Dict[UUID, Dict] = dict()
+        self._flow_run_process_map: dict[UUID, dict[str, Any]] = dict()
 
         self._tmp_dir: Path = (
             Path(tempfile.gettempdir()) / "runner_storage" / str(uuid4())
@@ -432,10 +442,14 @@ class Runner:
             )
         )
 
-    def execute_in_background(self, func, *args, **kwargs):
+    def execute_in_background(
+        self, func: Callable[..., Any], *args: Any, **kwargs: Any
+    ):
        """
        Executes a function in the background.
        """
+        if TYPE_CHECKING:
+            assert self._loop is not None
 
        return asyncio.run_coroutine_threadsafe(func(*args, **kwargs), self._loop)
 
@@ -536,7 +550,7 @@ class Runner:
     async def _run_process(
         self,
         flow_run: "FlowRun",
-        task_status: Optional[anyio.abc.TaskStatus] = None,
+        task_status: Optional[anyio.abc.TaskStatus[Any]] = None,
         entrypoint: Optional[str] = None,
     ):
        """
@@ -723,7 +737,9 @@ class Runner:
         return await self._submit_scheduled_flow_runs(flow_run_response=runs_response)
 
     async def _check_for_cancelled_flow_runs(
-        self, should_stop: Callable = lambda: False, on_stop: Callable = lambda: None
+        self,
+        should_stop: Callable[[], bool] = lambda: False,
+        on_stop: Callable[[], None] = lambda: None,
     ):
        """
        Checks for flow runs with a CANCELLING state and attempts to
@@ -862,31 +878,37 @@ class Runner:
         flow: "Optional[APIFlow]",
         deployment: "Optional[Deployment]",
     ):
-        related = []
-        tags = []
+        related: list[RelatedResource] = []
+        tags: list[str] = []
         if deployment:
             related.append(
-                {
-                    "prefect.resource.id": f"prefect.deployment.{deployment.id}",
-                    "prefect.resource.role": "deployment",
-                    "prefect.resource.name": deployment.name,
-                }
+                RelatedResource(
+                    {
+                        "prefect.resource.id": f"prefect.deployment.{deployment.id}",
+                        "prefect.resource.role": "deployment",
+                        "prefect.resource.name": deployment.name,
+                    }
+                )
             )
             tags.extend(deployment.tags)
         if flow:
             related.append(
+                RelatedResource(
+                    {
+                        "prefect.resource.id": f"prefect.flow.{flow.id}",
+                        "prefect.resource.role": "flow",
+                        "prefect.resource.name": flow.name,
+                    }
+                )
+            )
+        related.append(
+            RelatedResource(
                 {
-                    "prefect.resource.id": f"prefect.flow.{flow.id}",
-                    "prefect.resource.role": "flow",
-                    "prefect.resource.name": flow.name,
+                    "prefect.resource.id": f"prefect.flow-run.{flow_run.id}",
+                    "prefect.resource.role": "flow-run",
+                    "prefect.resource.name": flow_run.name,
                 }
             )
-        related.append(
-            {
-                "prefect.resource.id": f"prefect.flow-run.{flow_run.id}",
-                "prefect.resource.role": "flow-run",
-                "prefect.resource.name": flow_run.name,
-            }
         )
         tags.extend(flow_run.tags)
 
prefect/runner/server.py CHANGED
@@ -1,5 +1,5 @@
 import uuid
-from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Tuple
+from typing import TYPE_CHECKING, Any, Callable, Coroutine, Hashable, Optional, Tuple
 
 import pendulum
 import uvicorn
@@ -22,7 +22,7 @@ from prefect.settings import (
     PREFECT_RUNNER_SERVER_MISSED_POLLS_TOLERANCE,
     PREFECT_RUNNER_SERVER_PORT,
 )
-from prefect.utilities.asyncutils import sync_compatible
+from prefect.utilities.asyncutils import run_coro_as_sync
 from prefect.utilities.importtools import load_script_as_module
 
 if TYPE_CHECKING:
@@ -38,11 +38,13 @@ RunnableEndpoint = Literal["deployment", "flow", "task"]
 
 class RunnerGenericFlowRunRequest(BaseModel):
     entrypoint: str
-    parameters: Optional[Dict[str, Any]] = None
+    parameters: Optional[dict[str, Any]] = None
     parent_task_run_id: Optional[uuid.UUID] = None
 
 
-def perform_health_check(runner, delay_threshold: Optional[int] = None) -> JSONResponse:
+def perform_health_check(
+    runner: "Runner", delay_threshold: Optional[int] = None
+) -> Callable[..., JSONResponse]:
     if delay_threshold is None:
         delay_threshold = (
             PREFECT_RUNNER_SERVER_MISSED_POLLS_TOLERANCE.value()
@@ -63,15 +65,15 @@ def perform_health_check(runner, delay_threshold: Optional[int] = None) -> JSONR
     return _health_check
 
 
-def run_count(runner) -> int:
-    def _run_count():
-        run_count = len(runner._flow_run_process_map)
+def run_count(runner: "Runner") -> Callable[..., int]:
+    def _run_count() -> int:
+        run_count = len(runner._flow_run_process_map)  # pyright: ignore[reportPrivateUsage]
         return run_count
 
     return _run_count
 
 
-def shutdown(runner) -> int:
+def shutdown(runner: "Runner") -> Callable[..., JSONResponse]:
     def _shutdown():
         runner.stop()
         return JSONResponse(status_code=status.HTTP_200_OK, content={"message": "OK"})
@@ -81,9 +83,9 @@ def shutdown(runner) -> int:
 
 async def _build_endpoint_for_deployment(
     deployment: "DeploymentResponse", runner: "Runner"
-) -> Callable:
+) -> Callable[..., Coroutine[Any, Any, JSONResponse]]:
     async def _create_flow_run_for_deployment(
-        body: Optional[Dict[Any, Any]] = None,
+        body: Optional[dict[Any, Any]] = None,
     ) -> JSONResponse:
         body = body or {}
         if deployment.enforce_parameter_schema and deployment.parameter_openapi_schema:
@@ -116,11 +118,11 @@ async def _build_endpoint_for_deployment(
 
 async def get_deployment_router(
     runner: "Runner",
-) -> Tuple[APIRouter, Dict[str, Dict]]:
+) -> Tuple[APIRouter, dict[Hashable, Any]]:
     router = APIRouter()
-    schemas = {}
+    schemas: dict[Hashable, Any] = {}
     async with get_client() as client:
-        for deployment_id in runner._deployment_ids:
+        for deployment_id in runner._deployment_ids:  # pyright: ignore[reportPrivateUsage]
             deployment = await client.read_deployment(deployment_id)
             router.add_api_route(
                 f"/deployment/{deployment.id}/run",
@@ -142,21 +144,21 @@ async def get_deployment_router(
     return router, schemas
 
 
-async def get_subflow_schemas(runner: "Runner") -> Dict[str, Dict]:
+async def get_subflow_schemas(runner: "Runner") -> dict[str, dict[str, Any]]:
    """
    Load available subflow schemas by filtering for only those subflows in the
    deployment entrypoint's import space.
    """
-    schemas = {}
+    schemas: dict[str, dict[str, Any]] = {}
     async with get_client() as client:
-        for deployment_id in runner._deployment_ids:
+        for deployment_id in runner._deployment_ids:  # pyright: ignore[reportPrivateUsage]
             deployment = await client.read_deployment(deployment_id)
             if deployment.entrypoint is None:
                 continue
 
             script = deployment.entrypoint.split(":")[0]
             module = load_script_as_module(script)
-            subflows = [
+            subflows: list[Flow[Any, Any]] = [
                 obj for obj in module.__dict__.values() if isinstance(obj, Flow)
             ]
             for flow in subflows:
@@ -165,7 +167,7 @@ async def get_subflow_schemas(runner: "Runner") -> Dict[str, Dict]:
     return schemas
 
 
-def _flow_in_schemas(flow: Flow, schemas: Dict[str, Dict]) -> bool:
+def _flow_in_schemas(flow: Flow[Any, Any], schemas: dict[str, dict[str, Any]]) -> bool:
    """
    Check if a flow is in the schemas dict, either by name or by name with
    dashes replaced with underscores.
@@ -174,7 +176,9 @@ def _flow_in_schemas(flow: Flow, schemas: Dict[str, Dict]) -> bool:
     return flow.name in schemas or flow_name_with_dashes in schemas
 
 
-def _flow_schema_changed(flow: Flow, schemas: Dict[str, Dict]) -> bool:
+def _flow_schema_changed(
+    flow: Flow[Any, Any], schemas: dict[str, dict[str, Any]]
+) -> bool:
    """
    Check if a flow's schemas have changed, either by name or by name with
    dashes replaced with underscores.
@@ -188,8 +192,8 @@ def _flow_schema_changed(flow: Flow, schemas: Dict[str, Dict]) -> bool:
 
 
 def _build_generic_endpoint_for_flows(
-    runner: "Runner", schemas: Dict[str, Dict]
-) -> Callable:
+    runner: "Runner", schemas: dict[str, dict[str, Any]]
+) -> Callable[..., Coroutine[Any, Any, JSONResponse]]:
     async def _create_flow_run_for_flow_from_fqn(
         body: RunnerGenericFlowRunRequest,
     ) -> JSONResponse:
@@ -241,7 +245,6 @@ def _build_generic_endpoint_for_flows(
     return _create_flow_run_for_flow_from_fqn
 
 
-@sync_compatible
 async def build_server(runner: "Runner") -> FastAPI:
    """
    Build a FastAPI server for a runner.
@@ -297,16 +300,11 @@ def start_webserver(runner: "Runner", log_level: Optional[str] = None) -> None:
     host = PREFECT_RUNNER_SERVER_HOST.value()
     port = PREFECT_RUNNER_SERVER_PORT.value()
     log_level = log_level or PREFECT_RUNNER_SERVER_LOG_LEVEL.value()
-    webserver = build_server(runner)
+    webserver = run_coro_as_sync(build_server(runner))
+    if TYPE_CHECKING:
+        assert webserver is not None, "webserver should be built"
+        assert log_level is not None, "log_level should be set"
+
     uvicorn.run(
         webserver, host=host, port=port, log_level=log_level.lower()
     )  # Uvicorn supports only lowercase log_level
-    # From the Uvicorn config file:
-    # LOG_LEVELS: dict[str, int] = {
-    #     "critical": logging.CRITICAL,
-    #     "error": logging.ERROR,
-    #     "warning": logging.WARNING,
-    #     "info": logging.INFO,
-    #     "debug": logging.DEBUG,
-    #     "trace": TRACE_LOG_LEVEL,
-    # }