prefect-client 2.17.1__py3-none-any.whl → 2.18.1__py3-none-any.whl

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Files changed (71)
  1. prefect/_internal/compatibility/deprecated.py +2 -0
  2. prefect/_internal/pydantic/_compat.py +1 -0
  3. prefect/_internal/pydantic/utilities/field_validator.py +25 -10
  4. prefect/_internal/pydantic/utilities/model_dump.py +1 -1
  5. prefect/_internal/pydantic/utilities/model_validate.py +1 -1
  6. prefect/_internal/pydantic/utilities/model_validator.py +11 -3
  7. prefect/_internal/schemas/fields.py +31 -12
  8. prefect/_internal/schemas/validators.py +0 -6
  9. prefect/_version.py +97 -38
  10. prefect/blocks/abstract.py +34 -1
  11. prefect/blocks/core.py +1 -1
  12. prefect/blocks/notifications.py +16 -7
  13. prefect/blocks/system.py +2 -3
  14. prefect/client/base.py +10 -5
  15. prefect/client/orchestration.py +405 -85
  16. prefect/client/schemas/actions.py +4 -3
  17. prefect/client/schemas/objects.py +6 -5
  18. prefect/client/schemas/schedules.py +2 -6
  19. prefect/client/schemas/sorting.py +9 -0
  20. prefect/client/utilities.py +25 -3
  21. prefect/concurrency/asyncio.py +11 -5
  22. prefect/concurrency/events.py +3 -3
  23. prefect/concurrency/services.py +1 -1
  24. prefect/concurrency/sync.py +9 -5
  25. prefect/deployments/__init__.py +0 -2
  26. prefect/deployments/base.py +2 -144
  27. prefect/deployments/deployments.py +29 -20
  28. prefect/deployments/runner.py +36 -28
  29. prefect/deployments/steps/core.py +3 -3
  30. prefect/deprecated/packaging/serializers.py +5 -4
  31. prefect/engine.py +3 -1
  32. prefect/events/__init__.py +45 -0
  33. prefect/events/actions.py +250 -18
  34. prefect/events/cli/automations.py +201 -0
  35. prefect/events/clients.py +179 -21
  36. prefect/events/filters.py +30 -3
  37. prefect/events/instrument.py +40 -40
  38. prefect/events/related.py +2 -1
  39. prefect/events/schemas/automations.py +126 -8
  40. prefect/events/schemas/deployment_triggers.py +23 -277
  41. prefect/events/schemas/events.py +7 -7
  42. prefect/events/utilities.py +3 -1
  43. prefect/events/worker.py +21 -8
  44. prefect/exceptions.py +1 -1
  45. prefect/flows.py +33 -18
  46. prefect/input/actions.py +9 -9
  47. prefect/input/run_input.py +49 -37
  48. prefect/logging/__init__.py +2 -2
  49. prefect/logging/loggers.py +64 -1
  50. prefect/new_flow_engine.py +293 -0
  51. prefect/new_task_engine.py +374 -0
  52. prefect/results.py +32 -12
  53. prefect/runner/runner.py +3 -2
  54. prefect/serializers.py +62 -31
  55. prefect/server/api/collections_data/views/aggregate-worker-metadata.json +44 -3
  56. prefect/settings.py +32 -10
  57. prefect/states.py +25 -19
  58. prefect/tasks.py +17 -0
  59. prefect/types/__init__.py +90 -0
  60. prefect/utilities/asyncutils.py +37 -0
  61. prefect/utilities/engine.py +6 -4
  62. prefect/utilities/pydantic.py +34 -15
  63. prefect/utilities/schema_tools/hydration.py +88 -19
  64. prefect/utilities/schema_tools/validation.py +1 -1
  65. prefect/variables.py +4 -4
  66. {prefect_client-2.17.1.dist-info → prefect_client-2.18.1.dist-info}/METADATA +1 -1
  67. {prefect_client-2.17.1.dist-info → prefect_client-2.18.1.dist-info}/RECORD +71 -67
  68. /prefect/{concurrency/common.py → events/cli/__init__.py} +0 -0
  69. {prefect_client-2.17.1.dist-info → prefect_client-2.18.1.dist-info}/LICENSE +0 -0
  70. {prefect_client-2.17.1.dist-info → prefect_client-2.18.1.dist-info}/WHEEL +0 -0
  71. {prefect_client-2.17.1.dist-info → prefect_client-2.18.1.dist-info}/top_level.txt +0 -0
prefect/events/worker.py CHANGED
@@ -1,6 +1,7 @@
 from contextlib import asynccontextmanager
 from contextvars import Context, copy_context
-from typing import Any, Optional, Tuple, Type
+from typing import Any, Dict, Optional, Tuple, Type
+from uuid import UUID
 
 from typing_extensions import Self
 
@@ -17,6 +18,7 @@ from .clients import (
     EventsClient,
     NullEventsClient,
     PrefectCloudEventsClient,
+    PrefectEphemeralEventsClient,
     PrefectEventsClient,
 )
 from .related import related_resources_from_run_context
@@ -24,7 +26,11 @@ from .schemas.events import Event
 
 
 def should_emit_events() -> bool:
-    return emit_events_to_cloud() or should_emit_events_to_running_server()
+    return (
+        emit_events_to_cloud()
+        or should_emit_events_to_running_server()
+        or should_emit_events_to_ephemeral_server()
+    )
 
 
 def emit_events_to_cloud() -> bool:
@@ -36,7 +42,11 @@ def emit_events_to_cloud() -> bool:
 
 def should_emit_events_to_running_server() -> bool:
     api_url = PREFECT_API_URL.value()
-    return isinstance(api_url, str) and PREFECT_EXPERIMENTAL_EVENTS
+    return isinstance(api_url, str) and PREFECT_EXPERIMENTAL_EVENTS.value()
+
+
+def should_emit_events_to_ephemeral_server() -> bool:
+    return PREFECT_API_KEY.value() is None and PREFECT_EXPERIMENTAL_EVENTS.value()
 
 
 class EventsWorker(QueueService[Event]):
@@ -47,6 +57,7 @@ class EventsWorker(QueueService[Event]):
         self.client_type = client_type
         self.client_options = client_options
         self._client: EventsClient
+        self._context_cache: Dict[UUID, Context] = {}
 
     @asynccontextmanager
     async def _lifespan(self):
@@ -55,11 +66,12 @@ class EventsWorker(QueueService[Event]):
         async with self._client:
             yield
 
-    def _prepare_item(self, event: Event) -> Tuple[Event, Context]:
-        return (event, copy_context())
+    def _prepare_item(self, event: Event) -> Event:
+        self._context_cache[event.id] = copy_context()
+        return event
 
-    async def _handle(self, event_and_context: Tuple[Event, Context]):
-        event, context = event_and_context
+    async def _handle(self, event: Event):
+        context = self._context_cache.pop(event.id)
         with temporary_context(context=context):
             await self.attach_related_resources_from_context(event)
 
@@ -85,7 +97,8 @@ class EventsWorker(QueueService[Event]):
             }
         elif should_emit_events_to_running_server():
             client_type = PrefectEventsClient
-
+        elif should_emit_events_to_ephemeral_server():
+            client_type = PrefectEphemeralEventsClient
         else:
             client_type = NullEventsClient
 
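The `EventsWorker` changes above replace the `(event, context)` tuple with a per-event context cache keyed by `event.id`: `_prepare_item` captures the caller's `contextvars` state at enqueue time and `_handle` pops it before emitting. A minimal, self-contained sketch of that pattern (hypothetical `MiniWorker` and `Item`, not Prefect's `QueueService`):

```python
# Minimal sketch of the context-cache pattern used above (hypothetical class,
# not Prefect's QueueService; MiniWorker/Item are illustrative names only).
from contextvars import Context, copy_context
from dataclasses import dataclass, field
from typing import Dict
from uuid import UUID, uuid4


@dataclass
class Item:
    id: UUID = field(default_factory=uuid4)
    payload: str = ""


class MiniWorker:
    def __init__(self) -> None:
        # One captured Context per queued item, keyed by the item's id.
        self._context_cache: Dict[UUID, Context] = {}

    def prepare(self, item: Item) -> Item:
        # Capture the caller's context variables at enqueue time.
        self._context_cache[item.id] = copy_context()
        return item

    def handle(self, item: Item) -> None:
        # Pop (not get) so the cache cannot grow without bound.
        context = self._context_cache.pop(item.id)
        # Run the handler inside the captured context.
        context.run(print, f"handling {item.payload}")


worker = MiniWorker()
item = worker.prepare(Item(payload="example"))
worker.handle(item)
```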
prefect/exceptions.py CHANGED
@@ -178,7 +178,7 @@ class ParameterTypeError(PrefectException):
 
    @classmethod
    def from_validation_error(cls, exc: ValidationError) -> Self:
-        bad_params = [f'{err["loc"][0]}: {err["msg"]}' for err in exc.errors()]
+        bad_params = [f'{".".join(err["loc"])}: {err["msg"]}' for err in exc.errors()]
        msg = "Flow run received invalid parameters:\n - " + "\n - ".join(bad_params)
        return cls(msg)
 
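For context on the `from_validation_error` change: joining every element of `err["loc"]` keeps nested parameter paths that indexing only the first element would drop. An illustrative, standalone sketch with a hand-built pydantic-style `errors()` list (string locations only, since `str.join` expects strings):

```python
# Sketch of the message formatting change, using a hand-built errors() list
# shaped like pydantic v1's ValidationError.errors() output.
errors = [
    {"loc": ("config", "retries"), "msg": "value is not a valid integer"},
    {"loc": ("name",), "msg": "field required"},
]

old_style = [f'{err["loc"][0]}: {err["msg"]}' for err in errors]
new_style = [f'{".".join(err["loc"])}: {err["msg"]}' for err in errors]

print(old_style[0])  # "config: value is not a valid integer"  (nested field lost)
print(new_style[0])  # "config.retries: value is not a valid integer"
```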
prefect/flows.py CHANGED
@@ -34,20 +34,10 @@ from typing import (
 )
 from uuid import UUID
 
-from prefect._vendor.fastapi.encoders import jsonable_encoder
-from typing_extensions import Self
+from rich.console import Console
+from typing_extensions import Literal, ParamSpec, Self
 
-from prefect._internal.compatibility.deprecated import deprecated_parameter
-from prefect._internal.concurrency.api import create_call, from_async
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
-from prefect.client.orchestration import get_client
-from prefect.deployments.runner import DeploymentImage, EntrypointType, deploy
-from prefect.filesystems import ReadableDeploymentStorage
-from prefect.runner.storage import (
-    BlockStorageAdapter,
-    RunnerStorage,
-    create_storage_from_url,
-)
 
 if HAS_PYDANTIC_V2:
     import pydantic.v1 as pydantic
@@ -67,26 +57,36 @@ else:
 
     V2ValidationError = None
 
-from rich.console import Console
-from typing_extensions import Literal, ParamSpec
+from prefect._vendor.fastapi.encoders import jsonable_encoder
 
+from prefect._internal.compatibility.deprecated import deprecated_parameter
+from prefect._internal.concurrency.api import create_call, from_async
 from prefect._internal.schemas.validators import raise_on_name_with_banned_characters
+from prefect.client.orchestration import get_client
 from prefect.client.schemas.objects import Flow as FlowSchema
 from prefect.client.schemas.objects import FlowRun, MinimalDeploymentSchedule
 from prefect.client.schemas.schedules import SCHEDULE_TYPES
 from prefect.context import PrefectObjectRegistry, registry_from_script
-from prefect.events import DeploymentTriggerTypes
+from prefect.deployments.runner import DeploymentImage, EntrypointType, deploy
+from prefect.events import DeploymentTriggerTypes, TriggerTypes
 from prefect.exceptions import (
     MissingFlowError,
     ObjectNotFound,
     ParameterTypeError,
     UnspecifiedFlowError,
 )
+from prefect.filesystems import ReadableDeploymentStorage
 from prefect.futures import PrefectFuture
 from prefect.logging import get_logger
 from prefect.results import ResultSerializer, ResultStorage
+from prefect.runner.storage import (
+    BlockStorageAdapter,
+    RunnerStorage,
+    create_storage_from_url,
+)
 from prefect.settings import (
     PREFECT_DEFAULT_WORK_POOL_NAME,
+    PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE,
     PREFECT_FLOW_DEFAULT_RETRIES,
     PREFECT_FLOW_DEFAULT_RETRY_DELAY_SECONDS,
     PREFECT_UI_URL,
@@ -618,7 +618,7 @@ class Flow(Generic[P, R]):
         schedule: Optional[SCHEDULE_TYPES] = None,
         is_schedule_active: Optional[bool] = None,
         parameters: Optional[dict] = None,
-        triggers: Optional[List[DeploymentTriggerTypes]] = None,
+        triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
         version: Optional[str] = None,
@@ -748,7 +748,7 @@ class Flow(Generic[P, R]):
         schedules: Optional[List["FlexibleScheduleList"]] = None,
         schedule: Optional[SCHEDULE_TYPES] = None,
         is_schedule_active: Optional[bool] = None,
-        triggers: Optional[List[DeploymentTriggerTypes]] = None,
+        triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         parameters: Optional[dict] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
@@ -962,7 +962,7 @@ class Flow(Generic[P, R]):
         schedules: Optional[List[MinimalDeploymentSchedule]] = None,
         schedule: Optional[SCHEDULE_TYPES] = None,
         is_schedule_active: Optional[bool] = None,
-        triggers: Optional[List[DeploymentTriggerTypes]] = None,
+        triggers: Optional[List[Union[DeploymentTriggerTypes, TriggerTypes]]] = None,
         parameters: Optional[dict] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
@@ -1225,6 +1225,21 @@ class Flow(Generic[P, R]):
             # we can add support for exploring subflows for tasks in the future.
             return track_viz_task(self.isasync, self.name, parameters)
 
+        if PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE.value():
+            from prefect.new_flow_engine import run_flow
+            from prefect.utilities.asyncutils import run_sync
+
+            awaitable = run_flow(
+                flow=self,
+                parameters=parameters,
+                wait_for=wait_for,
+                return_type=return_type,
+            )
+            if self.isasync:
+                return awaitable
+            else:
+                return run_sync(awaitable)
+
         return enter_flow_run_engine_from_flow_call(
             self,
             parameters,
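The new block in `Flow.__call__` opts into `prefect.new_flow_engine.run_flow` when `PREFECT_EXPERIMENTAL_ENABLE_NEW_ENGINE` is set: async flows get the coroutine back unawaited, while sync flows run it to completion immediately. A generic sketch of that dispatch pattern, with `run_engine` and `call_flow` as illustrative stand-ins (Prefect itself uses its `run_sync` helper rather than `asyncio.run`):

```python
# Generic sketch of the "return the awaitable if async, otherwise run it now"
# dispatch used in Flow.__call__ above; run_engine/call_flow are stand-ins.
import asyncio
from typing import Any, Awaitable, Union


async def run_engine(parameters: dict) -> Any:
    # Stand-in for the real engine entrypoint (prefect.new_flow_engine.run_flow).
    return {"result": parameters}


def call_flow(parameters: dict, is_async: bool) -> Union[Any, Awaitable[Any]]:
    awaitable = run_engine(parameters)
    if is_async:
        # Async flows hand the coroutine back to the caller's event loop.
        return awaitable
    # Sync flows drain the coroutine immediately; asyncio.run keeps this
    # sketch portable, whereas Prefect routes through its own run_sync.
    return asyncio.run(awaitable)


print(call_flow({"x": 1}, is_async=False))  # {'result': {'x': 1}}
```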
prefect/input/actions.py CHANGED
@@ -5,7 +5,7 @@ import orjson
 import pydantic
 
 from prefect._internal.pydantic import HAS_PYDANTIC_V2
-from prefect.client.utilities import inject_client
+from prefect.client.utilities import client_injector
 from prefect.context import FlowRunContext
 from prefect.exceptions import PrefectHTTPStatusError
 from prefect.utilities.asyncutils import sync_compatible
@@ -52,13 +52,13 @@ async def create_flow_run_input_from_model(
 
 
 @sync_compatible
-@inject_client
+@client_injector
 async def create_flow_run_input(
+    client: "PrefectClient",
     key: str,
     value: Any,
     flow_run_id: Optional[UUID] = None,
     sender: Optional[str] = None,
-    client: "PrefectClient" = None,
 ):
     """
     Create a new flow run input. The given `value` will be serialized to JSON
@@ -81,13 +81,13 @@ async def create_flow_run_input(
 
 
 @sync_compatible
-@inject_client
+@client_injector
 async def filter_flow_run_input(
+    client: "PrefectClient",
     key_prefix: str,
     limit: int = 1,
     exclude_keys: Optional[Set[str]] = None,
     flow_run_id: Optional[UUID] = None,
-    client: "PrefectClient" = None,
 ):
     if exclude_keys is None:
         exclude_keys = set()
@@ -103,9 +103,9 @@
 
 
 @sync_compatible
-@inject_client
+@client_injector
 async def read_flow_run_input(
-    key: str, flow_run_id: Optional[UUID] = None, client: "PrefectClient" = None
+    client: "PrefectClient", key: str, flow_run_id: Optional[UUID] = None
 ) -> Any:
     """Read a flow run input.
 
@@ -126,9 +126,9 @@
 
 
 @sync_compatible
-@inject_client
+@client_injector
 async def delete_flow_run_input(
-    key: str, flow_run_id: Optional[UUID] = None, client: "PrefectClient" = None
+    client: "PrefectClient", key: str, flow_run_id: Optional[UUID] = None
 ):
     """Delete a flow run input.
 
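The `inject_client` to `client_injector` swap changes the calling convention: the injected `PrefectClient` becomes the first positional parameter instead of a trailing `client=None` keyword. The decorator below is a hypothetical sketch of that shape only, not Prefect's actual `client_injector` from `prefect.client.utilities`, which may differ in details not shown here:

```python
# Hypothetical sketch of a "client as first positional argument" injector;
# get_client() is Prefect's real factory, the decorator itself is illustrative.
from functools import wraps
from typing import Any, Awaitable, Callable, TypeVar

from prefect.client.orchestration import get_client

R = TypeVar("R")


def client_injector_sketch(
    fn: Callable[..., Awaitable[R]]
) -> Callable[..., Awaitable[R]]:
    @wraps(fn)
    async def wrapper(*args: Any, **kwargs: Any) -> R:
        async with get_client() as client:
            # The wrapped coroutine always receives the client first, so call
            # sites never pass (or see) a `client=` keyword argument.
            return await fn(client, *args, **kwargs)

    return wrapper


@client_injector_sketch
async def my_helper(client, key: str) -> str:
    # `client` is the injected PrefectClient; API calls would go here.
    return key

# Call sites pass only their own arguments, e.g. `await my_helper("answer")`.
```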
prefect/input/run_input.py CHANGED
@@ -58,7 +58,7 @@ async def receiver_flow():
     ```
 """
 
-
+from inspect import isclass
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -96,7 +96,7 @@ if HAS_PYDANTIC_V2:
     from prefect._internal.pydantic.v2_schema import create_v2_schema
 
 R = TypeVar("R", bound="RunInput")
-T = TypeVar("T")
+T = TypeVar("T", bound="object")
 
 Keyset = Dict[
     Union[Literal["description"], Literal["response"], Literal["schema"]], str
@@ -114,7 +114,8 @@ def keyset_from_paused_state(state: "State") -> Keyset:
     if not state.is_paused():
         raise RuntimeError(f"{state.type.value!r} is unsupported.")
 
-    base_key = f"{state.name.lower()}-{str(state.state_details.pause_key)}"
+    state_name = state.name or ""
+    base_key = f"{state_name.lower()}-{str(state.state_details.pause_key)}"
     return keyset_from_base_key(base_key)
 
 
@@ -234,7 +235,7 @@ class RunInput(pydantic.BaseModel):
                 a flow run that requires input
             - kwargs (Any): the initial data to populate the subclass
         """
-        fields = {}
+        fields: Dict[str, Any] = {}
        for key, value in kwargs.items():
             fields[key] = (type(value), value)
         model = pydantic.create_model(cls.__name__, **fields, __base__=cls)
@@ -340,31 +341,34 @@ class AutomaticRunInput(RunInput, Generic[T]):
    def subclass_from_type(cls, _type: Type[T]) -> Type["AutomaticRunInput[T]"]:
        """
        Create a new `AutomaticRunInput` subclass from the given type.
+
+        This method uses the type's name as a key prefix to identify related
+        flow run inputs. This helps in ensuring that values saved under a type
+        (like List[int]) are retrievable under the generic type name (like "list").
        """
-        fields = {"value": (_type, ...)}
-
-        # Sending a value to a flow run that relies on an AutomaticRunInput will
-        # produce a key prefix that includes the type name. For example, if the
-        # value is a list, the key will include "list" as the type. If the user
-        # then tries to receive the value with a type annotation like List[int],
-        # we need to find the key we saved with "list" as the type (not
-        # "List[int]"). Calling __name__.lower() on a type annotation like
-        # List[int] produces the string "list", which is what we need.
-        if hasattr(_type, "__name__"):
-            type_prefix = _type.__name__.lower()
-        elif hasattr(_type, "_name"):
-            # On Python 3.9 and earlier, type annotation values don't have a
-            # __name__ attribute, but they do have a _name.
-            type_prefix = _type._name.lower()
-        else:
-            # If we can't identify a type name that we can use as a key
-            # prefix that will match an input, we'll have to use
-            # "AutomaticRunInput" as the generic name. This will match all
-            # automatic inputs sent to the flow run, rather than a specific
-            # type.
-            type_prefix = ""
+        fields: Dict[str, Any] = {"value": (_type, ...)}
+
+        # Explanation for using getattr for type name extraction:
+        # - "__name__": This is the usual attribute for getting the name of
+        #   most types.
+        # - "_name": Used as a fallback, some type annotations in Python 3.9
+        #   and earlier might only have this attribute instead of __name__.
+        # - If neither is available, defaults to an empty string to prevent
+        #   errors, but typically we should find at least one valid name
+        #   attribute. This will match all automatic inputs sent to the flow
+        #   run, rather than a specific type.
+        #
+        # This approach ensures compatibility across Python versions and
+        # handles various edge cases in type annotations.
+
+        type_prefix: str = getattr(
+            _type, "__name__", getattr(_type, "_name", "")
+        ).lower()
+
         class_name = f"{type_prefix}AutomaticRunInput"
 
+        # Creating a new Pydantic model class dynamically with the name based
+        # on the type prefix.
         new_cls: Type["AutomaticRunInput"] = pydantic.create_model(
             class_name, **fields, __base__=AutomaticRunInput
         )
@@ -384,18 +388,19 @@ def run_input_subclass_from_type(
    """
    Create a new `RunInput` subclass from the given type.
    """
-    try:
+    if isclass(_type):
        if issubclass(_type, RunInput):
            return cast(Type[R], _type)
        elif issubclass(_type, pydantic.BaseModel):
            return cast(Type[R], RunInput.subclass_from_base_model_type(_type))
-    except TypeError:
-        pass
 
    # Could be something like a typing._GenericAlias or any other type that
    # isn't a `RunInput` subclass or `pydantic.BaseModel` subclass. Try passing
    # it to AutomaticRunInput to see if we can create a model from it.
-    return cast(Type[AutomaticRunInput[T]], AutomaticRunInput.subclass_from_type(_type))
+    return cast(
+        Type[AutomaticRunInput[T]],
+        AutomaticRunInput.subclass_from_type(cast(Type[T], _type)),
+    )
 
 
 class GetInputHandler(Generic[R]):
@@ -425,7 +430,7 @@ class GetInputHandler(Generic[R]):
 
    def __next__(self) -> R:
        try:
-            return self.next()
+            return cast(R, self.next())
        except TimeoutError:
            if self.raise_timeout_error:
                raise
@@ -502,9 +507,11 @@ async def _send_input(
    key_prefix: Optional[str] = None,
 ):
    if isinstance(run_input, RunInput):
-        _run_input = run_input
+        _run_input: RunInput = run_input
    else:
-        input_cls = run_input_subclass_from_type(type(run_input))
+        input_cls: Type[AutomaticRunInput] = run_input_subclass_from_type(
+            type(run_input)
+        )
        _run_input = input_cls(value=run_input)
 
    if key_prefix is None:
@@ -533,8 +540,8 @@ async def send_input(
 
 
 @overload
-def receive_input(
-    input_type: Type[R],
+def receive_input(  # type: ignore[overload-overlap]
+    input_type: Union[Type[R], pydantic.BaseModel],
    timeout: Optional[float] = 3600,
    poll_interval: float = 10,
    raise_timeout_error: bool = False,
@@ -561,7 +568,7 @@
 
 
 def receive_input(
-    input_type: Union[Type[R], Type[T]],
+    input_type: Union[Type[R], Type[T], pydantic.BaseModel],
    timeout: Optional[float] = 3600,
    poll_interval: float = 10,
    raise_timeout_error: bool = False,
@@ -570,7 +577,12 @@
    flow_run_id: Optional[UUID] = None,
    with_metadata: bool = False,
 ) -> Union[GetAutomaticInputHandler[T], GetInputHandler[R]]:
-    input_cls = run_input_subclass_from_type(input_type)
+    # The typing in this module is a bit complex, and at this point `mypy`
+    # thinks that `run_input_subclass_from_type` accepts a `Type[Never]` but
+    # the signature is the same as here:
+    #   Union[Type[R], Type[T], pydantic.BaseModel],
+    # Seems like a possible mypy bug, so we'll ignore the type check here.
+    input_cls = run_input_subclass_from_type(input_type)  # type: ignore[arg-type]
 
    if issubclass(input_cls, AutomaticRunInput):
        return input_cls.receive(
prefect/logging/__init__.py CHANGED
@@ -1,3 +1,3 @@
-from .loggers import disable_run_logger, get_logger, get_run_logger
+from .loggers import disable_run_logger, get_logger, get_run_logger, LogEavesdropper
 
-__all__ = ["get_logger", "get_run_logger"]
+__all__ = ["get_logger", "get_run_logger", "LogEavesdropper"]
prefect/logging/loggers.py CHANGED
@@ -5,7 +5,10 @@ import warnings
 from builtins import print
 from contextlib import contextmanager
 from functools import lru_cache
-from typing import TYPE_CHECKING, Dict, Optional, Union
+from logging import LogRecord
+from typing import TYPE_CHECKING, Dict, List, Optional, Union
+
+from typing_extensions import Self
 
 import prefect
 from prefect.exceptions import MissingContextError
@@ -295,3 +298,63 @@ def patch_print():
        yield
    finally:
        builtins.print = original
+
+
+class LogEavesdropper(logging.Handler):
+    """A context manager that collects logs for the duration of the context
+
+    Example:
+
+        ```python
+        import logging
+        from prefect.logging import LogEavesdropper
+
+        with LogEavesdropper("my_logger") as eavesdropper:
+            logging.getLogger("my_logger").info("Hello, world!")
+            logging.getLogger("my_logger.child_module").info("Another one!")
+
+        print(eavesdropper.text())
+
+        # Outputs: "Hello, world!\nAnother one!"
+    """
+
+    _target_logger: logging.Logger
+    _lines: List[str]
+
+    def __init__(self, eavesdrop_on: str, level: int = logging.NOTSET):
+        """
+        Args:
+            eavesdrop_on (str): the name of the logger to eavesdrop on
+            level (int): the minimum log level to eavesdrop on; if omitted, all levels
+                are captured
+        """
+
+        super().__init__(level=level)
+        self.eavesdrop_on = eavesdrop_on
+        self._target_logger = None
+
+        # It's important that we use a very minimalistic formatter for use cases where
+        # we may present these logs back to the user. We shouldn't leak filenames,
+        # versions, or other environmental information.
+        self.formatter = logging.Formatter("[%(levelname)s]: %(message)s")
+
+    def __enter__(self) -> Self:
+        self._target_logger = logging.getLogger(self.eavesdrop_on)
+        self._original_level = self._target_logger.level
+        self._target_logger.level = self.level
+        self._target_logger.addHandler(self)
+        self._lines = []
+        return self
+
+    def __exit__(self, *_):
+        if self._target_logger:
+            self._target_logger.removeHandler(self)
+            self._target_logger.level = self._original_level
+
+    def emit(self, record: LogRecord) -> None:
+        """The logging.Handler implementation, not intended to be called directly."""
+        self._lines.append(self.format(record))
+
+    def text(self) -> str:
+        """Return the collected logs as a single newline-delimited string"""
+        return "\n".join(self._lines)
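A short usage note on `LogEavesdropper` beyond the docstring example: because the handler is installed with the given `level` and a minimal `[%(levelname)s]: %(message)s` formatter, passing `level=logging.WARNING` should capture only warnings and above, with no file names or timestamps in the output. A small sketch, assuming the behavior shown in the diff:

```python
# Sketch of level filtering with LogEavesdropper, assuming the implementation
# shown above; output formatting follows the "[LEVEL]: message" formatter.
import logging

from prefect.logging import LogEavesdropper

with LogEavesdropper("my_app", level=logging.WARNING) as eavesdropper:
    logging.getLogger("my_app").info("routine detail")       # below WARNING: dropped
    logging.getLogger("my_app").warning("disk nearly full")  # captured

print(eavesdropper.text())
# Expected output: "[WARNING]: disk nearly full"
```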