prefect-client 3.1.12__py3-none-any.whl → 3.1.14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (111)
  1. prefect/_experimental/lineage.py +63 -0
  2. prefect/_experimental/sla/client.py +53 -27
  3. prefect/_experimental/sla/objects.py +10 -2
  4. prefect/_internal/concurrency/services.py +2 -2
  5. prefect/_internal/concurrency/threads.py +6 -0
  6. prefect/_internal/retries.py +6 -3
  7. prefect/_internal/schemas/validators.py +6 -4
  8. prefect/_version.py +3 -3
  9. prefect/artifacts.py +4 -1
  10. prefect/automations.py +1 -1
  11. prefect/blocks/abstract.py +5 -2
  12. prefect/blocks/notifications.py +1 -0
  13. prefect/cache_policies.py +70 -22
  14. prefect/client/orchestration/_automations/client.py +4 -0
  15. prefect/client/orchestration/_deployments/client.py +3 -3
  16. prefect/client/utilities.py +3 -3
  17. prefect/context.py +16 -6
  18. prefect/deployments/base.py +7 -4
  19. prefect/deployments/flow_runs.py +5 -1
  20. prefect/deployments/runner.py +6 -11
  21. prefect/deployments/steps/core.py +1 -1
  22. prefect/deployments/steps/pull.py +8 -3
  23. prefect/deployments/steps/utility.py +2 -2
  24. prefect/docker/docker_image.py +13 -9
  25. prefect/engine.py +19 -10
  26. prefect/events/cli/automations.py +4 -4
  27. prefect/events/clients.py +17 -14
  28. prefect/events/filters.py +34 -34
  29. prefect/events/schemas/automations.py +12 -8
  30. prefect/events/schemas/events.py +5 -1
  31. prefect/events/worker.py +1 -1
  32. prefect/filesystems.py +1 -1
  33. prefect/flow_engine.py +172 -123
  34. prefect/flows.py +119 -74
  35. prefect/futures.py +14 -7
  36. prefect/infrastructure/provisioners/__init__.py +2 -0
  37. prefect/infrastructure/provisioners/cloud_run.py +4 -4
  38. prefect/infrastructure/provisioners/coiled.py +249 -0
  39. prefect/infrastructure/provisioners/container_instance.py +4 -3
  40. prefect/infrastructure/provisioners/ecs.py +55 -43
  41. prefect/infrastructure/provisioners/modal.py +5 -4
  42. prefect/input/actions.py +5 -1
  43. prefect/input/run_input.py +157 -43
  44. prefect/logging/configuration.py +5 -8
  45. prefect/logging/filters.py +2 -2
  46. prefect/logging/formatters.py +15 -11
  47. prefect/logging/handlers.py +24 -14
  48. prefect/logging/highlighters.py +5 -5
  49. prefect/logging/loggers.py +29 -20
  50. prefect/main.py +3 -1
  51. prefect/results.py +166 -86
  52. prefect/runner/runner.py +112 -84
  53. prefect/runner/server.py +3 -1
  54. prefect/runner/storage.py +18 -18
  55. prefect/runner/submit.py +19 -12
  56. prefect/runtime/deployment.py +15 -8
  57. prefect/runtime/flow_run.py +19 -6
  58. prefect/runtime/task_run.py +7 -3
  59. prefect/settings/base.py +17 -7
  60. prefect/settings/legacy.py +4 -4
  61. prefect/settings/models/api.py +4 -3
  62. prefect/settings/models/cli.py +4 -3
  63. prefect/settings/models/client.py +7 -4
  64. prefect/settings/models/cloud.py +4 -3
  65. prefect/settings/models/deployments.py +4 -3
  66. prefect/settings/models/experiments.py +4 -3
  67. prefect/settings/models/flows.py +4 -3
  68. prefect/settings/models/internal.py +4 -3
  69. prefect/settings/models/logging.py +8 -6
  70. prefect/settings/models/results.py +4 -3
  71. prefect/settings/models/root.py +11 -16
  72. prefect/settings/models/runner.py +8 -5
  73. prefect/settings/models/server/api.py +6 -3
  74. prefect/settings/models/server/database.py +120 -25
  75. prefect/settings/models/server/deployments.py +4 -3
  76. prefect/settings/models/server/ephemeral.py +7 -4
  77. prefect/settings/models/server/events.py +6 -3
  78. prefect/settings/models/server/flow_run_graph.py +4 -3
  79. prefect/settings/models/server/root.py +4 -3
  80. prefect/settings/models/server/services.py +15 -12
  81. prefect/settings/models/server/tasks.py +7 -4
  82. prefect/settings/models/server/ui.py +4 -3
  83. prefect/settings/models/tasks.py +10 -5
  84. prefect/settings/models/testing.py +4 -3
  85. prefect/settings/models/worker.py +7 -4
  86. prefect/settings/profiles.py +13 -12
  87. prefect/settings/sources.py +20 -19
  88. prefect/states.py +17 -13
  89. prefect/task_engine.py +43 -33
  90. prefect/task_runners.py +35 -23
  91. prefect/task_runs.py +20 -11
  92. prefect/task_worker.py +12 -7
  93. prefect/tasks.py +67 -25
  94. prefect/telemetry/bootstrap.py +4 -1
  95. prefect/telemetry/run_telemetry.py +15 -13
  96. prefect/transactions.py +3 -3
  97. prefect/types/__init__.py +9 -6
  98. prefect/types/_datetime.py +19 -0
  99. prefect/utilities/_deprecated.py +38 -0
  100. prefect/utilities/engine.py +11 -4
  101. prefect/utilities/filesystem.py +2 -2
  102. prefect/utilities/generics.py +1 -1
  103. prefect/utilities/pydantic.py +21 -36
  104. prefect/workers/base.py +52 -30
  105. prefect/workers/process.py +20 -15
  106. prefect/workers/server.py +4 -5
  107. {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/METADATA +2 -2
  108. {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/RECORD +111 -108
  109. {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/LICENSE +0 -0
  110. {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/WHEEL +0 -0
  111. {prefect_client-3.1.12.dist-info → prefect_client-3.1.14.dist-info}/top_level.txt +0 -0
prefect/input/run_input.py

@@ -60,11 +60,15 @@ async def receiver_flow():
     ```
 """
 
+from __future__ import annotations
+
+import inspect
 from inspect import isclass
 from typing import (
     TYPE_CHECKING,
     Any,
     ClassVar,
+    Coroutine,
     Dict,
     Generic,
     Literal,
@@ -81,6 +85,7 @@ from uuid import UUID, uuid4
 import anyio
 import pydantic
 from pydantic import ConfigDict
+from typing_extensions import Self
 
 from prefect.input.actions import (
     create_flow_run_input,
@@ -144,7 +149,7 @@ class RunInputMetadata(pydantic.BaseModel):
     receiver: UUID
 
 
-class RunInput(pydantic.BaseModel):
+class BaseRunInput(pydantic.BaseModel):
     model_config: ClassVar[ConfigDict] = ConfigDict(extra="forbid")
 
     _description: Optional[str] = pydantic.PrivateAttr(default=None)
@@ -172,23 +177,29 @@ class RunInput(pydantic.BaseModel):
         if is_v2_model(cls):
             schema = create_v2_schema(cls.__name__, model_base=cls)
         else:
-            schema = cls.schema(by_alias=True)
+            schema = cls.model_json_schema(by_alias=True)
 
-        await create_flow_run_input(
+        coro = create_flow_run_input(
             key=keyset["schema"], value=schema, flow_run_id=flow_run_id
         )
+        if TYPE_CHECKING:
+            assert inspect.iscoroutine(coro)
+        await coro
 
         description = cls._description if isinstance(cls._description, str) else None
         if description:
-            await create_flow_run_input(
+            coro = create_flow_run_input(
                 key=keyset["description"],
                 value=description,
                 flow_run_id=flow_run_id,
            )
+            if TYPE_CHECKING:
+                assert inspect.iscoroutine(coro)
+            await coro
 
     @classmethod
     @sync_compatible
-    async def load(cls, keyset: Keyset, flow_run_id: Optional[UUID] = None):
+    async def load(cls, keyset: Keyset, flow_run_id: Optional[UUID] = None) -> Self:
         """
         Load the run input response from the given key.
 
@@ -208,7 +219,7 @@ class RunInput(pydantic.BaseModel):
         return instance
 
     @classmethod
-    def load_from_flow_run_input(cls, flow_run_input: "FlowRunInput"):
+    def load_from_flow_run_input(cls, flow_run_input: "FlowRunInput") -> Self:
         """
         Load the run input from a FlowRunInput object.
 
@@ -284,6 +295,8 @@ class RunInput(pydantic.BaseModel):
             key_prefix=key_prefix,
         )
 
+
+class RunInput(BaseRunInput):
     @classmethod
     def receive(
         cls,
@@ -293,7 +306,7 @@ class RunInput(pydantic.BaseModel):
         exclude_keys: Optional[Set[str]] = None,
         key_prefix: Optional[str] = None,
         flow_run_id: Optional[UUID] = None,
-    ):
+    ) -> GetInputHandler[Self]:
         if key_prefix is None:
             key_prefix = f"{cls.__name__.lower()}-auto"
 
@@ -322,12 +335,12 @@ class RunInput(pydantic.BaseModel):
     return type(f"{model_cls.__name__}RunInput", (RunInput, model_cls), {})  # type: ignore
 
 
-class AutomaticRunInput(RunInput, Generic[T]):
+class AutomaticRunInput(BaseRunInput, Generic[T]):
     value: T
 
     @classmethod
     @sync_compatible
-    async def load(cls, keyset: Keyset, flow_run_id: Optional[UUID] = None) -> T:
+    async def load(cls, keyset: Keyset, flow_run_id: Optional[UUID] = None) -> Self:
         """
         Load the run input response from the given key.
 
@@ -335,7 +348,10 @@ class AutomaticRunInput(RunInput, Generic[T]):
         - keyset (Keyset): the keyset to load the input for
         - flow_run_id (UUID, optional): the flow run ID to load the input for
         """
-        instance = await super().load(keyset, flow_run_id=flow_run_id)
+        instance_coro = super().load(keyset, flow_run_id=flow_run_id)
+        if TYPE_CHECKING:
+            assert inspect.iscoroutine(instance_coro)
+        instance = await instance_coro
         return instance.value
 
     @classmethod
@@ -370,17 +386,34 @@ class AutomaticRunInput(RunInput, Generic[T]):
 
         # Creating a new Pydantic model class dynamically with the name based
        # on the type prefix.
-        new_cls: Type["AutomaticRunInput"] = pydantic.create_model(
+        new_cls: Type["AutomaticRunInput[T]"] = pydantic.create_model(
            class_name, **fields, __base__=AutomaticRunInput
        )
        return new_cls
 
     @classmethod
-    def receive(cls, *args, **kwargs):
-        if kwargs.get("key_prefix") is None:
-            kwargs["key_prefix"] = f"{cls.__name__.lower()}-auto"
+    def receive(
+        cls,
+        timeout: Optional[float] = 3600,
+        poll_interval: float = 10,
+        raise_timeout_error: bool = False,
+        exclude_keys: Optional[Set[str]] = None,
+        key_prefix: Optional[str] = None,
+        flow_run_id: Optional[UUID] = None,
+        with_metadata: bool = False,
+    ) -> GetAutomaticInputHandler[T]:
+        key_prefix = key_prefix or f"{cls.__name__.lower()}-auto"
 
-        return GetAutomaticInputHandler(run_input_cls=cls, *args, **kwargs)
+        return GetAutomaticInputHandler(
+            run_input_cls=cls,
+            key_prefix=key_prefix,
+            timeout=timeout,
+            poll_interval=poll_interval,
+            raise_timeout_error=raise_timeout_error,
+            exclude_keys=exclude_keys,
+            flow_run_id=flow_run_id,
+            with_metadata=with_metadata,
+        )
 
 
 def run_input_subclass_from_type(
@@ -409,24 +442,24 @@ class GetInputHandler(Generic[R]):
         self,
         run_input_cls: Type[R],
         key_prefix: str,
-        timeout: Optional[float] = 3600,
+        timeout: float | None = 3600,
         poll_interval: float = 10,
         raise_timeout_error: bool = False,
         exclude_keys: Optional[Set[str]] = None,
         flow_run_id: Optional[UUID] = None,
     ):
-        self.run_input_cls = run_input_cls
-        self.key_prefix = key_prefix
-        self.timeout = timeout
-        self.poll_interval = poll_interval
-        self.exclude_keys = set()
-        self.raise_timeout_error = raise_timeout_error
-        self.flow_run_id = ensure_flow_run_id(flow_run_id)
+        self.run_input_cls: Type[R] = run_input_cls
+        self.key_prefix: str = key_prefix
+        self.timeout: float | None = timeout
+        self.poll_interval: float = poll_interval
+        self.exclude_keys: set[str] = set()
+        self.raise_timeout_error: bool = raise_timeout_error
+        self.flow_run_id: UUID = ensure_flow_run_id(flow_run_id)
 
         if exclude_keys is not None:
             self.exclude_keys.update(exclude_keys)
 
-    def __iter__(self):
+    def __iter__(self) -> Self:
         return self
 
     def __next__(self) -> R:
@@ -437,24 +470,31 @@ class GetInputHandler(Generic[R]):
                 raise
             raise StopIteration
 
-    def __aiter__(self):
+    def __aiter__(self) -> Self:
         return self
 
     async def __anext__(self) -> R:
         try:
-            return await self.next()
+            coro = self.next()
+            if TYPE_CHECKING:
+                assert inspect.iscoroutine(coro)
+            return await coro
         except TimeoutError:
             if self.raise_timeout_error:
                 raise
             raise StopAsyncIteration
 
-    async def filter_for_inputs(self):
-        flow_run_inputs = await filter_flow_run_input(
+    async def filter_for_inputs(self) -> list["FlowRunInput"]:
+        flow_run_inputs_coro = filter_flow_run_input(
             key_prefix=self.key_prefix,
             limit=1,
             exclude_keys=self.exclude_keys,
             flow_run_id=self.flow_run_id,
        )
+        if TYPE_CHECKING:
+            assert inspect.iscoroutine(flow_run_inputs_coro)
+
+        flow_run_inputs = await flow_run_inputs_coro
 
         if flow_run_inputs:
             self.exclude_keys.add(*[i.key for i in flow_run_inputs])
@@ -478,22 +518,91 @@ class GetInputHandler(Generic[R]):
             return self.to_instance(flow_run_inputs[0])
 
 
-class GetAutomaticInputHandler(GetInputHandler, Generic[T]):
-    def __init__(self, *args, **kwargs):
-        self.with_metadata = kwargs.pop("with_metadata", False)
-        super().__init__(*args, **kwargs)
+class GetAutomaticInputHandler(Generic[T]):
+    def __init__(
+        self,
+        run_input_cls: Type[AutomaticRunInput[T]],
+        key_prefix: str,
+        timeout: float | None = 3600,
+        poll_interval: float = 10,
+        raise_timeout_error: bool = False,
+        exclude_keys: Optional[Set[str]] = None,
+        flow_run_id: Optional[UUID] = None,
+        with_metadata: bool = False,
+    ):
+        self.run_input_cls: Type[AutomaticRunInput[T]] = run_input_cls
+        self.key_prefix: str = key_prefix
+        self.timeout: float | None = timeout
+        self.poll_interval: float = poll_interval
+        self.exclude_keys: set[str] = set()
+        self.raise_timeout_error: bool = raise_timeout_error
+        self.flow_run_id: UUID = ensure_flow_run_id(flow_run_id)
+        self.with_metadata = with_metadata
 
-    def __next__(self) -> T:
-        return cast(T, super().__next__())
+        if exclude_keys is not None:
+            self.exclude_keys.update(exclude_keys)
 
-    async def __anext__(self) -> T:
-        return cast(T, await super().__anext__())
+    def __iter__(self) -> Self:
+        return self
+
+    def __next__(self) -> T | AutomaticRunInput[T]:
+        try:
+            not_coro = self.next()
+            if TYPE_CHECKING:
+                assert not isinstance(not_coro, Coroutine)
+            return not_coro
+        except TimeoutError:
+            if self.raise_timeout_error:
+                raise
+            raise StopIteration
+
+    def __aiter__(self) -> Self:
+        return self
+
+    async def __anext__(self) -> Union[T, AutomaticRunInput[T]]:
+        try:
+            coro = self.next()
+            if TYPE_CHECKING:
+                assert inspect.iscoroutine(coro)
+            return cast(Union[T, AutomaticRunInput[T]], await coro)
+        except TimeoutError:
+            if self.raise_timeout_error:
+                raise
+            raise StopAsyncIteration
+
+    async def filter_for_inputs(self) -> list["FlowRunInput"]:
+        flow_run_inputs_coro = filter_flow_run_input(
+            key_prefix=self.key_prefix,
+            limit=1,
+            exclude_keys=self.exclude_keys,
+            flow_run_id=self.flow_run_id,
+        )
+        if TYPE_CHECKING:
+            assert inspect.iscoroutine(flow_run_inputs_coro)
+
+        flow_run_inputs = await flow_run_inputs_coro
+
+        if flow_run_inputs:
+            self.exclude_keys.add(*[i.key for i in flow_run_inputs])
+
+        return flow_run_inputs
 
     @sync_compatible
-    async def next(self) -> T:
-        return cast(T, await super().next())
+    async def next(self) -> Union[T, AutomaticRunInput[T]]:
+        flow_run_inputs = await self.filter_for_inputs()
+        if flow_run_inputs:
+            return self.to_instance(flow_run_inputs[0])
 
-    def to_instance(self, flow_run_input: "FlowRunInput") -> T:
+        with anyio.fail_after(self.timeout):
+            while True:
+                await anyio.sleep(self.poll_interval)
+                flow_run_inputs = await self.filter_for_inputs()
+                if flow_run_inputs:
+                    return self.to_instance(flow_run_inputs[0])
+
+    def to_instance(
+        self, flow_run_input: "FlowRunInput"
+    ) -> Union[T, AutomaticRunInput[T]]:
         run_input = self.run_input_cls.load_from_flow_run_input(flow_run_input)
 
         if self.with_metadata:
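The new `GetAutomaticInputHandler.next` above no longer delegates to a parent class; it checks once immediately and then polls `filter_for_inputs` under `anyio.fail_after` until an input arrives or the timeout raises `TimeoutError`. A standalone sketch of that poll-until-timeout shape, where `fetch_batch` is a made-up stand-in for `filter_for_inputs`:

```python
import anyio


async def fetch_batch() -> list[str]:
    """Hypothetical stand-in for filter_for_inputs(); returns [] until data arrives."""
    return []


async def next_item(timeout: float | None = 3600, poll_interval: float = 10) -> str:
    # First check happens immediately, before any sleeping.
    batch = await fetch_batch()
    if batch:
        return batch[0]

    # anyio.fail_after raises TimeoutError once `timeout` seconds elapse;
    # the iterator protocol above translates that into StopIteration or
    # StopAsyncIteration unless raise_timeout_error is set.
    with anyio.fail_after(timeout):
        while True:
            await anyio.sleep(poll_interval)
            batch = await fetch_batch()
            if batch:
                return batch[0]
```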
@@ -503,14 +612,15 @@ class GetAutomaticInputHandler(GetInputHandler, Generic[T]):
 
 
 async def _send_input(
     flow_run_id: UUID,
-    run_input: Any,
+    run_input: RunInput | pydantic.BaseModel,
     sender: Optional[str] = None,
     key_prefix: Optional[str] = None,
 ):
+    _run_input: Union[RunInput, AutomaticRunInput[Any]]
     if isinstance(run_input, RunInput):
-        _run_input: RunInput = run_input
+        _run_input = run_input
     else:
-        input_cls: Type[AutomaticRunInput] = run_input_subclass_from_type(
+        input_cls: Type[AutomaticRunInput[Any]] = run_input_subclass_from_type(
            type(run_input)
        )
        _run_input = input_cls(value=run_input)
@@ -520,9 +630,13 @@ async def _send_input(
 
     key = f"{key_prefix}-{uuid4()}"
 
-    await create_flow_run_input_from_model(
+    coro = create_flow_run_input_from_model(
        key=key, flow_run_id=flow_run_id, model_instance=_run_input, sender=sender
    )
+    if TYPE_CHECKING:
+        assert inspect.iscoroutine(coro)
+
+    await coro
 
 
 @sync_compatible
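Taken together, the `run_input.py` changes split the old `RunInput` base into `BaseRunInput` plus thin `RunInput` and `AutomaticRunInput` subclasses, and `receive()` now returns a typed handler (`GetInputHandler[Self]` or `GetAutomaticInputHandler[T]`) instead of an untyped object. A rough usage sketch of how a typed run input reads against this API; the flow and field names are illustrative, not taken from the diff:

```python
from prefect import flow, get_run_logger
from prefect.input import RunInput


class ApprovalInput(RunInput):
    approved: bool
    note: str = ""


@flow
async def wait_for_approval() -> None:
    logger = get_run_logger()
    # receive() returns an async-iterable GetInputHandler[ApprovalInput];
    # it polls until an input arrives or the timeout elapses, after which
    # iteration simply stops (unless raise_timeout_error=True).
    async for approval in ApprovalInput.receive(timeout=300, poll_interval=5):
        logger.info("approved=%s note=%s", approval.approved, approval.note)
        break
```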
prefect/logging/configuration.py

@@ -6,7 +6,7 @@ import string
 import warnings
 from functools import partial
 from pathlib import Path
-from typing import Any, Dict, Optional
+from typing import Any, Callable, Dict, Optional
 
 import yaml
 
@@ -24,10 +24,10 @@ DEFAULT_LOGGING_SETTINGS_PATH = Path(__file__).parent / "logging.yml"
 PROCESS_LOGGING_CONFIG: Optional[Dict[str, Any]] = None
 
 # Regex call to replace non-alphanumeric characters to '_' to create a valid env var
-to_envvar = partial(re.sub, re.compile(r"[^0-9a-zA-Z]+"), "_")
+to_envvar: Callable[[str], str] = partial(re.sub, re.compile(r"[^0-9a-zA-Z]+"), "_")
 
 
-def load_logging_config(path: Path) -> dict:
+def load_logging_config(path: Path) -> dict[str, Any]:
     """
     Loads logging configuration from a path allowing override from the environment
     """
@@ -94,12 +94,9 @@ def setup_logging(incremental: Optional[bool] = None) -> dict[str, Any]:
 
     for logger_name in PREFECT_LOGGING_EXTRA_LOGGERS.value():
         logger = logging.getLogger(logger_name)
-        for handler in extra_config.handlers:
-            if not config["incremental"]:
+        if not config["incremental"]:
+            for handler in extra_config.handlers:
                 logger.addHandler(handler)
-            if logger.level == logging.NOTSET:
-                logger.setLevel(extra_config.level)
-            logger.propagate = extra_config.propagate
 
     PROCESS_LOGGING_CONFIG = config
 
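The `setup_logging` hunk changes how `PREFECT_LOGGING_EXTRA_LOGGERS` are wired: handlers are attached only on a non-incremental configuration pass, and each extra logger's level and `propagate` flag are no longer overridden. A minimal sketch of the new attachment logic in isolation; names like `attach_extra_loggers` and `extra_handlers` are placeholders, not Prefect APIs:

```python
import logging


def attach_extra_loggers(
    logger_names: list[str],
    extra_handlers: list[logging.Handler],
    incremental: bool,
) -> None:
    """Mirror the new behavior: attach handlers only on a full (non-incremental)
    configuration pass and leave each logger's level and propagate untouched."""
    for name in logger_names:
        logger = logging.getLogger(name)
        if not incremental:
            for handler in extra_handlers:
                logger.addHandler(handler)
```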
prefect/logging/filters.py

@@ -5,7 +5,7 @@ from prefect.utilities.collections import visit_collection
 from prefect.utilities.names import obfuscate
 
 
-def redact_substr(obj: Any, substr: str):
+def redact_substr(obj: Any, substr: str) -> Any:
     """
     Redact a string from a potentially nested object.
 
@@ -17,7 +17,7 @@ def redact_substr(obj: Any, substr: str):
         Any: The object with the API key redacted.
     """
 
-    def redact_item(item):
+    def redact_item(item: Any) -> Any:
         if isinstance(item, str):
             return item.replace(substr, obfuscate(substr))
         return item
prefect/logging/formatters.py

@@ -1,8 +1,10 @@
+from __future__ import annotations
+
 import logging.handlers
 import sys
 import traceback
 from types import TracebackType
-from typing import Optional, Tuple, Type, Union
+from typing import Any, Literal, Optional, Tuple, Type, Union
 
 import orjson
 
@@ -14,7 +16,7 @@ ExceptionInfoType = Union[
 ]
 
 
-def format_exception_info(exc_info: ExceptionInfoType) -> dict:
+def format_exception_info(exc_info: ExceptionInfoType) -> dict[str, Any]:
     # if sys.exc_info() returned a (None, None, None) tuple,
     # then there's nothing to format
     if exc_info[0] is None:
@@ -40,13 +42,15 @@ class JsonFormatter(logging.Formatter):
     newlines.
     """
 
-    def __init__(self, fmt, dmft, style) -> None:  # noqa
+    def __init__(
+        self, fmt: Literal["pretty", "default"], dmft: str, style: str
+    ) -> None:  # noqa
         super().__init__()
 
         if fmt not in ["pretty", "default"]:
             raise ValueError("Format must be either 'pretty' or 'default'.")
 
-        self.serializer = JSONSerializer(
+        self.serializer: JSONSerializer = JSONSerializer(
            jsonlib="orjson",
            dumps_kwargs={"option": orjson.OPT_INDENT_2} if fmt == "pretty" else {},
        )
@@ -72,13 +76,13 @@ class JsonFormatter(logging.Formatter):
 class PrefectFormatter(logging.Formatter):
     def __init__(
         self,
-        format=None,
-        datefmt=None,
-        style="%",
-        validate=True,
+        format: str | None = None,
+        datefmt: str | None = None,
+        style: str = "%",
+        validate: bool = True,
         *,
-        defaults=None,
-        task_run_fmt: Optional[str] = None,
+        defaults: dict[str, Any] | None = None,
+        task_run_fmt: str | None = None,
         flow_run_fmt: Optional[str] = None,
     ) -> None:
         """
@@ -118,7 +122,7 @@ class PrefectFormatter(logging.Formatter):
         self._flow_run_style.validate()
         self._task_run_style.validate()
 
-    def formatMessage(self, record: logging.LogRecord):
+    def formatMessage(self, record: logging.LogRecord) -> str:
         if record.name == "prefect.flow_runs":
             style = self._flow_run_style
         elif record.name == "prefect.task_runs":
prefect/logging/handlers.py

@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 import json
 import logging
 import sys
@@ -6,7 +8,7 @@ import traceback
 import uuid
 import warnings
 from contextlib import asynccontextmanager
-from typing import Any, Dict, List, Optional, Type, Union
+from typing import TYPE_CHECKING, Any, Dict, List, TextIO, Type
 
 import pendulum
 from rich.console import Console
@@ -34,6 +36,14 @@ from prefect.settings import (
     PREFECT_LOGGING_TO_API_WHEN_MISSING_FLOW,
 )
 
+if sys.version_info >= (3, 12):
+    StreamHandler = logging.StreamHandler[TextIO]
+else:
+    if TYPE_CHECKING:
+        StreamHandler = logging.StreamHandler[TextIO]
+    else:
+        StreamHandler = logging.StreamHandler
+
 
 class APILogWorker(BatchedQueueService[Dict[str, Any]]):
     @property
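The new `StreamHandler` alias exists because `logging.StreamHandler` only became subscriptable at runtime in Python 3.12; on older interpreters the subscripted form is valid for type checkers only. A sketch of the same version-gated alias pattern applied to a custom handler; `MyConsoleHandler` is a hypothetical example, not Prefect's class:

```python
import logging
import sys
from typing import TYPE_CHECKING, TextIO

if sys.version_info >= (3, 12):
    # Runtime subscription of logging.StreamHandler is supported on 3.12+.
    _StreamHandler = logging.StreamHandler[TextIO]
else:
    if TYPE_CHECKING:
        # Type checkers always understand the subscripted generic.
        _StreamHandler = logging.StreamHandler[TextIO]
    else:
        # Older runtimes raise TypeError on subscription, so fall back.
        _StreamHandler = logging.StreamHandler


class MyConsoleHandler(_StreamHandler):
    """Hypothetical handler that inherits the text-stream-typed base."""

    def emit(self, record: logging.LogRecord) -> None:
        self.stream.write(self.format(record) + "\n")
```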
@@ -90,7 +100,7 @@ class APILogHandler(logging.Handler):
     """
 
     @classmethod
-    def flush(cls):
+    def flush(cls) -> None:
         """
         Tell the `APILogWorker` to send any currently enqueued logs and block until
         completion.
@@ -107,8 +117,8 @@ class APILogHandler(logging.Handler):
            )
 
            # Not ideal, but this method is called by the stdlib and cannot return a
-            # coroutine so we just schedule the drain in the global loop thread and continue
-            from_sync.call_soon_in_loop_thread(create_call(APILogWorker.drain_all))
+            # coroutine so we just schedule the drain in a new thread and continue
+            from_sync.call_soon_in_new_thread(create_call(APILogWorker.drain_all))
            return None
        else:
            # We set a timeout of 5s because we don't want to block forever if the worker
@@ -118,7 +128,7 @@ class APILogHandler(logging.Handler):
            return APILogWorker.drain_all(timeout=5)
 
     @classmethod
-    async def aflush(cls):
+    async def aflush(cls) -> bool:
         """
         Tell the `APILogWorker` to send any currently enqueued logs and block until
         completion.
@@ -126,7 +136,7 @@ class APILogHandler(logging.Handler):
 
         return await APILogWorker.drain_all()
 
-    def emit(self, record: logging.LogRecord):
+    def emit(self, record: logging.LogRecord) -> None:
         """
         Send a log to the `APILogWorker`
         """
@@ -239,7 +249,7 @@ class APILogHandler(logging.Handler):
 
 
 class WorkerAPILogHandler(APILogHandler):
-    def emit(self, record: logging.LogRecord):
+    def emit(self, record: logging.LogRecord) -> None:
         # Open-source API servers do not currently support worker logs, and
         # worker logs only have an associated worker ID when connected to Cloud,
         # so we won't send worker logs to the API unless they have a worker ID.
@@ -278,13 +288,13 @@ class WorkerAPILogHandler(APILogHandler):
         return log
 
 
-class PrefectConsoleHandler(logging.StreamHandler):
+class PrefectConsoleHandler(StreamHandler):
     def __init__(
         self,
-        stream=None,
-        highlighter: Highlighter = PrefectConsoleHighlighter,
-        styles: Optional[Dict[str, str]] = None,
-        level: Union[int, str] = logging.NOTSET,
+        stream: TextIO | None = None,
+        highlighter: type[Highlighter] = PrefectConsoleHighlighter,
+        styles: dict[str, str] | None = None,
+        level: int | str = logging.NOTSET,
     ):
         """
         The default console handler for Prefect, which highlights log levels,
@@ -307,14 +317,14 @@ class PrefectConsoleHandler(logging.StreamHandler):
            theme = Theme(inherit=False)
 
        self.level = level
-        self.console = Console(
+        self.console: Console = Console(
            highlighter=highlighter,
            theme=theme,
            file=self.stream,
            markup=markup_console,
        )
 
-    def emit(self, record: logging.LogRecord):
+    def emit(self, record: logging.LogRecord) -> None:
        try:
            message = self.format(record)
            self.console.print(message, soft_wrap=True)
prefect/logging/highlighters.py

@@ -7,7 +7,7 @@ class LevelHighlighter(RegexHighlighter):
     """Apply style to log levels."""
 
     base_style = "level."
-    highlights = [
+    highlights: list[str] = [
         r"(?P<debug_level>DEBUG)",
         r"(?P<info_level>INFO)",
         r"(?P<warning_level>WARNING)",
@@ -20,7 +20,7 @@ class UrlHighlighter(RegexHighlighter):
     """Apply style to urls."""
 
     base_style = "url."
-    highlights = [
+    highlights: list[str] = [
         r"(?P<web_url>(https|http|ws|wss):\/\/[0-9a-zA-Z\$\-\_\+\!`\(\)\,\.\?\/\;\:\&\=\%\#]*)",
         r"(?P<local_url>(file):\/\/[0-9a-zA-Z\$\-\_\+\!`\(\)\,\.\?\/\;\:\&\=\%\#]*)",
     ]
@@ -30,7 +30,7 @@ class NameHighlighter(RegexHighlighter):
     """Apply style to names."""
 
     base_style = "name."
-    highlights = [
+    highlights: list[str] = [
         # ?i means case insensitive
         # ?<= means find string right after the words: flow run
         r"(?i)(?P<flow_run_name>(?<=flow run) \'(.*?)\')",
@@ -44,7 +44,7 @@ class StateHighlighter(RegexHighlighter):
     """Apply style to states."""
 
     base_style = "state."
-    highlights = [
+    highlights: list[str] = [
         rf"(?P<{state.lower()}_state>{state.title()})" for state in StateType
     ] + [
         r"(?P<cached_state>Cached)(?=\(type=COMPLETED\))"  # Highlight only "Cached"
@@ -55,7 +55,7 @@ class PrefectConsoleHighlighter(RegexHighlighter):
     """Applies style from multiple highlighters."""
 
     base_style = "log."
-    highlights = (
+    highlights: list[str] = (
         LevelHighlighter.highlights
         + UrlHighlighter.highlights
         + NameHighlighter.highlights