prefect-client 3.4.6.dev1__py3-none-any.whl → 3.4.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. prefect/AGENTS.md +28 -0
  2. prefect/_build_info.py +3 -3
  3. prefect/_internal/websockets.py +109 -0
  4. prefect/artifacts.py +51 -2
  5. prefect/assets/core.py +2 -2
  6. prefect/blocks/core.py +82 -11
  7. prefect/client/cloud.py +11 -1
  8. prefect/client/orchestration/__init__.py +21 -15
  9. prefect/client/orchestration/_deployments/client.py +139 -4
  10. prefect/client/orchestration/_flows/client.py +4 -4
  11. prefect/client/schemas/__init__.py +5 -2
  12. prefect/client/schemas/actions.py +1 -0
  13. prefect/client/schemas/filters.py +3 -0
  14. prefect/client/schemas/objects.py +27 -10
  15. prefect/context.py +16 -7
  16. prefect/events/clients.py +2 -76
  17. prefect/events/schemas/automations.py +4 -0
  18. prefect/events/schemas/labelling.py +2 -0
  19. prefect/flow_engine.py +6 -3
  20. prefect/flows.py +64 -45
  21. prefect/futures.py +25 -4
  22. prefect/locking/filesystem.py +1 -1
  23. prefect/logging/clients.py +347 -0
  24. prefect/runner/runner.py +1 -1
  25. prefect/runner/submit.py +10 -4
  26. prefect/serializers.py +8 -3
  27. prefect/server/api/logs.py +64 -9
  28. prefect/server/api/server.py +2 -0
  29. prefect/server/api/templates.py +8 -2
  30. prefect/settings/context.py +17 -14
  31. prefect/settings/models/server/logs.py +28 -0
  32. prefect/settings/models/server/root.py +5 -0
  33. prefect/settings/models/server/services.py +26 -0
  34. prefect/task_engine.py +73 -43
  35. prefect/task_runners.py +10 -10
  36. prefect/tasks.py +52 -9
  37. prefect/types/__init__.py +2 -0
  38. prefect/types/names.py +50 -0
  39. prefect/utilities/_ast.py +2 -2
  40. prefect/utilities/callables.py +1 -1
  41. prefect/utilities/collections.py +6 -6
  42. prefect/utilities/engine.py +67 -72
  43. prefect/utilities/pydantic.py +19 -1
  44. prefect/workers/base.py +2 -0
  45. {prefect_client-3.4.6.dev1.dist-info → prefect_client-3.4.7.dist-info}/METADATA +1 -1
  46. {prefect_client-3.4.6.dev1.dist-info → prefect_client-3.4.7.dist-info}/RECORD +48 -44
  47. {prefect_client-3.4.6.dev1.dist-info → prefect_client-3.4.7.dist-info}/WHEEL +0 -0
  48. {prefect_client-3.4.6.dev1.dist-info → prefect_client-3.4.7.dist-info}/licenses/LICENSE +0 -0
prefect/server/api/logs.py CHANGED
@@ -2,14 +2,21 @@
  Routes for interacting with log objects.
  """
  
- from typing import List
+ from typing import Optional, Sequence
  
- from fastapi import Body, Depends, status
+ from fastapi import Body, Depends, WebSocket, status
+ from pydantic import TypeAdapter
+ from starlette.status import WS_1002_PROTOCOL_ERROR
  
  import prefect.server.api.dependencies as dependencies
  import prefect.server.models as models
- import prefect.server.schemas as schemas
  from prefect.server.database import PrefectDBInterface, provide_database_interface
+ from prefect.server.logs import stream
+ from prefect.server.schemas.actions import LogCreate
+ from prefect.server.schemas.core import Log
+ from prefect.server.schemas.filters import LogFilter
+ from prefect.server.schemas.sorting import LogSort
+ from prefect.server.utilities import subscriptions
  from prefect.server.utilities.server import PrefectRouter
  
  router: PrefectRouter = PrefectRouter(prefix="/logs", tags=["Logs"])
@@ -17,7 +24,7 @@ router: PrefectRouter = PrefectRouter(prefix="/logs", tags=["Logs"])
  
  @router.post("/", status_code=status.HTTP_201_CREATED)
  async def create_logs(
-     logs: List[schemas.actions.LogCreate],
+     logs: Sequence[LogCreate],
      db: PrefectDBInterface = Depends(provide_database_interface),
  ) -> None:
      """
@@ -30,18 +37,66 @@ async def create_logs(
          await models.logs.create_logs(session=session, logs=batch)
  
  
+ logs_adapter: TypeAdapter[Sequence[Log]] = TypeAdapter(Sequence[Log])
+ 
+ 
  @router.post("/filter")
  async def read_logs(
      limit: int = dependencies.LimitBody(),
      offset: int = Body(0, ge=0),
-     logs: schemas.filters.LogFilter = None,
-     sort: schemas.sorting.LogSort = Body(schemas.sorting.LogSort.TIMESTAMP_ASC),
+     logs: Optional[LogFilter] = None,
+     sort: LogSort = Body(LogSort.TIMESTAMP_ASC),
      db: PrefectDBInterface = Depends(provide_database_interface),
- ) -> List[schemas.core.Log]:
+ ) -> Sequence[Log]:
      """
      Query for logs.
      """
      async with db.session_context() as session:
-         return await models.logs.read_logs(
-             session=session, log_filter=logs, offset=offset, limit=limit, sort=sort
+         return logs_adapter.validate_python(
+             await models.logs.read_logs(
+                 session=session, log_filter=logs, offset=offset, limit=limit, sort=sort
+             )
          )
+ 
+ 
+ @router.websocket("/out")
+ async def stream_logs_out(websocket: WebSocket) -> None:
+     """Serve a WebSocket to stream live logs"""
+     websocket = await subscriptions.accept_prefect_socket(websocket)
+     if not websocket:
+         return
+ 
+     try:
+         # After authentication, the next message is expected to be a filter message, any
+         # other type of message will close the connection.
+         message = await websocket.receive_json()
+ 
+         if message["type"] != "filter":
+             return await websocket.close(
+                 WS_1002_PROTOCOL_ERROR, reason="Expected 'filter' message"
+             )
+ 
+         try:
+             filter = LogFilter.model_validate(message["filter"])
+         except Exception as e:
+             return await websocket.close(
+                 WS_1002_PROTOCOL_ERROR, reason=f"Invalid filter: {e}"
+             )
+ 
+         # No backfill support for logs - only live streaming
+         # Subscribe to the ongoing log stream
+         async with stream.logs(filter) as log_stream:
+             async for log in log_stream:
+                 if not log:
+                     if await subscriptions.still_connected(websocket):
+                         continue
+                     break
+ 
+                 await websocket.send_json(
+                     {"type": "log", "log": log.model_dump(mode="json")}
+                 )
+ 
+     except subscriptions.NORMAL_DISCONNECT_EXCEPTIONS:  # pragma: no cover
+         pass  # it's fine if a client disconnects either normally or abnormally
+ 
+     return None
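Taken together, these hunks add a live log stream at `/logs/out`: after the socket is accepted, the server expects a single `filter` message and then pushes `{"type": "log", ...}` frames. Below is a minimal client sketch. The `auth` handshake and the default URL are assumptions based on Prefect's other subscription sockets; the new `prefect/logging/clients.py` module added in this release likely contains the first-party client for this stream.

```python
from __future__ import annotations

import asyncio
import json

import websockets  # third-party "websockets" client library


async def tail_logs(api_ws_url: str = "ws://127.0.0.1:4200/api", token: str | None = None) -> None:
    async with websockets.connect(f"{api_ws_url}/logs/out") as ws:
        # Assumed handshake, mirroring Prefect's other subscription sockets:
        # authenticate first, then send the required "filter" message.
        await ws.send(json.dumps({"type": "auth", "token": token}))
        await ws.recv()  # auth acknowledgement

        # An empty filter streams everything; an invalid filter closes the
        # socket with WS_1002_PROTOCOL_ERROR, per the handler above.
        await ws.send(json.dumps({"type": "filter", "filter": {}}))

        async for frame in ws:
            message = json.loads(frame)
            if message.get("type") == "log":
                print(message["log"]["message"])


if __name__ == "__main__":
    asyncio.run(tail_logs())
```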
prefect/server/api/server.py CHANGED
@@ -922,6 +922,8 @@ class SubprocessASGIServer:
                  self.server_process.wait(timeout=5)
              except subprocess.TimeoutExpired:
                  self.server_process.kill()
+                 # Ensure the process is reaped to avoid ResourceWarning
+                 self.server_process.wait()
          finally:
              self.server_process = None
              if self.port in self._instances:
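The added `wait()` after `kill()` reaps the child process so CPython's `Popen` finalizer has nothing left to warn about at shutdown. A generic sketch of the pattern:

```python
import subprocess

proc = subprocess.Popen(["sleep", "60"])
proc.terminate()
try:
    proc.wait(timeout=5)  # give the process a chance to exit cleanly
except subprocess.TimeoutExpired:
    proc.kill()
    # kill() only delivers the signal; wait() collects the exit status so the
    # interpreter does not emit a "subprocess is still running" ResourceWarning.
    proc.wait()
```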
prefect/server/api/templates.py CHANGED
@@ -8,11 +8,17 @@ from prefect.server.utilities.user_templates import (
      validate_user_template,
  )
  
- router: PrefectRouter = PrefectRouter(prefix="/templates", tags=["Automations"])
+ router: PrefectRouter = PrefectRouter(tags=["Automations"])
  
  
+ # deprecated and can be removed after the ui removes its dependency on it
+ # use /templates/validate instead
  @router.post(
-     "/validate",
+     "/automations/templates/validate",
+     response_class=Response,
+ )
+ @router.post(
+     "/templates/validate",
      response_class=Response,
  )
  def validate_template(template: str = Body(default="")) -> Response:
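Stacking route decorators is what keeps the old URL alive while the UI migrates to the new path. A generic FastAPI sketch of the same aliasing pattern (illustrative only, not Prefect's router code):

```python
from fastapi import FastAPI, Response

app = FastAPI()


# Both paths dispatch to the same handler; the deprecated alias can be
# removed once nothing depends on it anymore.
@app.post("/automations/templates/validate", deprecated=True)
@app.post("/templates/validate")
def validate_template() -> Response:
    return Response(status_code=204)
```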
prefect/settings/context.py CHANGED
@@ -33,20 +33,23 @@ def temporary_settings(
      See `Settings.copy_with_update` for details on different argument behavior.
  
      Examples:
-         >>> from prefect.settings import PREFECT_API_URL
-         >>>
-         >>> with temporary_settings(updates={PREFECT_API_URL: "foo"}):
-         >>>     assert PREFECT_API_URL.value() == "foo"
-         >>>
-         >>> with temporary_settings(set_defaults={PREFECT_API_URL: "bar"}):
-         >>>     assert PREFECT_API_URL.value() == "foo"
-         >>>
-         >>> with temporary_settings(restore_defaults={PREFECT_API_URL}):
-         >>>     assert PREFECT_API_URL.value() is None
-         >>>
-         >>> with temporary_settings(set_defaults={PREFECT_API_URL: "bar"})
-         >>>     assert PREFECT_API_URL.value() == "bar"
-         >>> assert PREFECT_API_URL.value() is None
+ 
+         ```python
+         from prefect.settings import PREFECT_API_URL
+ 
+         with temporary_settings(updates={PREFECT_API_URL: "foo"}):
+             assert PREFECT_API_URL.value() == "foo"
+ 
+         with temporary_settings(set_defaults={PREFECT_API_URL: "bar"}):
+             assert PREFECT_API_URL.value() == "foo"
+ 
+         with temporary_settings(restore_defaults={PREFECT_API_URL}):
+             assert PREFECT_API_URL.value() is None
+ 
+         with temporary_settings(set_defaults={PREFECT_API_URL: "bar"})
+             assert PREFECT_API_URL.value() == "bar"
+         assert PREFECT_API_URL.value() is None
+         ```
      """
      import prefect.context
  
prefect/settings/models/server/logs.py ADDED
@@ -0,0 +1,28 @@
+ from __future__ import annotations
+ 
+ from typing import ClassVar
+ 
+ from pydantic import Field
+ from pydantic_settings import SettingsConfigDict
+ 
+ from prefect.settings.base import PrefectBaseSettings, build_settings_config
+ 
+ 
+ class ServerLogsSettings(PrefectBaseSettings):
+     """
+     Settings for controlling behavior of the logs subsystem
+     """
+ 
+     model_config: ClassVar[SettingsConfigDict] = build_settings_config(
+         ("server", "logs")
+     )
+ 
+     stream_out_enabled: bool = Field(
+         default=False,
+         description="Whether or not to stream logs out to the API via websockets.",
+     )
+ 
+     stream_publishing_enabled: bool = Field(
+         default=False,
+         description="Whether or not to publish logs to the streaming system.",
+     )
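Both flags default to off. A sketch of turning them on through the environment; the exact variable names are an assumption derived from the `("server", "logs")` prefix passed to `build_settings_config`, so confirm them with `prefect config view --show-defaults` before relying on them.

```python
import os

from prefect.settings.models.server.logs import ServerLogsSettings

# Assumed names following Prefect's PREFECT_<SECTION>_<FIELD> convention.
os.environ["PREFECT_SERVER_LOGS_STREAM_OUT_ENABLED"] = "true"
os.environ["PREFECT_SERVER_LOGS_STREAM_PUBLISHING_ENABLED"] = "true"

settings = ServerLogsSettings()  # environment is read at instantiation time
assert settings.stream_out_enabled is True
assert settings.stream_publishing_enabled is True
```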
prefect/settings/models/server/root.py CHANGED
@@ -13,6 +13,7 @@ from .deployments import ServerDeploymentsSettings
  from .ephemeral import ServerEphemeralSettings
  from .events import ServerEventsSettings
  from .flow_run_graph import ServerFlowRunGraphSettings
+ from .logs import ServerLogsSettings
  from .services import ServerServicesSettings
  from .tasks import ServerTasksSettings
  from .ui import ServerUISettings
@@ -127,6 +128,10 @@ class ServerSettings(PrefectBaseSettings):
          default_factory=ServerFlowRunGraphSettings,
          description="Settings for controlling flow run graph behavior",
      )
+     logs: ServerLogsSettings = Field(
+         default_factory=ServerLogsSettings,
+         description="Settings for controlling server logs behavior",
+     )
      services: ServerServicesSettings = Field(
          default_factory=ServerServicesSettings,
          description="Settings for controlling server services behavior",
prefect/settings/models/server/services.py CHANGED
@@ -448,6 +448,32 @@ class ServerServicesTriggersSettings(ServicesBaseSetting):
          ),
      )
  
+     pg_notify_reconnect_interval_seconds: int = Field(
+         default=10,
+         description="""
+         The number of seconds to wait before reconnecting to the PostgreSQL NOTIFY/LISTEN
+         connection after an error. Only used when using PostgreSQL as the database.
+         Defaults to `10`.
+         """,
+         validation_alias=AliasChoices(
+             AliasPath("pg_notify_reconnect_interval_seconds"),
+             "prefect_server_services_triggers_pg_notify_reconnect_interval_seconds",
+         ),
+     )
+ 
+     pg_notify_heartbeat_interval_seconds: int = Field(
+         default=5,
+         description="""
+         The number of seconds between heartbeat checks for the PostgreSQL NOTIFY/LISTEN
+         connection to ensure it's still alive. Only used when using PostgreSQL as the database.
+         Defaults to `5`.
+         """,
+         validation_alias=AliasChoices(
+             AliasPath("pg_notify_heartbeat_interval_seconds"),
+             "prefect_server_services_triggers_pg_notify_heartbeat_interval_seconds",
+         ),
+     )
+ 
  
  class ServerServicesSettings(PrefectBaseSettings):
      """
prefect/task_engine.py CHANGED
@@ -37,7 +37,7 @@ import prefect.types._datetime
  from prefect.cache_policies import CachePolicy
  from prefect.client.orchestration import PrefectClient, SyncPrefectClient, get_client
  from prefect.client.schemas import TaskRun
- from prefect.client.schemas.objects import State, TaskRunInput
+ from prefect.client.schemas.objects import RunInput, State
  from prefect.concurrency.context import ConcurrencyContext
  from prefect.concurrency.v1.asyncio import concurrency as aconcurrency
  from prefect.concurrency.v1.context import ConcurrencyContext as ConcurrencyContextV1
@@ -96,7 +96,7 @@ from prefect.utilities.callables import call_with_parameters, parameters_to_args
  from prefect.utilities.collections import visit_collection
  from prefect.utilities.engine import (
      emit_task_run_state_change_event,
-     link_state_to_result,
+     link_state_to_task_run_result,
      resolve_to_final_result,
  )
  from prefect.utilities.math import clamped_poisson_interval
@@ -453,9 +453,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
          else:
              result = state.data
  
-         link_state_to_result(new_state, result)
-         if asset_context := AssetContext.get():
-             asset_context.emit_events(new_state)
+         link_state_to_task_run_result(new_state, result)
  
          # emit a state change event
          self._last_event = emit_task_run_state_change_event(
@@ -641,15 +639,6 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
              else:
                  persist_result = should_persist_result()
  
-             asset_context = AssetContext.get()
-             if not asset_context:
-                 asset_context = AssetContext.from_task_and_inputs(
-                     task=self.task,
-                     task_run_id=self.task_run.id,
-                     task_inputs=self.task_run.task_inputs,
-                 )
-                 stack.enter_context(asset_context)
- 
              stack.enter_context(
                  TaskRunContext(
                      task=self.task,
@@ -672,11 +661,29 @@
  
              yield
  
+     @contextmanager
+     def asset_context(self):
+         parent_asset_ctx = AssetContext.get()
+ 
+         if parent_asset_ctx and parent_asset_ctx.copy_to_child_ctx:
+             asset_ctx = parent_asset_ctx.model_copy()
+             asset_ctx.copy_to_child_ctx = False
+         else:
+             asset_ctx = AssetContext.from_task_and_inputs(
+                 self.task, self.task_run.id, self.task_run.task_inputs
+             )
+ 
+         with asset_ctx as ctx:
+             try:
+                 yield
+             finally:
+                 ctx.emit_events(self.state)
+ 
      @contextmanager
      def initialize_run(
          self,
          task_run_id: Optional[UUID] = None,
-         dependencies: Optional[dict[str, set[TaskRunInput]]] = None,
+         dependencies: Optional[dict[str, set[RunInput]]] = None,
      ) -> Generator[Self, Any, Any]:
          """
          Enters a client context and creates a task run if needed.
@@ -770,7 +777,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
      def start(
          self,
          task_run_id: Optional[UUID] = None,
-         dependencies: Optional[dict[str, set[TaskRunInput]]] = None,
+         dependencies: Optional[dict[str, set[RunInput]]] = None,
      ) -> Generator[None, None, None]:
          with self.initialize_run(task_run_id=task_run_id, dependencies=dependencies):
              with (
@@ -1031,9 +1038,7 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
          else:
              result = new_state.data
  
-         link_state_to_result(new_state, result)
-         if asset_context := AssetContext.get():
-             asset_context.emit_events(new_state)
+         link_state_to_task_run_result(new_state, result)
  
          # emit a state change event
          self._last_event = emit_task_run_state_change_event(
@@ -1219,15 +1224,6 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
              else:
                  persist_result = should_persist_result()
  
-             asset_context = AssetContext.get()
-             if not asset_context:
-                 asset_context = AssetContext.from_task_and_inputs(
-                     task=self.task,
-                     task_run_id=self.task_run.id,
-                     task_inputs=self.task_run.task_inputs,
-                 )
-                 stack.enter_context(asset_context)
- 
              stack.enter_context(
                  TaskRunContext(
                      task=self.task,
@@ -1249,11 +1245,29 @@
  
              yield
  
+     @asynccontextmanager
+     async def asset_context(self):
+         parent_asset_ctx = AssetContext.get()
+ 
+         if parent_asset_ctx and parent_asset_ctx.copy_to_child_ctx:
+             asset_ctx = parent_asset_ctx.model_copy()
+             asset_ctx.copy_to_child_ctx = False
+         else:
+             asset_ctx = AssetContext.from_task_and_inputs(
+                 self.task, self.task_run.id, self.task_run.task_inputs
+             )
+ 
+         with asset_ctx as ctx:
+             try:
+                 yield
+             finally:
+                 ctx.emit_events(self.state)
+ 
      @asynccontextmanager
      async def initialize_run(
          self,
          task_run_id: Optional[UUID] = None,
-         dependencies: Optional[dict[str, set[TaskRunInput]]] = None,
+         dependencies: Optional[dict[str, set[RunInput]]] = None,
      ) -> AsyncGenerator[Self, Any]:
          """
          Enters a client context and creates a task run if needed.
@@ -1345,7 +1359,7 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
      async def start(
          self,
          task_run_id: Optional[UUID] = None,
-         dependencies: Optional[dict[str, set[TaskRunInput]]] = None,
+         dependencies: Optional[dict[str, set[RunInput]]] = None,
      ) -> AsyncGenerator[None, None]:
          async with self.initialize_run(
              task_run_id=task_run_id, dependencies=dependencies
@@ -1451,7 +1465,7 @@ def run_task_sync(
      parameters: Optional[dict[str, Any]] = None,
      wait_for: Optional["OneOrManyFutureOrResult[Any]"] = None,
      return_type: Literal["state", "result"] = "result",
-     dependencies: Optional[dict[str, set[TaskRunInput]]] = None,
+     dependencies: Optional[dict[str, set[RunInput]]] = None,
      context: Optional[dict[str, Any]] = None,
  ) -> Union[R, State, None]:
      engine = SyncTaskRunEngine[P, R](
@@ -1465,7 +1479,11 @@
      with engine.start(task_run_id=task_run_id, dependencies=dependencies):
          while engine.is_running():
              run_coro_as_sync(engine.wait_until_ready())
-             with engine.run_context(), engine.transaction_context() as txn:
+             with (
+                 engine.asset_context(),
+                 engine.run_context(),
+                 engine.transaction_context() as txn,
+             ):
                  engine.call_task_fn(txn)
  
      return engine.state if return_type == "state" else engine.result()
@@ -1478,7 +1496,7 @@
      parameters: Optional[dict[str, Any]] = None,
      wait_for: Optional["OneOrManyFutureOrResult[Any]"] = None,
      return_type: Literal["state", "result"] = "result",
-     dependencies: Optional[dict[str, set[TaskRunInput]]] = None,
+     dependencies: Optional[dict[str, set[RunInput]]] = None,
      context: Optional[dict[str, Any]] = None,
  ) -> Union[R, State, None]:
      engine = AsyncTaskRunEngine[P, R](
@@ -1492,7 +1510,11 @@
      async with engine.start(task_run_id=task_run_id, dependencies=dependencies):
          while engine.is_running():
              await engine.wait_until_ready()
-             async with engine.run_context(), engine.transaction_context() as txn:
+             async with (
+                 engine.asset_context(),
+                 engine.run_context(),
+                 engine.transaction_context() as txn,
+             ):
                  await engine.call_task_fn(txn)
  
      return engine.state if return_type == "state" else await engine.result()
@@ -1505,7 +1527,7 @@
      parameters: Optional[dict[str, Any]] = None,
      wait_for: Optional["OneOrManyFutureOrResult[Any]"] = None,
      return_type: Literal["state", "result"] = "result",
-     dependencies: Optional[dict[str, set[TaskRunInput]]] = None,
+     dependencies: Optional[dict[str, set[RunInput]]] = None,
      context: Optional[dict[str, Any]] = None,
  ) -> Generator[R, None, None]:
      if return_type != "result":
@@ -1522,7 +1544,11 @@
      with engine.start(task_run_id=task_run_id, dependencies=dependencies):
          while engine.is_running():
              run_coro_as_sync(engine.wait_until_ready())
-             with engine.run_context(), engine.transaction_context() as txn:
+             with (
+                 engine.asset_context(),
+                 engine.run_context(),
+                 engine.transaction_context() as txn,
+             ):
                  # TODO: generators should default to commit_mode=OFF
                  # because they are dynamic by definition
                  # for now we just prevent this branch explicitly
@@ -1542,7 +1568,7 @@
                  # dictionary in an unbounded way, so finding a
                  # way to periodically clean it up (using
                  # weakrefs or similar) would be good
-                 link_state_to_result(engine.state, gen_result)
+                 link_state_to_task_run_result(engine.state, gen_result)
                  yield gen_result
          except StopIteration as exc:
              engine.handle_success(exc.value, transaction=txn)
@@ -1560,7 +1586,7 @@
      parameters: Optional[dict[str, Any]] = None,
      wait_for: Optional["OneOrManyFutureOrResult[Any]"] = None,
      return_type: Literal["state", "result"] = "result",
-     dependencies: Optional[dict[str, set[TaskRunInput]]] = None,
+     dependencies: Optional[dict[str, set[RunInput]]] = None,
      context: Optional[dict[str, Any]] = None,
  ) -> AsyncGenerator[R, None]:
      if return_type != "result":
@@ -1576,7 +1602,7 @@
      async with engine.start(task_run_id=task_run_id, dependencies=dependencies):
          while engine.is_running():
              await engine.wait_until_ready()
-             async with engine.run_context(), engine.transaction_context() as txn:
+             async with (
+                 engine.asset_context(),
+                 engine.run_context(),
+                 engine.transaction_context() as txn,
+             ):
                  # TODO: generators should default to commit_mode=OFF
                  # because they are dynamic by definition
                  # for now we just prevent this branch explicitly
@@ -1597,7 +1627,7 @@
                  # dictionary in an unbounded way, so finding a
                  # way to periodically clean it up (using
                  # weakrefs or similar) would be good
-                 link_state_to_result(engine.state, gen_result)
+                 link_state_to_task_run_result(engine.state, gen_result)
                  yield gen_result
          except (StopAsyncIteration, GeneratorExit) as exc:
              await engine.handle_success(None, transaction=txn)
@@ -1617,7 +1647,7 @@ def run_task(
      parameters: Optional[dict[str, Any]] = None,
      wait_for: Optional["OneOrManyFutureOrResult[Any]"] = None,
      return_type: Literal["state"] = "state",
-     dependencies: Optional[dict[str, set[TaskRunInput]]] = None,
+     dependencies: Optional[dict[str, set[RunInput]]] = None,
      context: Optional[dict[str, Any]] = None,
  ) -> State[R]: ...
  
@@ -1630,7 +1660,7 @@ def run_task(
      parameters: Optional[dict[str, Any]] = None,
      wait_for: Optional["OneOrManyFutureOrResult[Any]"] = None,
      return_type: Literal["result"] = "result",
-     dependencies: Optional[dict[str, set[TaskRunInput]]] = None,
+     dependencies: Optional[dict[str, set[RunInput]]] = None,
      context: Optional[dict[str, Any]] = None,
  ) -> R: ...
  
@@ -1642,7 +1672,7 @@ def run_task(
      parameters: Optional[dict[str, Any]] = None,
      wait_for: Optional["OneOrManyFutureOrResult[Any]"] = None,
      return_type: Literal["state", "result"] = "result",
-     dependencies: Optional[dict[str, set[TaskRunInput]]] = None,
+     dependencies: Optional[dict[str, set[RunInput]]] = None,
      context: Optional[dict[str, Any]] = None,
  ) -> Union[R, State, None, Coroutine[Any, Any, Union[R, State, None]]]:
      """
prefect/task_runners.py CHANGED
@@ -19,7 +19,7 @@ from typing import (
  from typing_extensions import ParamSpec, Self, TypeVar
  
  from prefect._internal.uuid7 import uuid7
- from prefect.client.schemas.objects import TaskRunInput
+ from prefect.client.schemas.objects import RunInput
  from prefect.exceptions import MappingLengthMismatch, MappingMissingIterable
  from prefect.futures import (
      PrefectConcurrentFuture,
@@ -81,7 +81,7 @@ class TaskRunner(abc.ABC, Generic[F]):
          task: "Task[P, Coroutine[Any, Any, R]]",
          parameters: dict[str, Any],
          wait_for: Iterable[PrefectFuture[Any]] | None = None,
-         dependencies: dict[str, set[TaskRunInput]] | None = None,
+         dependencies: dict[str, set[RunInput]] | None = None,
      ) -> F: ...
  
      @overload
@@ -91,7 +91,7 @@ class TaskRunner(abc.ABC, Generic[F]):
          task: "Task[Any, R]",
          parameters: dict[str, Any],
          wait_for: Iterable[PrefectFuture[Any]] | None = None,
-         dependencies: dict[str, set[TaskRunInput]] | None = None,
+         dependencies: dict[str, set[RunInput]] | None = None,
      ) -> F: ...
  
      @abc.abstractmethod
@@ -100,7 +100,7 @@ class TaskRunner(abc.ABC, Generic[F]):
          task: "Task[P, R]",
          parameters: dict[str, Any],
          wait_for: Iterable[PrefectFuture[Any]] | None = None,
-         dependencies: dict[str, set[TaskRunInput]] | None = None,
+         dependencies: dict[str, set[RunInput]] | None = None,
      ) -> F: ...
  
      def map(
@@ -262,7 +262,7 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture[R]]):
          task: "Task[P, Coroutine[Any, Any, R]]",
          parameters: dict[str, Any],
          wait_for: Iterable[PrefectFuture[Any]] | None = None,
-         dependencies: dict[str, set[TaskRunInput]] | None = None,
+         dependencies: dict[str, set[RunInput]] | None = None,
      ) -> PrefectConcurrentFuture[R]: ...
  
      @overload
@@ -271,7 +271,7 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture[R]]):
          task: "Task[Any, R]",
          parameters: dict[str, Any],
          wait_for: Iterable[PrefectFuture[Any]] | None = None,
-         dependencies: dict[str, set[TaskRunInput]] | None = None,
+         dependencies: dict[str, set[RunInput]] | None = None,
      ) -> PrefectConcurrentFuture[R]: ...
  
      def submit(
@@ -279,7 +279,7 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture[R]]):
          task: "Task[P, R | Coroutine[Any, Any, R]]",
          parameters: dict[str, Any],
          wait_for: Iterable[PrefectFuture[Any]] | None = None,
-         dependencies: dict[str, set[TaskRunInput]] | None = None,
+         dependencies: dict[str, set[RunInput]] | None = None,
      ) -> PrefectConcurrentFuture[R]:
          """
          Submit a task to the task run engine running in a separate thread.
@@ -415,7 +415,7 @@ class PrefectTaskRunner(TaskRunner[PrefectDistributedFuture[R]]):
          task: "Task[P, Coroutine[Any, Any, R]]",
          parameters: dict[str, Any],
          wait_for: Iterable[PrefectFuture[Any]] | None = None,
-         dependencies: dict[str, set[TaskRunInput]] | None = None,
+         dependencies: dict[str, set[RunInput]] | None = None,
      ) -> PrefectDistributedFuture[R]: ...
  
      @overload
@@ -424,7 +424,7 @@ class PrefectTaskRunner(TaskRunner[PrefectDistributedFuture[R]]):
          task: "Task[Any, R]",
          parameters: dict[str, Any],
          wait_for: Iterable[PrefectFuture[Any]] | None = None,
-         dependencies: dict[str, set[TaskRunInput]] | None = None,
+         dependencies: dict[str, set[RunInput]] | None = None,
      ) -> PrefectDistributedFuture[R]: ...
  
      def submit(
@@ -432,7 +432,7 @@ class PrefectTaskRunner(TaskRunner[PrefectDistributedFuture[R]]):
          task: "Task[P, R]",
          parameters: dict[str, Any],
          wait_for: Iterable[PrefectFuture[Any]] | None = None,
-         dependencies: dict[str, set[TaskRunInput]] | None = None,
+         dependencies: dict[str, set[RunInput]] | None = None,
      ) -> PrefectDistributedFuture[R]:
          """
          Submit a task to the task run engine running in a separate thread.