prefect-client 2.20.4__py3-none-any.whl → 3.0.0__py3-none-any.whl

This diff compares the contents of two publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (288)
  1. prefect/__init__.py +74 -110
  2. prefect/_internal/compatibility/deprecated.py +6 -115
  3. prefect/_internal/compatibility/experimental.py +4 -79
  4. prefect/_internal/compatibility/migration.py +166 -0
  5. prefect/_internal/concurrency/__init__.py +2 -2
  6. prefect/_internal/concurrency/api.py +1 -35
  7. prefect/_internal/concurrency/calls.py +0 -6
  8. prefect/_internal/concurrency/cancellation.py +0 -3
  9. prefect/_internal/concurrency/event_loop.py +0 -20
  10. prefect/_internal/concurrency/inspection.py +3 -3
  11. prefect/_internal/concurrency/primitives.py +1 -0
  12. prefect/_internal/concurrency/services.py +23 -0
  13. prefect/_internal/concurrency/threads.py +35 -0
  14. prefect/_internal/concurrency/waiters.py +0 -28
  15. prefect/_internal/integrations.py +7 -0
  16. prefect/_internal/pydantic/__init__.py +0 -45
  17. prefect/_internal/pydantic/annotations/pendulum.py +2 -2
  18. prefect/_internal/pydantic/v1_schema.py +21 -22
  19. prefect/_internal/pydantic/v2_schema.py +0 -2
  20. prefect/_internal/pydantic/v2_validated_func.py +18 -23
  21. prefect/_internal/pytz.py +1 -1
  22. prefect/_internal/retries.py +61 -0
  23. prefect/_internal/schemas/bases.py +45 -177
  24. prefect/_internal/schemas/fields.py +1 -43
  25. prefect/_internal/schemas/validators.py +47 -233
  26. prefect/agent.py +3 -695
  27. prefect/artifacts.py +173 -14
  28. prefect/automations.py +39 -4
  29. prefect/blocks/abstract.py +1 -1
  30. prefect/blocks/core.py +405 -153
  31. prefect/blocks/fields.py +2 -57
  32. prefect/blocks/notifications.py +43 -28
  33. prefect/blocks/redis.py +168 -0
  34. prefect/blocks/system.py +67 -20
  35. prefect/blocks/webhook.py +2 -9
  36. prefect/cache_policies.py +239 -0
  37. prefect/client/__init__.py +4 -0
  38. prefect/client/base.py +33 -27
  39. prefect/client/cloud.py +65 -20
  40. prefect/client/collections.py +1 -1
  41. prefect/client/orchestration.py +650 -442
  42. prefect/client/schemas/actions.py +115 -100
  43. prefect/client/schemas/filters.py +46 -52
  44. prefect/client/schemas/objects.py +228 -178
  45. prefect/client/schemas/responses.py +18 -36
  46. prefect/client/schemas/schedules.py +55 -36
  47. prefect/client/schemas/sorting.py +2 -0
  48. prefect/client/subscriptions.py +8 -7
  49. prefect/client/types/flexible_schedule_list.py +11 -0
  50. prefect/client/utilities.py +9 -6
  51. prefect/concurrency/asyncio.py +60 -11
  52. prefect/concurrency/context.py +24 -0
  53. prefect/concurrency/events.py +2 -2
  54. prefect/concurrency/services.py +46 -16
  55. prefect/concurrency/sync.py +51 -7
  56. prefect/concurrency/v1/asyncio.py +143 -0
  57. prefect/concurrency/v1/context.py +27 -0
  58. prefect/concurrency/v1/events.py +61 -0
  59. prefect/concurrency/v1/services.py +116 -0
  60. prefect/concurrency/v1/sync.py +92 -0
  61. prefect/context.py +246 -149
  62. prefect/deployments/__init__.py +33 -18
  63. prefect/deployments/base.py +10 -15
  64. prefect/deployments/deployments.py +2 -1048
  65. prefect/deployments/flow_runs.py +178 -0
  66. prefect/deployments/runner.py +72 -173
  67. prefect/deployments/schedules.py +31 -25
  68. prefect/deployments/steps/__init__.py +0 -1
  69. prefect/deployments/steps/core.py +7 -0
  70. prefect/deployments/steps/pull.py +15 -21
  71. prefect/deployments/steps/utility.py +2 -1
  72. prefect/docker/__init__.py +20 -0
  73. prefect/docker/docker_image.py +82 -0
  74. prefect/engine.py +15 -2475
  75. prefect/events/actions.py +17 -23
  76. prefect/events/cli/automations.py +20 -7
  77. prefect/events/clients.py +142 -80
  78. prefect/events/filters.py +14 -18
  79. prefect/events/related.py +74 -75
  80. prefect/events/schemas/__init__.py +0 -5
  81. prefect/events/schemas/automations.py +55 -46
  82. prefect/events/schemas/deployment_triggers.py +7 -197
  83. prefect/events/schemas/events.py +46 -65
  84. prefect/events/schemas/labelling.py +10 -14
  85. prefect/events/utilities.py +4 -5
  86. prefect/events/worker.py +23 -8
  87. prefect/exceptions.py +15 -0
  88. prefect/filesystems.py +30 -529
  89. prefect/flow_engine.py +827 -0
  90. prefect/flow_runs.py +379 -7
  91. prefect/flows.py +470 -360
  92. prefect/futures.py +382 -331
  93. prefect/infrastructure/__init__.py +5 -26
  94. prefect/infrastructure/base.py +3 -320
  95. prefect/infrastructure/provisioners/__init__.py +5 -3
  96. prefect/infrastructure/provisioners/cloud_run.py +13 -8
  97. prefect/infrastructure/provisioners/container_instance.py +14 -9
  98. prefect/infrastructure/provisioners/ecs.py +10 -8
  99. prefect/infrastructure/provisioners/modal.py +8 -5
  100. prefect/input/__init__.py +4 -0
  101. prefect/input/actions.py +2 -4
  102. prefect/input/run_input.py +9 -9
  103. prefect/logging/formatters.py +2 -4
  104. prefect/logging/handlers.py +9 -14
  105. prefect/logging/loggers.py +5 -5
  106. prefect/main.py +72 -0
  107. prefect/plugins.py +2 -64
  108. prefect/profiles.toml +16 -2
  109. prefect/records/__init__.py +1 -0
  110. prefect/records/base.py +223 -0
  111. prefect/records/filesystem.py +207 -0
  112. prefect/records/memory.py +178 -0
  113. prefect/records/result_store.py +64 -0
  114. prefect/results.py +577 -504
  115. prefect/runner/runner.py +117 -47
  116. prefect/runner/server.py +32 -34
  117. prefect/runner/storage.py +3 -12
  118. prefect/runner/submit.py +2 -10
  119. prefect/runner/utils.py +2 -2
  120. prefect/runtime/__init__.py +1 -0
  121. prefect/runtime/deployment.py +1 -0
  122. prefect/runtime/flow_run.py +40 -5
  123. prefect/runtime/task_run.py +1 -0
  124. prefect/serializers.py +28 -39
  125. prefect/server/api/collections_data/views/aggregate-worker-metadata.json +5 -14
  126. prefect/settings.py +209 -332
  127. prefect/states.py +160 -63
  128. prefect/task_engine.py +1478 -57
  129. prefect/task_runners.py +383 -287
  130. prefect/task_runs.py +240 -0
  131. prefect/task_worker.py +463 -0
  132. prefect/tasks.py +684 -374
  133. prefect/transactions.py +410 -0
  134. prefect/types/__init__.py +72 -86
  135. prefect/types/entrypoint.py +13 -0
  136. prefect/utilities/annotations.py +4 -3
  137. prefect/utilities/asyncutils.py +227 -148
  138. prefect/utilities/callables.py +137 -45
  139. prefect/utilities/collections.py +134 -86
  140. prefect/utilities/dispatch.py +27 -14
  141. prefect/utilities/dockerutils.py +11 -4
  142. prefect/utilities/engine.py +186 -32
  143. prefect/utilities/filesystem.py +4 -5
  144. prefect/utilities/importtools.py +26 -27
  145. prefect/utilities/pydantic.py +128 -38
  146. prefect/utilities/schema_tools/hydration.py +18 -1
  147. prefect/utilities/schema_tools/validation.py +30 -0
  148. prefect/utilities/services.py +35 -9
  149. prefect/utilities/templating.py +12 -2
  150. prefect/utilities/timeout.py +20 -5
  151. prefect/utilities/urls.py +195 -0
  152. prefect/utilities/visualization.py +1 -0
  153. prefect/variables.py +78 -59
  154. prefect/workers/__init__.py +0 -1
  155. prefect/workers/base.py +237 -244
  156. prefect/workers/block.py +5 -226
  157. prefect/workers/cloud.py +6 -0
  158. prefect/workers/process.py +265 -12
  159. prefect/workers/server.py +29 -11
  160. {prefect_client-2.20.4.dist-info → prefect_client-3.0.0.dist-info}/METADATA +28 -24
  161. prefect_client-3.0.0.dist-info/RECORD +201 -0
  162. {prefect_client-2.20.4.dist-info → prefect_client-3.0.0.dist-info}/WHEEL +1 -1
  163. prefect/_internal/pydantic/_base_model.py +0 -51
  164. prefect/_internal/pydantic/_compat.py +0 -82
  165. prefect/_internal/pydantic/_flags.py +0 -20
  166. prefect/_internal/pydantic/_types.py +0 -8
  167. prefect/_internal/pydantic/utilities/config_dict.py +0 -72
  168. prefect/_internal/pydantic/utilities/field_validator.py +0 -150
  169. prefect/_internal/pydantic/utilities/model_construct.py +0 -56
  170. prefect/_internal/pydantic/utilities/model_copy.py +0 -55
  171. prefect/_internal/pydantic/utilities/model_dump.py +0 -136
  172. prefect/_internal/pydantic/utilities/model_dump_json.py +0 -112
  173. prefect/_internal/pydantic/utilities/model_fields.py +0 -50
  174. prefect/_internal/pydantic/utilities/model_fields_set.py +0 -29
  175. prefect/_internal/pydantic/utilities/model_json_schema.py +0 -82
  176. prefect/_internal/pydantic/utilities/model_rebuild.py +0 -80
  177. prefect/_internal/pydantic/utilities/model_validate.py +0 -75
  178. prefect/_internal/pydantic/utilities/model_validate_json.py +0 -68
  179. prefect/_internal/pydantic/utilities/model_validator.py +0 -87
  180. prefect/_internal/pydantic/utilities/type_adapter.py +0 -71
  181. prefect/_vendor/fastapi/__init__.py +0 -25
  182. prefect/_vendor/fastapi/applications.py +0 -946
  183. prefect/_vendor/fastapi/background.py +0 -3
  184. prefect/_vendor/fastapi/concurrency.py +0 -44
  185. prefect/_vendor/fastapi/datastructures.py +0 -58
  186. prefect/_vendor/fastapi/dependencies/__init__.py +0 -0
  187. prefect/_vendor/fastapi/dependencies/models.py +0 -64
  188. prefect/_vendor/fastapi/dependencies/utils.py +0 -877
  189. prefect/_vendor/fastapi/encoders.py +0 -177
  190. prefect/_vendor/fastapi/exception_handlers.py +0 -40
  191. prefect/_vendor/fastapi/exceptions.py +0 -46
  192. prefect/_vendor/fastapi/logger.py +0 -3
  193. prefect/_vendor/fastapi/middleware/__init__.py +0 -1
  194. prefect/_vendor/fastapi/middleware/asyncexitstack.py +0 -25
  195. prefect/_vendor/fastapi/middleware/cors.py +0 -3
  196. prefect/_vendor/fastapi/middleware/gzip.py +0 -3
  197. prefect/_vendor/fastapi/middleware/httpsredirect.py +0 -3
  198. prefect/_vendor/fastapi/middleware/trustedhost.py +0 -3
  199. prefect/_vendor/fastapi/middleware/wsgi.py +0 -3
  200. prefect/_vendor/fastapi/openapi/__init__.py +0 -0
  201. prefect/_vendor/fastapi/openapi/constants.py +0 -2
  202. prefect/_vendor/fastapi/openapi/docs.py +0 -203
  203. prefect/_vendor/fastapi/openapi/models.py +0 -480
  204. prefect/_vendor/fastapi/openapi/utils.py +0 -485
  205. prefect/_vendor/fastapi/param_functions.py +0 -340
  206. prefect/_vendor/fastapi/params.py +0 -453
  207. prefect/_vendor/fastapi/py.typed +0 -0
  208. prefect/_vendor/fastapi/requests.py +0 -4
  209. prefect/_vendor/fastapi/responses.py +0 -40
  210. prefect/_vendor/fastapi/routing.py +0 -1331
  211. prefect/_vendor/fastapi/security/__init__.py +0 -15
  212. prefect/_vendor/fastapi/security/api_key.py +0 -98
  213. prefect/_vendor/fastapi/security/base.py +0 -6
  214. prefect/_vendor/fastapi/security/http.py +0 -172
  215. prefect/_vendor/fastapi/security/oauth2.py +0 -227
  216. prefect/_vendor/fastapi/security/open_id_connect_url.py +0 -34
  217. prefect/_vendor/fastapi/security/utils.py +0 -10
  218. prefect/_vendor/fastapi/staticfiles.py +0 -1
  219. prefect/_vendor/fastapi/templating.py +0 -3
  220. prefect/_vendor/fastapi/testclient.py +0 -1
  221. prefect/_vendor/fastapi/types.py +0 -3
  222. prefect/_vendor/fastapi/utils.py +0 -235
  223. prefect/_vendor/fastapi/websockets.py +0 -7
  224. prefect/_vendor/starlette/__init__.py +0 -1
  225. prefect/_vendor/starlette/_compat.py +0 -28
  226. prefect/_vendor/starlette/_exception_handler.py +0 -80
  227. prefect/_vendor/starlette/_utils.py +0 -88
  228. prefect/_vendor/starlette/applications.py +0 -261
  229. prefect/_vendor/starlette/authentication.py +0 -159
  230. prefect/_vendor/starlette/background.py +0 -43
  231. prefect/_vendor/starlette/concurrency.py +0 -59
  232. prefect/_vendor/starlette/config.py +0 -151
  233. prefect/_vendor/starlette/convertors.py +0 -87
  234. prefect/_vendor/starlette/datastructures.py +0 -707
  235. prefect/_vendor/starlette/endpoints.py +0 -130
  236. prefect/_vendor/starlette/exceptions.py +0 -60
  237. prefect/_vendor/starlette/formparsers.py +0 -276
  238. prefect/_vendor/starlette/middleware/__init__.py +0 -17
  239. prefect/_vendor/starlette/middleware/authentication.py +0 -52
  240. prefect/_vendor/starlette/middleware/base.py +0 -220
  241. prefect/_vendor/starlette/middleware/cors.py +0 -176
  242. prefect/_vendor/starlette/middleware/errors.py +0 -265
  243. prefect/_vendor/starlette/middleware/exceptions.py +0 -74
  244. prefect/_vendor/starlette/middleware/gzip.py +0 -113
  245. prefect/_vendor/starlette/middleware/httpsredirect.py +0 -19
  246. prefect/_vendor/starlette/middleware/sessions.py +0 -82
  247. prefect/_vendor/starlette/middleware/trustedhost.py +0 -64
  248. prefect/_vendor/starlette/middleware/wsgi.py +0 -147
  249. prefect/_vendor/starlette/py.typed +0 -0
  250. prefect/_vendor/starlette/requests.py +0 -328
  251. prefect/_vendor/starlette/responses.py +0 -347
  252. prefect/_vendor/starlette/routing.py +0 -933
  253. prefect/_vendor/starlette/schemas.py +0 -154
  254. prefect/_vendor/starlette/staticfiles.py +0 -248
  255. prefect/_vendor/starlette/status.py +0 -199
  256. prefect/_vendor/starlette/templating.py +0 -231
  257. prefect/_vendor/starlette/testclient.py +0 -804
  258. prefect/_vendor/starlette/types.py +0 -30
  259. prefect/_vendor/starlette/websockets.py +0 -193
  260. prefect/blocks/kubernetes.py +0 -119
  261. prefect/deprecated/__init__.py +0 -0
  262. prefect/deprecated/data_documents.py +0 -350
  263. prefect/deprecated/packaging/__init__.py +0 -12
  264. prefect/deprecated/packaging/base.py +0 -96
  265. prefect/deprecated/packaging/docker.py +0 -146
  266. prefect/deprecated/packaging/file.py +0 -92
  267. prefect/deprecated/packaging/orion.py +0 -80
  268. prefect/deprecated/packaging/serializers.py +0 -171
  269. prefect/events/instrument.py +0 -135
  270. prefect/infrastructure/container.py +0 -824
  271. prefect/infrastructure/kubernetes.py +0 -920
  272. prefect/infrastructure/process.py +0 -289
  273. prefect/manifests.py +0 -20
  274. prefect/new_flow_engine.py +0 -449
  275. prefect/new_task_engine.py +0 -423
  276. prefect/pydantic/__init__.py +0 -76
  277. prefect/pydantic/main.py +0 -39
  278. prefect/software/__init__.py +0 -2
  279. prefect/software/base.py +0 -50
  280. prefect/software/conda.py +0 -199
  281. prefect/software/pip.py +0 -122
  282. prefect/software/python.py +0 -52
  283. prefect/task_server.py +0 -322
  284. prefect_client-2.20.4.dist-info/RECORD +0 -294
  285. /prefect/{_internal/pydantic/utilities → client/types}/__init__.py +0 -0
  286. /prefect/{_vendor → concurrency/v1}/__init__.py +0 -0
  287. {prefect_client-2.20.4.dist-info → prefect_client-3.0.0.dist-info}/LICENSE +0 -0
  288. {prefect_client-2.20.4.dist-info → prefect_client-3.0.0.dist-info}/top_level.txt +0 -0
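Before the per-file diff below, here is a minimal, illustrative sketch (not an excerpt from either wheel) of a task definition that exercises the behavior the rewritten prefect/task_engine.py orchestrates in 3.0.0: retries with per-attempt delays, timeouts raised as TaskRunTimeoutError, and transaction-backed caching. The decorator keywords mirror the Task attributes the engine reads (retries, retry_delay_seconds, timeout_seconds, cache_policy); the INPUTS cache policy is assumed to be exported by the new prefect/cache_policies.py module listed above.

# Hedged sketch only -- assumes the 3.0.0 API surface implied by this diff
# (retries, retry_delay_seconds, timeout_seconds, and cache_policy on @task,
# plus the new prefect.cache_policies module); not code from either package.
from prefect import flow, task
from prefect.cache_policies import INPUTS  # assumed export of the new cache_policies module


@task(
    retries=3,
    retry_delay_seconds=[1, 5, 10],  # the engine repeats the final delay if attempts exceed the list
    timeout_seconds=30,              # enforced by the new engine via TaskRunTimeoutError
    cache_policy=INPUTS,             # used by the engine to compute the transaction/cache key
)
def fetch(url: str) -> str:
    return f"fetched {url}"


@flow
def example_flow() -> str:
    future = fetch.submit("https://example.com")  # returns a PrefectFuture
    return future.result()


if __name__ == "__main__":
    example_flow()

The retry-delay indexing in handle_retry and the "Cached" terminal state set in handle_success, both visible in the diff below, are the code paths this configuration would exercise.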
prefect/task_engine.py CHANGED
@@ -1,76 +1,1497 @@
- from contextlib import AsyncExitStack
+ import inspect
+ import logging
+ import threading
+ import time
+ from asyncio import CancelledError
+ from contextlib import ExitStack, asynccontextmanager, contextmanager
+ from dataclasses import dataclass, field
+ from functools import partial
+ from textwrap import dedent
  from typing import (
      Any,
+     AsyncGenerator,
+     Callable,
+     Coroutine,
      Dict,
+     Generator,
+     Generic,
      Iterable,
+     Literal,
      Optional,
+     Sequence,
+     Set,
+     Type,
+     TypeVar,
+     Union,
  )
+ from uuid import UUID

  import anyio
- from typing_extensions import Literal
-
- from prefect._internal.concurrency.api import create_call, from_async, from_sync
- from prefect.client.orchestration import PrefectClient
- from prefect.client.schemas.objects import TaskRun
- from prefect.context import EngineContext
- from prefect.engine import (
-     begin_task_map,
-     get_task_call_return_value,
-     wait_for_task_runs_and_report_crashes,
+ import pendulum
+ from typing_extensions import ParamSpec
+
+ from prefect import Task
+ from prefect.client.orchestration import PrefectClient, SyncPrefectClient, get_client
+ from prefect.client.schemas import TaskRun
+ from prefect.client.schemas.objects import State, TaskRunInput
+ from prefect.concurrency.context import ConcurrencyContext
+ from prefect.concurrency.v1.asyncio import concurrency as aconcurrency
+ from prefect.concurrency.v1.context import ConcurrencyContext as ConcurrencyContextV1
+ from prefect.concurrency.v1.sync import concurrency
+ from prefect.context import (
+     AsyncClientContext,
+     FlowRunContext,
+     SyncClientContext,
+     TaskRunContext,
+     hydrated_context,
+ )
+ from prefect.events.schemas.events import Event as PrefectEvent
+ from prefect.exceptions import (
+     Abort,
+     Pause,
+     PrefectException,
+     TerminationSignal,
+     UpstreamTaskError,
  )
  from prefect.futures import PrefectFuture
- from prefect.results import ResultFactory
- from prefect.task_runners import BaseTaskRunner
- from prefect.tasks import Task
- from prefect.utilities.asyncutils import sync_compatible
+ from prefect.logging.loggers import get_logger, patch_print, task_run_logger
+ from prefect.records.result_store import ResultRecordStore
+ from prefect.results import (
+     BaseResult,
+     _format_user_supplied_storage_key,
+     get_current_result_store,
+ )
+ from prefect.settings import (
+     PREFECT_DEBUG_MODE,
+     PREFECT_TASKS_REFRESH_CACHE,
+ )
+ from prefect.states import (
+     AwaitingRetry,
+     Completed,
+     Failed,
+     Pending,
+     Retrying,
+     Running,
+     exception_to_crashed_state,
+     exception_to_failed_state,
+     return_value_to_state,
+ )
+ from prefect.transactions import Transaction, transaction
+ from prefect.utilities.annotations import NotSet
+ from prefect.utilities.asyncutils import run_coro_as_sync
+ from prefect.utilities.callables import call_with_parameters, parameters_to_args_kwargs
+ from prefect.utilities.collections import visit_collection
+ from prefect.utilities.engine import (
+     _get_hook_name,
+     emit_task_run_state_change_event,
+     link_state_to_result,
+     resolve_to_final_result,
+ )
+ from prefect.utilities.math import clamped_poisson_interval
+ from prefect.utilities.timeout import timeout, timeout_async

- EngineReturnType = Literal["future", "state", "result"]
+ P = ParamSpec("P")
+ R = TypeVar("R")

+ BACKOFF_MAX = 10

- @sync_compatible
- async def submit_autonomous_task_run_to_engine(
-     task: Task,
-     task_run: TaskRun,
-     task_runner: BaseTaskRunner,
-     parameters: Optional[Dict[str, Any]] = None,
-     wait_for: Optional[Iterable[PrefectFuture]] = None,
-     mapped: bool = False,
-     return_type: EngineReturnType = "future",
-     client: Optional[PrefectClient] = None,
- ) -> PrefectFuture:
-     async with AsyncExitStack() as stack:
-         parameters = parameters or {}
-         with EngineContext(
-             flow=None,
-             flow_run=None,
-             autonomous_task_run=task_run,
-             task_runner=task_runner,
-             client=client,
-             parameters=parameters,
-             result_factory=await ResultFactory.from_autonomous_task(task),
-             background_tasks=await stack.enter_async_context(anyio.create_task_group()),
-         ) as flow_run_context:
-             begin_run = create_call(
-                 begin_task_map if mapped else get_task_call_return_value,
-                 task=task,
-                 flow_run_context=flow_run_context,
-                 parameters=parameters,
-                 wait_for=wait_for,
-                 return_type=return_type,
-                 task_runner=task_runner,
-             )
-             if task.isasync:
-                 future_result_or_state = await from_async.wait_for_call_in_loop_thread(
-                     begin_run
+
+ class TaskRunTimeoutError(TimeoutError):
+     """Raised when a task run exceeds its timeout."""
+
+
+ @dataclass
+ class BaseTaskRunEngine(Generic[P, R]):
+     task: Union[Task[P, R], Task[P, Coroutine[Any, Any, R]]]
+     logger: logging.Logger = field(default_factory=lambda: get_logger("engine"))
+     parameters: Optional[Dict[str, Any]] = None
+     task_run: Optional[TaskRun] = None
+     retries: int = 0
+     wait_for: Optional[Iterable[PrefectFuture]] = None
+     context: Optional[Dict[str, Any]] = None
+     # holds the return value from the user code
+     _return_value: Union[R, Type[NotSet]] = NotSet
+     # holds the exception raised by the user code, if any
+     _raised: Union[Exception, Type[NotSet]] = NotSet
+     _initial_run_context: Optional[TaskRunContext] = None
+     _is_started: bool = False
+     _task_name_set: bool = False
+     _last_event: Optional[PrefectEvent] = None
+
+     def __post_init__(self):
+         if self.parameters is None:
+             self.parameters = {}
+
+     @property
+     def state(self) -> State:
+         if not self.task_run:
+             raise ValueError("Task run is not set")
+         return self.task_run.state
+
+     def is_cancelled(self) -> bool:
+         if (
+             self.context
+             and "cancel_event" in self.context
+             and isinstance(self.context["cancel_event"], threading.Event)
+         ):
+             return self.context["cancel_event"].is_set()
+         return False
+
+     def compute_transaction_key(self) -> Optional[str]:
+         key = None
+         if self.task.cache_policy:
+             flow_run_context = FlowRunContext.get()
+             task_run_context = TaskRunContext.get()
+
+             if flow_run_context:
+                 parameters = flow_run_context.parameters
+             else:
+                 parameters = None
+
+             key = self.task.cache_policy.compute_key(
+                 task_ctx=task_run_context,
+                 inputs=self.parameters,
+                 flow_parameters=parameters,
+             )
+         elif self.task.result_storage_key is not None:
+             key = _format_user_supplied_storage_key(self.task.result_storage_key)
+         return key
+
+     def _resolve_parameters(self):
+         if not self.parameters:
+             return {}
+
+         resolved_parameters = {}
+         for parameter, value in self.parameters.items():
+             try:
+                 resolved_parameters[parameter] = visit_collection(
+                     value,
+                     visit_fn=resolve_to_final_result,
+                     return_data=True,
+                     max_depth=-1,
+                     remove_annotations=True,
+                     context={},
+                 )
+             except UpstreamTaskError:
+                 raise
+             except Exception as exc:
+                 raise PrefectException(
+                     f"Failed to resolve inputs in parameter {parameter!r}. If your"
+                     " parameter type is not supported, consider using the `quote`"
+                     " annotation to skip resolution of inputs."
+                 ) from exc
+
+         self.parameters = resolved_parameters
+
+     def _wait_for_dependencies(self):
+         if not self.wait_for:
+             return
+
+         visit_collection(
+             self.wait_for,
+             visit_fn=resolve_to_final_result,
+             return_data=False,
+             max_depth=-1,
+             remove_annotations=True,
+             context={"current_task_run": self.task_run, "current_task": self.task},
+         )
+
+     def record_terminal_state_timing(self, state: State) -> None:
+         if self.task_run and self.task_run.start_time and not self.task_run.end_time:
+             self.task_run.end_time = state.timestamp
+
+             if self.task_run.state.is_running():
+                 self.task_run.total_run_time += (
+                     state.timestamp - self.task_run.state.timestamp
+                 )
+
+     def is_running(self) -> bool:
+         """Whether or not the engine is currently running a task."""
+         if (task_run := getattr(self, "task_run", None)) is None:
+             return False
+         return task_run.state.is_running() or task_run.state.is_scheduled()
+
+     def log_finished_message(self):
+         if not self.task_run:
+             return
+
+         # If debugging, use the more complete `repr` than the usual `str` description
+         display_state = repr(self.state) if PREFECT_DEBUG_MODE else str(self.state)
+         level = logging.INFO if self.state.is_completed() else logging.ERROR
+         msg = f"Finished in state {display_state}"
+         if self.state.is_pending():
+             msg += (
+                 "\nPlease wait for all submitted tasks to complete"
+                 " before exiting your flow by calling `.wait()` on the "
+                 "`PrefectFuture` returned from your `.submit()` calls."
+             )
+             msg += dedent(
+                 """
+
+                 Example:
+
+                 from prefect import flow, task
+
+                 @task
+                 def say_hello(name):
+                     print(f"Hello, {name}!")
+
+                 @flow
+                 def example_flow():
+                     future = say_hello.submit(name="Marvin")
+                     future.wait()
+
+                 example_flow()
+                 """
+             )
+         self.logger.log(
+             level=level,
+             msg=msg,
+         )
+
+     def handle_rollback(self, txn: Transaction) -> None:
+         assert self.task_run is not None
+
+         rolled_back_state = Completed(
+             name="RolledBack",
+             message="Task rolled back as part of transaction",
+         )
+
+         self._last_event = emit_task_run_state_change_event(
+             task_run=self.task_run,
+             initial_state=self.state,
+             validated_state=rolled_back_state,
+             follows=self._last_event,
+         )
+
+
+ @dataclass
+ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
+     _client: Optional[SyncPrefectClient] = None
+
+     @property
+     def client(self) -> SyncPrefectClient:
+         if not self._is_started or self._client is None:
+             raise RuntimeError("Engine has not started.")
+         return self._client
+
+     def can_retry(self, exc: Exception) -> bool:
+         retry_condition: Optional[
+             Callable[[Task[P, Coroutine[Any, Any, R]], TaskRun, State], bool]
+         ] = self.task.retry_condition_fn
+         if not self.task_run:
+             raise ValueError("Task run is not set")
+         try:
+             self.logger.debug(
+                 f"Running `retry_condition_fn` check {retry_condition!r} for task"
+                 f" {self.task.name!r}"
+             )
+             state = Failed(
+                 data=exc,
+                 message=f"Task run encountered unexpected exception: {repr(exc)}",
+             )
+             if inspect.iscoroutinefunction(retry_condition):
+                 should_retry = run_coro_as_sync(
+                     retry_condition(self.task, self.task_run, state)
+                 )
+             elif inspect.isfunction(retry_condition):
+                 should_retry = retry_condition(self.task, self.task_run, state)
+             else:
+                 should_retry = not retry_condition
+             return should_retry
+         except Exception:
+             self.logger.error(
+                 (
+                     "An error was encountered while running `retry_condition_fn` check"
+                     f" '{retry_condition!r}' for task {self.task.name!r}"
+                 ),
+                 exc_info=True,
+             )
+             return False
+
+     def call_hooks(self, state: Optional[State] = None):
+         if state is None:
+             state = self.state
+         task = self.task
+         task_run = self.task_run
+
+         if not task_run:
+             raise ValueError("Task run is not set")
+
+         if state.is_failed() and task.on_failure_hooks:
+             hooks = task.on_failure_hooks
+         elif state.is_completed() and task.on_completion_hooks:
+             hooks = task.on_completion_hooks
+         else:
+             hooks = None
+
+         for hook in hooks or []:
+             hook_name = _get_hook_name(hook)
+
+             try:
+                 self.logger.info(
+                     f"Running hook {hook_name!r} in response to entering state"
+                     f" {state.name!r}"
+                 )
+                 result = hook(task, task_run, state)
+                 if inspect.isawaitable(result):
+                     run_coro_as_sync(result)
+             except Exception:
+                 self.logger.error(
+                     f"An error was encountered while running hook {hook_name!r}",
+                     exc_info=True,
+                 )
+             else:
+                 self.logger.info(f"Hook {hook_name!r} finished running successfully")
+
+     def begin_run(self):
+         try:
+             self._resolve_parameters()
+             self._wait_for_dependencies()
+         except UpstreamTaskError as upstream_exc:
+             state = self.set_state(
+                 Pending(
+                     name="NotReady",
+                     message=str(upstream_exc),
+                 ),
+                 # if orchestrating a run already in a pending state, force orchestration to
+                 # update the state name
+                 force=self.state.is_pending(),
+             )
+             return
+
+         new_state = Running()
+
+         self.task_run.start_time = new_state.timestamp
+         self.task_run.run_count += 1
+
+         flow_run_context = FlowRunContext.get()
+         if flow_run_context and flow_run_context.flow_run:
+             # Carry forward any task run information from the flow run
+             flow_run = flow_run_context.flow_run
+             self.task_run.flow_run_run_count = flow_run.run_count
+
+         state = self.set_state(new_state)
+
+         # TODO: this is temporary until the API stops rejecting state transitions
+         # and the client / transaction store becomes the source of truth
+         # this is a bandaid caused by the API storing a Completed state with a bad
+         # result reference that no longer exists
+         if state.is_completed():
+             try:
+                 state.result(retry_result_failure=False, _sync=True)
+             except Exception:
+                 state = self.set_state(new_state, force=True)
+
+         backoff_count = 0
+
+         # TODO: Could this listen for state change events instead of polling?
+         while state.is_pending() or state.is_paused():
+             if backoff_count < BACKOFF_MAX:
+                 backoff_count += 1
+             interval = clamped_poisson_interval(
+                 average_interval=backoff_count, clamping_factor=0.3
+             )
+             time.sleep(interval)
+             state = self.set_state(new_state)
+
+     def set_state(self, state: State, force: bool = False) -> State:
+         last_state = self.state
+         if not self.task_run:
+             raise ValueError("Task run is not set")
+
+         self.task_run.state = new_state = state
+
+         # Ensure that the state_details are populated with the current run IDs
+         new_state.state_details.task_run_id = self.task_run.id
+         new_state.state_details.flow_run_id = self.task_run.flow_run_id
+
+         # Predictively update the de-normalized task_run.state_* attributes
+         self.task_run.state_id = new_state.id
+         self.task_run.state_type = new_state.type
+         self.task_run.state_name = new_state.name
+
+         if new_state.is_final():
+             if isinstance(state.data, BaseResult) and state.data.has_cached_object():
+                 # Avoid fetching the result unless it is cached, otherwise we defeat
+                 # the purpose of disabling `cache_result_in_memory`
+                 result = state.result(raise_on_failure=False, fetch=True)
+                 if inspect.isawaitable(result):
+                     result = run_coro_as_sync(result)
+             else:
+                 result = state.data
+
+             link_state_to_result(state, result)
+
+         # emit a state change event
+         self._last_event = emit_task_run_state_change_event(
+             task_run=self.task_run,
+             initial_state=last_state,
+             validated_state=self.task_run.state,
+             follows=self._last_event,
+         )
+
+         return new_state
+
+     def result(self, raise_on_failure: bool = True) -> "Union[R, State, None]":
+         if self._return_value is not NotSet:
+             # if the return value is a BaseResult, we need to fetch it
+             if isinstance(self._return_value, BaseResult):
+                 _result = self._return_value.get()
+                 if inspect.isawaitable(_result):
+                     _result = run_coro_as_sync(_result)
+                 return _result
+
+             # otherwise, return the value as is
+             return self._return_value
+
+         if self._raised is not NotSet:
+             # if the task raised an exception, raise it
+             if raise_on_failure:
+                 raise self._raised
+
+             # otherwise, return the exception
+             return self._raised
+
+     def handle_success(self, result: R, transaction: Transaction) -> R:
+         result_store = getattr(TaskRunContext.get(), "result_store", None)
+         if result_store is None:
+             raise ValueError("Result store is not set")
+
+         if self.task.cache_expiration is not None:
+             expiration = pendulum.now("utc") + self.task.cache_expiration
+         else:
+             expiration = None
+
+         terminal_state = run_coro_as_sync(
+             return_value_to_state(
+                 result,
+                 result_store=result_store,
+                 key=transaction.key,
+                 expiration=expiration,
+             )
+         )
+
+         # Avoid logging when running this rollback hook since it is not user-defined
+         handle_rollback = partial(self.handle_rollback)
+         handle_rollback.log_on_run = False
+
+         transaction.stage(
+             terminal_state.data,
+             on_rollback_hooks=[handle_rollback] + self.task.on_rollback_hooks,
+             on_commit_hooks=self.task.on_commit_hooks,
+         )
+         if transaction.is_committed():
+             terminal_state.name = "Cached"
+
+         self.record_terminal_state_timing(terminal_state)
+         self.set_state(terminal_state)
+         self._return_value = result
+         return result
+
+     def handle_retry(self, exc: Exception) -> bool:
+         """Handle any task run retries.
+
+         - If the task has retries left, and the retry condition is met, set the task to retrying and return True.
+         - If the task has a retry delay, place in AwaitingRetry state with a delayed scheduled time.
+         - If the task has no retries left, or the retry condition is not met, return False.
+         """
+         if self.retries < self.task.retries and self.can_retry(exc):
+             if self.task.retry_delay_seconds:
+                 delay = (
+                     self.task.retry_delay_seconds[
+                         min(self.retries, len(self.task.retry_delay_seconds) - 1)
+                     ] # repeat final delay value if attempts exceed specified delays
+                     if isinstance(self.task.retry_delay_seconds, Sequence)
+                     else self.task.retry_delay_seconds
+                 )
+                 new_state = AwaitingRetry(
+                     scheduled_time=pendulum.now("utc").add(seconds=delay)
+                 )
+             else:
+                 delay = None
+                 new_state = Retrying()
+                 self.task_run.run_count += 1
+
+             self.logger.info(
+                 "Task run failed with exception: %r - " "Retry %s/%s will start %s",
+                 exc,
+                 self.retries + 1,
+                 self.task.retries,
+                 str(delay) + " second(s) from now" if delay else "immediately",
+             )
+
+             self.set_state(new_state, force=True)
+             self.retries = self.retries + 1
+             return True
+         elif self.retries >= self.task.retries:
+             self.logger.error(
+                 "Task run failed with exception: %r - Retries are exhausted",
+                 exc,
+                 exc_info=True,
+             )
+             return False
+
+         return False
+
+     def handle_exception(self, exc: Exception) -> None:
+         # If the task fails, and we have retries left, set the task to retrying.
+         if not self.handle_retry(exc):
+             # If the task has no retries left, or the retry condition is not met, set the task to failed.
+             context = TaskRunContext.get()
+             state = run_coro_as_sync(
+                 exception_to_failed_state(
+                     exc,
+                     message="Task run encountered an exception",
+                     result_store=getattr(context, "result_store", None),
+                     write_result=True,
+                 )
+             )
+             self.record_terminal_state_timing(state)
+             self.set_state(state)
+             self._raised = exc
+
+     def handle_timeout(self, exc: TimeoutError) -> None:
+         if not self.handle_retry(exc):
+             if isinstance(exc, TaskRunTimeoutError):
+                 message = f"Task run exceeded timeout of {self.task.timeout_seconds} second(s)"
+             else:
+                 message = f"Task run failed due to timeout: {exc!r}"
+             self.logger.error(message)
+             state = Failed(
+                 data=exc,
+                 message=message,
+                 name="TimedOut",
+             )
+             self.set_state(state)
+             self._raised = exc
+
+     def handle_crash(self, exc: BaseException) -> None:
+         state = run_coro_as_sync(exception_to_crashed_state(exc))
+         self.logger.error(f"Crash detected! {state.message}")
+         self.logger.debug("Crash details:", exc_info=exc)
+         self.record_terminal_state_timing(state)
+         self.set_state(state, force=True)
+         self._raised = exc
+
+     @contextmanager
+     def setup_run_context(self, client: Optional[SyncPrefectClient] = None):
+         from prefect.utilities.engine import (
+             _resolve_custom_task_run_name,
+             should_log_prints,
+         )
+
+         if client is None:
+             client = self.client
+         if not self.task_run:
+             raise ValueError("Task run is not set")
+
+         with ExitStack() as stack:
+             if log_prints := should_log_prints(self.task):
+                 stack.enter_context(patch_print())
+             stack.enter_context(
+                 TaskRunContext(
+                     task=self.task,
+                     log_prints=log_prints,
+                     task_run=self.task_run,
+                     parameters=self.parameters,
+                     result_store=get_current_result_store().update_for_task(
+                         self.task, _sync=True
+                     ),
+                     client=client,
+                 )
+             )
+             stack.enter_context(ConcurrencyContextV1())
+             stack.enter_context(ConcurrencyContext())
+
+             self.logger = task_run_logger(task_run=self.task_run, task=self.task) # type: ignore
+
+             # update the task run name if necessary
+             if not self._task_name_set and self.task.task_run_name:
+                 task_run_name = _resolve_custom_task_run_name(
+                     task=self.task, parameters=self.parameters
+                 )
+
+                 self.logger.extra["task_run_name"] = task_run_name
+                 self.logger.debug(
+                     f"Renamed task run {self.task_run.name!r} to {task_run_name!r}"
+                 )
+                 self.task_run.name = task_run_name
+                 self._task_name_set = True
+             yield
+
+     @contextmanager
+     def initialize_run(
+         self,
+         task_run_id: Optional[UUID] = None,
+         dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+     ) -> Generator["SyncTaskRunEngine", Any, Any]:
+         """
+         Enters a client context and creates a task run if needed.
+         """
+
+         with hydrated_context(self.context):
+             with SyncClientContext.get_or_create() as client_ctx:
+                 self._client = client_ctx.client
+                 self._is_started = True
+                 try:
+                     if not self.task_run:
+                         self.task_run = run_coro_as_sync(
+                             self.task.create_local_run(
+                                 id=task_run_id,
+                                 parameters=self.parameters,
+                                 flow_run_context=FlowRunContext.get(),
+                                 parent_task_run_context=TaskRunContext.get(),
+                                 wait_for=self.wait_for,
+                                 extra_task_inputs=dependencies,
+                             )
+                         )
+                     # Emit an event to capture that the task run was in the `PENDING` state.
+                     self._last_event = emit_task_run_state_change_event(
+                         task_run=self.task_run,
+                         initial_state=None,
+                         validated_state=self.task_run.state,
+                     )
+
+                     with self.setup_run_context():
+                         # setup_run_context might update the task run name, so log creation here
+                         self.logger.info(
+                             f"Created task run {self.task_run.name!r} for task {self.task.name!r}"
+                         )
+                         yield self
+
+                 except TerminationSignal as exc:
+                     # TerminationSignals are caught and handled as crashes
+                     self.handle_crash(exc)
+                     raise exc
+
+                 except Exception:
+                     # regular exceptions are caught and re-raised to the user
+                     raise
+                 except (Pause, Abort) as exc:
+                     # Do not capture internal signals as crashes
+                     if isinstance(exc, Abort):
+                         self.logger.error("Task run was aborted: %s", exc)
+                     raise
+                 except GeneratorExit:
+                     # Do not capture generator exits as crashes
+                     raise
+                 except BaseException as exc:
+                     # BaseExceptions are caught and handled as crashes
+                     self.handle_crash(exc)
+                     raise
+                 finally:
+                     self.log_finished_message()
+                     self._is_started = False
+                     self._client = None
+
+     async def wait_until_ready(self):
+         """Waits until the scheduled time (if its the future), then enters Running."""
+         if scheduled_time := self.state.state_details.scheduled_time:
+             sleep_time = (scheduled_time - pendulum.now("utc")).total_seconds()
+             await anyio.sleep(sleep_time if sleep_time > 0 else 0)
+             self.set_state(
+                 Retrying() if self.state.name == "AwaitingRetry" else Running(),
+                 force=True,
+             )
+
+     # --------------------------
+     #
+     # The following methods compose the main task run loop
+     #
+     # --------------------------
+
+     @contextmanager
+     def start(
+         self,
+         task_run_id: Optional[UUID] = None,
+         dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+     ) -> Generator[None, None, None]:
+         with self.initialize_run(task_run_id=task_run_id, dependencies=dependencies):
+             self.begin_run()
+             try:
+                 yield
+             finally:
+                 self.call_hooks()
+
+     @contextmanager
+     def transaction_context(self) -> Generator[Transaction, None, None]:
+         # refresh cache setting is now repurposes as overwrite transaction record
+         overwrite = (
+             self.task.refresh_cache
+             if self.task.refresh_cache is not None
+             else PREFECT_TASKS_REFRESH_CACHE.value()
+         )
+
+         result_store = getattr(TaskRunContext.get(), "result_store", None)
+         if result_store and result_store.persist_result:
+             store = ResultRecordStore(result_store=result_store)
+         else:
+             store = None
+
+         with transaction(
+             key=self.compute_transaction_key(),
+             store=store,
+             overwrite=overwrite,
+             logger=self.logger,
+         ) as txn:
+             yield txn
+
+     @contextmanager
+     def run_context(self):
+         # reenter the run context to ensure it is up to date for every run
+         with self.setup_run_context():
+             try:
+                 with timeout(
+                     seconds=self.task.timeout_seconds,
+                     timeout_exc_type=TaskRunTimeoutError,
+                 ):
+                     self.logger.debug(
+                         f"Executing task {self.task.name!r} for task run {self.task_run.name!r}..."
+                     )
+                     if self.is_cancelled():
+                         raise CancelledError("Task run cancelled by the task runner")
+
+                     yield self
+             except TimeoutError as exc:
+                 self.handle_timeout(exc)
+             except Exception as exc:
+                 self.handle_exception(exc)
+
+     def call_task_fn(
+         self, transaction: Transaction
+     ) -> Union[R, Coroutine[Any, Any, R]]:
+         """
+         Convenience method to call the task function. Returns a coroutine if the
+         task is async.
+         """
+         parameters = self.parameters or {}
+         if transaction.is_committed():
+             result = transaction.read()
+         else:
+             if self.task.tags:
+                 # Acquire a concurrency slot for each tag, but only if a limit
+                 # matching the tag already exists.
+                 with concurrency(list(self.task.tags), self.task_run.id):
+                     result = call_with_parameters(self.task.fn, parameters)
+             else:
+                 result = call_with_parameters(self.task.fn, parameters)
+         self.handle_success(result, transaction=transaction)
+         return result
+
+
+ @dataclass
+ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
+     _client: Optional[PrefectClient] = None
+
+     @property
+     def client(self) -> PrefectClient:
+         if not self._is_started or self._client is None:
+             raise RuntimeError("Engine has not started.")
+         return self._client
+
+     async def can_retry(self, exc: Exception) -> bool:
+         retry_condition: Optional[
+             Callable[[Task[P, Coroutine[Any, Any, R]], TaskRun, State], bool]
+         ] = self.task.retry_condition_fn
+         if not self.task_run:
+             raise ValueError("Task run is not set")
+         try:
+             self.logger.debug(
+                 f"Running `retry_condition_fn` check {retry_condition!r} for task"
+                 f" {self.task.name!r}"
+             )
+             state = Failed(
+                 data=exc,
+                 message=f"Task run encountered unexpected exception: {repr(exc)}",
+             )
+             if inspect.iscoroutinefunction(retry_condition):
+                 should_retry = await retry_condition(self.task, self.task_run, state)
+             elif inspect.isfunction(retry_condition):
+                 should_retry = retry_condition(self.task, self.task_run, state)
+             else:
+                 should_retry = not retry_condition
+             return should_retry
+
+         except Exception:
+             self.logger.error(
+                 (
+                     "An error was encountered while running `retry_condition_fn` check"
+                     f" '{retry_condition!r}' for task {self.task.name!r}"
+                 ),
+                 exc_info=True,
+             )
+             return False
+
+     async def call_hooks(self, state: Optional[State] = None):
+         if state is None:
+             state = self.state
+         task = self.task
+         task_run = self.task_run
+
+         if not task_run:
+             raise ValueError("Task run is not set")
+
+         if state.is_failed() and task.on_failure_hooks:
+             hooks = task.on_failure_hooks
+         elif state.is_completed() and task.on_completion_hooks:
+             hooks = task.on_completion_hooks
+         else:
+             hooks = None
+
+         for hook in hooks or []:
+             hook_name = _get_hook_name(hook)
+
+             try:
+                 self.logger.info(
+                     f"Running hook {hook_name!r} in response to entering state"
+                     f" {state.name!r}"
+                 )
+                 result = hook(task, task_run, state)
+                 if inspect.isawaitable(result):
+                     await result
+             except Exception:
+                 self.logger.error(
+                     f"An error was encountered while running hook {hook_name!r}",
+                     exc_info=True,
+                 )
+             else:
-                 future_result_or_state = from_sync.wait_for_call_in_loop_thread(
-                     begin_run
+                 self.logger.info(f"Hook {hook_name!r} finished running successfully")
+
+     async def begin_run(self):
+         try:
+             self._resolve_parameters()
+             self._wait_for_dependencies()
+         except UpstreamTaskError as upstream_exc:
+             state = await self.set_state(
+                 Pending(
+                     name="NotReady",
+                     message=str(upstream_exc),
+                 ),
+                 # if orchestrating a run already in a pending state, force orchestration to
+                 # update the state name
+                 force=self.state.is_pending(),
+             )
+             return
+
+         new_state = Running()
+
+         self.task_run.start_time = new_state.timestamp
+         self.task_run.run_count += 1
+
+         flow_run_context = FlowRunContext.get()
+         if flow_run_context:
+             # Carry forward any task run information from the flow run
+             flow_run = flow_run_context.flow_run
+             self.task_run.flow_run_run_count = flow_run.run_count
+
+         state = await self.set_state(new_state)
+
+         # TODO: this is temporary until the API stops rejecting state transitions
+         # and the client / transaction store becomes the source of truth
+         # this is a bandaid caused by the API storing a Completed state with a bad
+         # result reference that no longer exists
+         if state.is_completed():
+             try:
+                 await state.result(retry_result_failure=False)
+             except Exception:
+                 state = await self.set_state(new_state, force=True)
+
+         backoff_count = 0
+
+         # TODO: Could this listen for state change events instead of polling?
+         while state.is_pending() or state.is_paused():
+             if backoff_count < BACKOFF_MAX:
+                 backoff_count += 1
+             interval = clamped_poisson_interval(
+                 average_interval=backoff_count, clamping_factor=0.3
+             )
+             await anyio.sleep(interval)
+             state = await self.set_state(new_state)
+
+     async def set_state(self, state: State, force: bool = False) -> State:
+         last_state = self.state
+         if not self.task_run:
+             raise ValueError("Task run is not set")
+
+         self.task_run.state = new_state = state
+
+         # Ensure that the state_details are populated with the current run IDs
+         new_state.state_details.task_run_id = self.task_run.id
+         new_state.state_details.flow_run_id = self.task_run.flow_run_id
+
+         # Predictively update the de-normalized task_run.state_* attributes
+         self.task_run.state_id = new_state.id
+         self.task_run.state_type = new_state.type
+         self.task_run.state_name = new_state.name
+
+         if new_state.is_final():
+             if (
+                 isinstance(new_state.data, BaseResult)
+                 and new_state.data.has_cached_object()
+             ):
+                 # Avoid fetching the result unless it is cached, otherwise we defeat
+                 # the purpose of disabling `cache_result_in_memory`
+                 result = await new_state.result(raise_on_failure=False, fetch=True)
+             else:
+                 result = new_state.data
+
+             link_state_to_result(new_state, result)
+
+         # emit a state change event
+         self._last_event = emit_task_run_state_change_event(
+             task_run=self.task_run,
+             initial_state=last_state,
+             validated_state=self.task_run.state,
+             follows=self._last_event,
+         )
+
+         return new_state
+
+     async def result(self, raise_on_failure: bool = True) -> "Union[R, State, None]":
+         if self._return_value is not NotSet:
+             # if the return value is a BaseResult, we need to fetch it
+             if isinstance(self._return_value, BaseResult):
+                 return await self._return_value.get()
+
+             # otherwise, return the value as is
+             return self._return_value
+
+         if self._raised is not NotSet:
+             # if the task raised an exception, raise it
+             if raise_on_failure:
+                 raise self._raised
+
+             # otherwise, return the exception
+             return self._raised
+
+     async def handle_success(self, result: R, transaction: Transaction) -> R:
+         result_store = getattr(TaskRunContext.get(), "result_store", None)
+         if result_store is None:
+             raise ValueError("Result store is not set")
+
+         if self.task.cache_expiration is not None:
+             expiration = pendulum.now("utc") + self.task.cache_expiration
+         else:
+             expiration = None
+
+         terminal_state = await return_value_to_state(
+             result,
+             result_store=result_store,
+             key=transaction.key,
+             expiration=expiration,
+         )
+
+         # Avoid logging when running this rollback hook since it is not user-defined
+         handle_rollback = partial(self.handle_rollback)
+         handle_rollback.log_on_run = False
+
+         transaction.stage(
+             terminal_state.data,
+             on_rollback_hooks=[handle_rollback] + self.task.on_rollback_hooks,
+             on_commit_hooks=self.task.on_commit_hooks,
+         )
+         if transaction.is_committed():
+             terminal_state.name = "Cached"
+
+         self.record_terminal_state_timing(terminal_state)
+         await self.set_state(terminal_state)
+         self._return_value = result
+         return result
+
+     async def handle_retry(self, exc: Exception) -> bool:
+         """Handle any task run retries.
+
+         - If the task has retries left, and the retry condition is met, set the task to retrying and return True.
+         - If the task has a retry delay, place in AwaitingRetry state with a delayed scheduled time.
+         - If the task has no retries left, or the retry condition is not met, return False.
+         """
+         if self.retries < self.task.retries and await self.can_retry(exc):
+             if self.task.retry_delay_seconds:
+                 delay = (
+                     self.task.retry_delay_seconds[
+                         min(self.retries, len(self.task.retry_delay_seconds) - 1)
+                     ] # repeat final delay value if attempts exceed specified delays
+                     if isinstance(self.task.retry_delay_seconds, Sequence)
+                     else self.task.retry_delay_seconds
                  )
+                 new_state = AwaitingRetry(
+                     scheduled_time=pendulum.now("utc").add(seconds=delay)
+                 )
+             else:
+                 delay = None
+                 new_state = Retrying()
+                 self.task_run.run_count += 1
+
+             self.logger.info(
+                 "Task run failed with exception: %r - " "Retry %s/%s will start %s",
+                 exc,
+                 self.retries + 1,
+                 self.task.retries,
+                 str(delay) + " second(s) from now" if delay else "immediately",
+             )
+
+             await self.set_state(new_state, force=True)
+             self.retries = self.retries + 1
+             return True
+         elif self.retries >= self.task.retries:
+             self.logger.error(
+                 "Task run failed with exception: %r - Retries are exhausted",
+                 exc,
+                 exc_info=True,
+             )
+             return False
+
+         return False
+
+     async def handle_exception(self, exc: Exception) -> None:
+         # If the task fails, and we have retries left, set the task to retrying.
+         if not await self.handle_retry(exc):
+             # If the task has no retries left, or the retry condition is not met, set the task to failed.
+             context = TaskRunContext.get()
+             state = await exception_to_failed_state(
+                 exc,
+                 message="Task run encountered an exception",
+                 result_store=getattr(context, "result_store", None),
+             )
+             self.record_terminal_state_timing(state)
+             await self.set_state(state)
+             self._raised = exc
+
+     async def handle_timeout(self, exc: TimeoutError) -> None:
+         if not await self.handle_retry(exc):
+             if isinstance(exc, TaskRunTimeoutError):
+                 message = f"Task run exceeded timeout of {self.task.timeout_seconds} second(s)"
+             else:
+                 message = f"Task run failed due to timeout: {exc!r}"
+             self.logger.error(message)
+             state = Failed(
+                 data=exc,
+                 message=message,
+                 name="TimedOut",
+             )
+             await self.set_state(state)
+             self._raised = exc
+
+     async def handle_crash(self, exc: BaseException) -> None:
+         state = await exception_to_crashed_state(exc)
+         self.logger.error(f"Crash detected! {state.message}")
+         self.logger.debug("Crash details:", exc_info=exc)
+         self.record_terminal_state_timing(state)
+         await self.set_state(state, force=True)
+         self._raised = exc

-             if return_type == "future":
-                 await wait_for_task_runs_and_report_crashes(
-                     task_run_futures=[future_result_or_state],
+ @asynccontextmanager
1085
+ async def setup_run_context(self, client: Optional[PrefectClient] = None):
1086
+ from prefect.utilities.engine import (
1087
+ _resolve_custom_task_run_name,
1088
+ should_log_prints,
1089
+ )
1090
+
1091
+ if client is None:
1092
+ client = self.client
1093
+ if not self.task_run:
1094
+ raise ValueError("Task run is not set")
1095
+
1096
+ with ExitStack() as stack:
1097
+ if log_prints := should_log_prints(self.task):
1098
+ stack.enter_context(patch_print())
1099
+ stack.enter_context(
1100
+ TaskRunContext(
1101
+ task=self.task,
1102
+ log_prints=log_prints,
1103
+ task_run=self.task_run,
1104
+ parameters=self.parameters,
1105
+ result_store=await get_current_result_store().update_for_task(
1106
+ self.task, _sync=False
1107
+ ),
74
1108
  client=client,
75
1109
  )
76
- return future_result_or_state
1110
+ )
1111
+ stack.enter_context(ConcurrencyContext())
1112
+
1113
+ self.logger = task_run_logger(task_run=self.task_run, task=self.task) # type: ignore
1114
+
1115
+ if not self._task_name_set and self.task.task_run_name:
1116
+ task_run_name = _resolve_custom_task_run_name(
1117
+ task=self.task, parameters=self.parameters
1118
+ )
1119
+ self.logger.extra["task_run_name"] = task_run_name
1120
+ self.logger.debug(
1121
+ f"Renamed task run {self.task_run.name!r} to {task_run_name!r}"
1122
+ )
1123
+ self.task_run.name = task_run_name
1124
+ self._task_name_set = True
1125
+ yield
1126
+
1127
+ @asynccontextmanager
1128
+ async def initialize_run(
1129
+ self,
1130
+ task_run_id: Optional[UUID] = None,
1131
+ dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
1132
+ ) -> AsyncGenerator["AsyncTaskRunEngine", Any]:
1133
+ """
1134
+ Enters a client context and creates a task run if needed.
1135
+ """
1136
+
1137
+ with hydrated_context(self.context):
1138
+ async with AsyncClientContext.get_or_create():
1139
+ self._client = get_client()
1140
+ self._is_started = True
1141
+ try:
1142
+ if not self.task_run:
1143
+ self.task_run = await self.task.create_local_run(
1144
+ id=task_run_id,
1145
+ parameters=self.parameters,
1146
+ flow_run_context=FlowRunContext.get(),
1147
+ parent_task_run_context=TaskRunContext.get(),
1148
+ wait_for=self.wait_for,
1149
+ extra_task_inputs=dependencies,
1150
+ )
1151
+ # Emit an event to capture that the task run was in the `PENDING` state.
1152
+ self._last_event = emit_task_run_state_change_event(
1153
+ task_run=self.task_run,
1154
+ initial_state=None,
1155
+ validated_state=self.task_run.state,
1156
+ )
1157
+
1158
+ async with self.setup_run_context():
1159
+ # setup_run_context might update the task run name, so log creation here
1160
+ self.logger.info(
1161
+ f"Created task run {self.task_run.name!r} for task {self.task.name!r}"
1162
+ )
1163
+ yield self
1164
+
1165
+ except TerminationSignal as exc:
1166
+ # TerminationSignals are caught and handled as crashes
1167
+ await self.handle_crash(exc)
1168
+ raise exc
1169
+
1170
+ except Exception:
1171
+ # regular exceptions are caught and re-raised to the user
1172
+ raise
1173
+ except (Pause, Abort) as exc:
1174
+ # Do not capture internal signals as crashes
1175
+ if isinstance(exc, Abort):
1176
+ self.logger.error("Task run was aborted: %s", exc)
1177
+ raise
1178
+ except GeneratorExit:
1179
+ # Do not capture generator exits as crashes
1180
+ raise
1181
+ except BaseException as exc:
1182
+ # BaseExceptions are caught and handled as crashes
1183
+ await self.handle_crash(exc)
1184
+ raise
1185
+ finally:
1186
+ self.log_finished_message()
1187
+ self._is_started = False
1188
+ self._client = None
1189
+
+    async def wait_until_ready(self):
+        """Waits until the scheduled time (if it's in the future), then enters Running."""
+        if scheduled_time := self.state.state_details.scheduled_time:
+            sleep_time = (scheduled_time - pendulum.now("utc")).total_seconds()
+            await anyio.sleep(sleep_time if sleep_time > 0 else 0)
+            await self.set_state(
+                Retrying() if self.state.name == "AwaitingRetry" else Running(),
+                force=True,
+            )
+
+    # --------------------------
+    #
+    # The following methods compose the main task run loop
+    #
+    # --------------------------
+
+    @asynccontextmanager
+    async def start(
+        self,
+        task_run_id: Optional[UUID] = None,
+        dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+    ) -> AsyncGenerator[None, None]:
+        async with self.initialize_run(
+            task_run_id=task_run_id, dependencies=dependencies
+        ):
+            await self.begin_run()
+            try:
+                yield
+            finally:
+                await self.call_hooks()
+
+    @asynccontextmanager
+    async def transaction_context(self) -> AsyncGenerator[Transaction, None]:
+        # the refresh cache setting is now repurposed as "overwrite the transaction record"
+        overwrite = (
+            self.task.refresh_cache
+            if self.task.refresh_cache is not None
+            else PREFECT_TASKS_REFRESH_CACHE.value()
+        )
+        result_store = getattr(TaskRunContext.get(), "result_store", None)
+        if result_store and result_store.persist_result:
+            store = ResultRecordStore(result_store=result_store)
+        else:
+            store = None
+
+        with transaction(
+            key=self.compute_transaction_key(),
+            store=store,
+            overwrite=overwrite,
+            logger=self.logger,
+        ) as txn:
+            yield txn
+
+    @asynccontextmanager
+    async def run_context(self):
+        # reenter the run context to ensure it is up to date for every run
+        async with self.setup_run_context():
+            try:
+                with timeout_async(
+                    seconds=self.task.timeout_seconds,
+                    timeout_exc_type=TaskRunTimeoutError,
+                ):
+                    self.logger.debug(
+                        f"Executing task {self.task.name!r} for task run {self.task_run.name!r}..."
+                    )
+                    if self.is_cancelled():
+                        raise CancelledError("Task run cancelled by the task runner")
+
+                    yield self
+            except TimeoutError as exc:
+                await self.handle_timeout(exc)
+            except Exception as exc:
+                await self.handle_exception(exc)
+
+    async def call_task_fn(
+        self, transaction: Transaction
+    ) -> Union[R, Coroutine[Any, Any, R]]:
+        """
+        Convenience method to call the task function. Returns a coroutine if the
+        task is async.
+        """
+        parameters = self.parameters or {}
+        if transaction.is_committed():
+            result = transaction.read()
+        else:
+            if self.task.tags:
+                # Acquire a concurrency slot for each tag, but only if a limit
+                # matching the tag already exists.
+                async with aconcurrency(list(self.task.tags), self.task_run.id):
+                    result = await call_with_parameters(self.task.fn, parameters)
+            else:
+                result = await call_with_parameters(self.task.fn, parameters)
+        await self.handle_success(result, transaction=transaction)
+        return result
+
+
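A minimal sketch (not part of the package diff) of how the engine methods above compose into the task run loop, mirroring `run_task_async` below. `demo` is a hypothetical task; assumes Prefect 3.x is installed and a Prefect API is reachable.

import asyncio

from prefect import task
from prefect.task_engine import AsyncTaskRunEngine  # internal API; run_task below is the public entry point


@task
async def demo(x: int) -> int:
    return x + 1


async def main() -> None:
    # hypothetical example: drive the engine by hand, as run_task_async does
    engine = AsyncTaskRunEngine(task=demo, parameters={"x": 1})
    async with engine.start():                       # initialize_run + begin_run, hooks on exit
        while engine.is_running():
            await engine.wait_until_ready()           # sleep until any scheduled time
            async with engine.run_context(), engine.transaction_context() as txn:
                await engine.call_task_fn(txn)        # runs demo and records its result
    print(await engine.result())                      # -> 2


asyncio.run(main())
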
+def run_task_sync(
+    task: Task[P, R],
+    task_run_id: Optional[UUID] = None,
+    task_run: Optional[TaskRun] = None,
+    parameters: Optional[Dict[str, Any]] = None,
+    wait_for: Optional[Iterable[PrefectFuture]] = None,
+    return_type: Literal["state", "result"] = "result",
+    dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+    context: Optional[Dict[str, Any]] = None,
+) -> Union[R, State, None]:
+    engine = SyncTaskRunEngine[P, R](
+        task=task,
+        parameters=parameters,
+        task_run=task_run,
+        wait_for=wait_for,
+        context=context,
+    )
+
+    with engine.start(task_run_id=task_run_id, dependencies=dependencies):
+        while engine.is_running():
+            run_coro_as_sync(engine.wait_until_ready())
+            with engine.run_context(), engine.transaction_context() as txn:
+                engine.call_task_fn(txn)
+
+    return engine.state if return_type == "state" else engine.result()
+
+
+async def run_task_async(
+    task: Task[P, R],
+    task_run_id: Optional[UUID] = None,
+    task_run: Optional[TaskRun] = None,
+    parameters: Optional[Dict[str, Any]] = None,
+    wait_for: Optional[Iterable[PrefectFuture]] = None,
+    return_type: Literal["state", "result"] = "result",
+    dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+    context: Optional[Dict[str, Any]] = None,
+) -> Union[R, State, None]:
+    engine = AsyncTaskRunEngine[P, R](
+        task=task,
+        parameters=parameters,
+        task_run=task_run,
+        wait_for=wait_for,
+        context=context,
+    )
+
+    async with engine.start(task_run_id=task_run_id, dependencies=dependencies):
+        while engine.is_running():
+            await engine.wait_until_ready()
+            async with engine.run_context(), engine.transaction_context() as txn:
+                await engine.call_task_fn(txn)
+
+    return engine.state if return_type == "state" else await engine.result()
+
+
+def run_generator_task_sync(
+    task: Task[P, R],
+    task_run_id: Optional[UUID] = None,
+    task_run: Optional[TaskRun] = None,
+    parameters: Optional[Dict[str, Any]] = None,
+    wait_for: Optional[Iterable[PrefectFuture]] = None,
+    return_type: Literal["state", "result"] = "result",
+    dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+    context: Optional[Dict[str, Any]] = None,
+) -> Generator[R, None, None]:
+    if return_type != "result":
+        raise ValueError("The return_type for a generator task must be 'result'")
+
+    engine = SyncTaskRunEngine[P, R](
+        task=task,
+        parameters=parameters,
+        task_run=task_run,
+        wait_for=wait_for,
+        context=context,
+    )
+
+    with engine.start(task_run_id=task_run_id, dependencies=dependencies):
+        while engine.is_running():
+            run_coro_as_sync(engine.wait_until_ready())
+            with engine.run_context(), engine.transaction_context() as txn:
+                # TODO: generators should default to commit_mode=OFF
+                # because they are dynamic by definition
+                # for now we just prevent this branch explicitly
+                if False and txn.is_committed():
+                    txn.read()
+                else:
+                    call_args, call_kwargs = parameters_to_args_kwargs(
+                        task.fn, engine.parameters or {}
+                    )
+                    gen = task.fn(*call_args, **call_kwargs)
+                    try:
+                        while True:
+                            gen_result = next(gen)
+                            # link the current state to the result for dependency tracking
+                            #
+                            # TODO: this could grow the task_run_result
+                            # dictionary in an unbounded way, so finding a
+                            # way to periodically clean it up (using
+                            # weakrefs or similar) would be good
+                            link_state_to_result(engine.state, gen_result)
+                            yield gen_result
+                    except StopIteration as exc:
+                        engine.handle_success(exc.value, transaction=txn)
+                    except GeneratorExit as exc:
+                        engine.handle_success(None, transaction=txn)
+                        gen.throw(exc)
+
+    return engine.result()
+
+
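A minimal sketch (not part of the package diff) of the plain-Python mechanism run_generator_task_sync relies on above: values yielded by a generator task stream to the caller, while the generator's return value surfaces as StopIteration.value and is recorded via handle_success.

def counting():
    yield 1
    yield 2
    return "done"  # delivered to the caller as StopIteration.value


gen = counting()
yielded = []
try:
    while True:
        yielded.append(next(gen))
except StopIteration as exc:
    final = exc.value

assert yielded == [1, 2]
assert final == "done"
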
+async def run_generator_task_async(
+    task: Task[P, R],
+    task_run_id: Optional[UUID] = None,
+    task_run: Optional[TaskRun] = None,
+    parameters: Optional[Dict[str, Any]] = None,
+    wait_for: Optional[Iterable[PrefectFuture]] = None,
+    return_type: Literal["state", "result"] = "result",
+    dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+    context: Optional[Dict[str, Any]] = None,
+) -> AsyncGenerator[R, None]:
+    if return_type != "result":
+        raise ValueError("The return_type for a generator task must be 'result'")
+    engine = AsyncTaskRunEngine[P, R](
+        task=task,
+        parameters=parameters,
+        task_run=task_run,
+        wait_for=wait_for,
+        context=context,
+    )
+
+    async with engine.start(task_run_id=task_run_id, dependencies=dependencies):
+        while engine.is_running():
+            await engine.wait_until_ready()
+            async with engine.run_context(), engine.transaction_context() as txn:
+                # TODO: generators should default to commit_mode=OFF
+                # because they are dynamic by definition
+                # for now we just prevent this branch explicitly
+                if False and txn.is_committed():
+                    txn.read()
+                else:
+                    call_args, call_kwargs = parameters_to_args_kwargs(
+                        task.fn, engine.parameters or {}
+                    )
+                    gen = task.fn(*call_args, **call_kwargs)
+                    try:
+                        while True:
+                            # can't use anext in Python < 3.10
+                            gen_result = await gen.__anext__()
+                            # link the current state to the result for dependency tracking
+                            #
+                            # TODO: this could grow the task_run_result
+                            # dictionary in an unbounded way, so finding a
+                            # way to periodically clean it up (using
+                            # weakrefs or similar) would be good
+                            link_state_to_result(engine.state, gen_result)
+                            yield gen_result
+                    except (StopAsyncIteration, GeneratorExit) as exc:
+                        await engine.handle_success(None, transaction=txn)
+                        if isinstance(exc, GeneratorExit):
+                            gen.throw(exc)
+
+    # async generators can't return, but we can raise failures here
+    if engine.state.is_failed():
+        await engine.result()
+
+
+def run_task(
+    task: Task[P, Union[R, Coroutine[Any, Any, R]]],
+    task_run_id: Optional[UUID] = None,
+    task_run: Optional[TaskRun] = None,
+    parameters: Optional[Dict[str, Any]] = None,
+    wait_for: Optional[Iterable[PrefectFuture]] = None,
+    return_type: Literal["state", "result"] = "result",
+    dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+    context: Optional[Dict[str, Any]] = None,
+) -> Union[R, State, None, Coroutine[Any, Any, Union[R, State, None]]]:
+    """
+    Runs the provided task.
+
+    Args:
+        task: The task to run
+        task_run_id: The ID of the task run; if not provided, a new task run
+            will be created
+        task_run: The task run object; if not provided, a new task run
+            will be created
+        parameters: The parameters to pass to the task
+        wait_for: A list of futures to wait for before running the task
+        return_type: The return type to return; either "state" or "result"
+        dependencies: A dictionary of task run inputs to use for dependency tracking
+        context: A dictionary containing the context to use for the task run; only
+            required if the task is running in a remote environment
+
+    Returns:
+        The result of the task run
+    """
+    kwargs = dict(
+        task=task,
+        task_run_id=task_run_id,
+        task_run=task_run,
+        parameters=parameters,
+        wait_for=wait_for,
+        return_type=return_type,
+        dependencies=dependencies,
+        context=context,
+    )
+    if task.isasync and task.isgenerator:
+        return run_generator_task_async(**kwargs)
+    elif task.isgenerator:
+        return run_generator_task_sync(**kwargs)
+    elif task.isasync:
+        return run_task_async(**kwargs)
+    else:
+        return run_task_sync(**kwargs)
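
A minimal usage sketch (not part of the package diff) of how run_task dispatches on the task definition. `add` and `aadd` are hypothetical tasks; assumes Prefect 3.x is installed and a Prefect API is reachable, since run_task creates and tracks a real task run.

import asyncio

from prefect import task
from prefect.task_engine import run_task


@task
def add(x: int, y: int) -> int:
    return x + y


@task
async def aadd(x: int, y: int) -> int:
    return x + y


print(run_task(add, parameters={"x": 1, "y": 2}))                # sync task: returns 3
print(asyncio.run(run_task(aadd, parameters={"x": 1, "y": 2})))  # async task: returns a coroutine to await
state = run_task(add, parameters={"x": 1, "y": 2}, return_type="state")
print(state.is_completed())                                      # return_type="state": returns a State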