prefect-client 2.19.2__py3-none-any.whl → 3.0.0rc1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (239)
  1. prefect/__init__.py +8 -56
  2. prefect/_internal/compatibility/deprecated.py +6 -115
  3. prefect/_internal/compatibility/experimental.py +4 -79
  4. prefect/_internal/concurrency/api.py +0 -34
  5. prefect/_internal/concurrency/calls.py +0 -6
  6. prefect/_internal/concurrency/cancellation.py +0 -3
  7. prefect/_internal/concurrency/event_loop.py +0 -20
  8. prefect/_internal/concurrency/inspection.py +3 -3
  9. prefect/_internal/concurrency/threads.py +35 -0
  10. prefect/_internal/concurrency/waiters.py +0 -28
  11. prefect/_internal/pydantic/__init__.py +0 -45
  12. prefect/_internal/pydantic/v1_schema.py +21 -22
  13. prefect/_internal/pydantic/v2_schema.py +0 -2
  14. prefect/_internal/pydantic/v2_validated_func.py +18 -23
  15. prefect/_internal/schemas/bases.py +44 -177
  16. prefect/_internal/schemas/fields.py +1 -43
  17. prefect/_internal/schemas/validators.py +60 -158
  18. prefect/artifacts.py +161 -14
  19. prefect/automations.py +39 -4
  20. prefect/blocks/abstract.py +1 -1
  21. prefect/blocks/core.py +268 -148
  22. prefect/blocks/fields.py +2 -57
  23. prefect/blocks/kubernetes.py +8 -12
  24. prefect/blocks/notifications.py +40 -20
  25. prefect/blocks/system.py +22 -11
  26. prefect/blocks/webhook.py +2 -9
  27. prefect/client/base.py +4 -4
  28. prefect/client/cloud.py +8 -13
  29. prefect/client/orchestration.py +347 -341
  30. prefect/client/schemas/actions.py +92 -86
  31. prefect/client/schemas/filters.py +20 -40
  32. prefect/client/schemas/objects.py +151 -145
  33. prefect/client/schemas/responses.py +16 -24
  34. prefect/client/schemas/schedules.py +47 -35
  35. prefect/client/subscriptions.py +2 -2
  36. prefect/client/utilities.py +5 -2
  37. prefect/concurrency/asyncio.py +3 -1
  38. prefect/concurrency/events.py +1 -1
  39. prefect/concurrency/services.py +6 -3
  40. prefect/context.py +195 -27
  41. prefect/deployments/__init__.py +5 -6
  42. prefect/deployments/base.py +7 -5
  43. prefect/deployments/flow_runs.py +185 -0
  44. prefect/deployments/runner.py +50 -45
  45. prefect/deployments/schedules.py +28 -23
  46. prefect/deployments/steps/__init__.py +0 -1
  47. prefect/deployments/steps/core.py +1 -0
  48. prefect/deployments/steps/pull.py +7 -21
  49. prefect/engine.py +12 -2422
  50. prefect/events/actions.py +17 -23
  51. prefect/events/cli/automations.py +19 -6
  52. prefect/events/clients.py +14 -37
  53. prefect/events/filters.py +14 -18
  54. prefect/events/related.py +2 -2
  55. prefect/events/schemas/__init__.py +0 -5
  56. prefect/events/schemas/automations.py +55 -46
  57. prefect/events/schemas/deployment_triggers.py +7 -197
  58. prefect/events/schemas/events.py +34 -65
  59. prefect/events/schemas/labelling.py +10 -14
  60. prefect/events/utilities.py +2 -3
  61. prefect/events/worker.py +2 -3
  62. prefect/filesystems.py +6 -517
  63. prefect/{new_flow_engine.py → flow_engine.py} +313 -72
  64. prefect/flow_runs.py +377 -5
  65. prefect/flows.py +307 -166
  66. prefect/futures.py +186 -345
  67. prefect/infrastructure/__init__.py +0 -27
  68. prefect/infrastructure/provisioners/__init__.py +5 -3
  69. prefect/infrastructure/provisioners/cloud_run.py +11 -6
  70. prefect/infrastructure/provisioners/container_instance.py +11 -7
  71. prefect/infrastructure/provisioners/ecs.py +6 -4
  72. prefect/infrastructure/provisioners/modal.py +8 -5
  73. prefect/input/actions.py +2 -4
  74. prefect/input/run_input.py +5 -7
  75. prefect/logging/formatters.py +0 -2
  76. prefect/logging/handlers.py +3 -11
  77. prefect/logging/loggers.py +2 -2
  78. prefect/manifests.py +2 -1
  79. prefect/records/__init__.py +1 -0
  80. prefect/records/result_store.py +42 -0
  81. prefect/records/store.py +9 -0
  82. prefect/results.py +43 -39
  83. prefect/runner/runner.py +19 -15
  84. prefect/runner/server.py +6 -10
  85. prefect/runner/storage.py +3 -8
  86. prefect/runner/submit.py +2 -2
  87. prefect/runner/utils.py +2 -2
  88. prefect/serializers.py +24 -35
  89. prefect/server/api/collections_data/views/aggregate-worker-metadata.json +5 -14
  90. prefect/settings.py +70 -133
  91. prefect/states.py +17 -47
  92. prefect/task_engine.py +697 -58
  93. prefect/task_runners.py +269 -301
  94. prefect/task_server.py +53 -34
  95. prefect/tasks.py +327 -337
  96. prefect/transactions.py +220 -0
  97. prefect/types/__init__.py +61 -82
  98. prefect/utilities/asyncutils.py +195 -136
  99. prefect/utilities/callables.py +311 -43
  100. prefect/utilities/collections.py +23 -38
  101. prefect/utilities/dispatch.py +11 -3
  102. prefect/utilities/dockerutils.py +4 -0
  103. prefect/utilities/engine.py +140 -20
  104. prefect/utilities/importtools.py +97 -27
  105. prefect/utilities/pydantic.py +128 -38
  106. prefect/utilities/schema_tools/hydration.py +5 -1
  107. prefect/utilities/templating.py +12 -2
  108. prefect/variables.py +78 -61
  109. prefect/workers/__init__.py +0 -1
  110. prefect/workers/base.py +15 -17
  111. prefect/workers/process.py +3 -8
  112. prefect/workers/server.py +2 -2
  113. {prefect_client-2.19.2.dist-info → prefect_client-3.0.0rc1.dist-info}/METADATA +22 -21
  114. prefect_client-3.0.0rc1.dist-info/RECORD +176 -0
  115. prefect/_internal/pydantic/_base_model.py +0 -51
  116. prefect/_internal/pydantic/_compat.py +0 -82
  117. prefect/_internal/pydantic/_flags.py +0 -20
  118. prefect/_internal/pydantic/_types.py +0 -8
  119. prefect/_internal/pydantic/utilities/__init__.py +0 -0
  120. prefect/_internal/pydantic/utilities/config_dict.py +0 -72
  121. prefect/_internal/pydantic/utilities/field_validator.py +0 -150
  122. prefect/_internal/pydantic/utilities/model_construct.py +0 -56
  123. prefect/_internal/pydantic/utilities/model_copy.py +0 -55
  124. prefect/_internal/pydantic/utilities/model_dump.py +0 -136
  125. prefect/_internal/pydantic/utilities/model_dump_json.py +0 -112
  126. prefect/_internal/pydantic/utilities/model_fields.py +0 -50
  127. prefect/_internal/pydantic/utilities/model_fields_set.py +0 -29
  128. prefect/_internal/pydantic/utilities/model_json_schema.py +0 -82
  129. prefect/_internal/pydantic/utilities/model_rebuild.py +0 -80
  130. prefect/_internal/pydantic/utilities/model_validate.py +0 -75
  131. prefect/_internal/pydantic/utilities/model_validate_json.py +0 -68
  132. prefect/_internal/pydantic/utilities/model_validator.py +0 -87
  133. prefect/_internal/pydantic/utilities/type_adapter.py +0 -71
  134. prefect/_vendor/__init__.py +0 -0
  135. prefect/_vendor/fastapi/__init__.py +0 -25
  136. prefect/_vendor/fastapi/applications.py +0 -946
  137. prefect/_vendor/fastapi/background.py +0 -3
  138. prefect/_vendor/fastapi/concurrency.py +0 -44
  139. prefect/_vendor/fastapi/datastructures.py +0 -58
  140. prefect/_vendor/fastapi/dependencies/__init__.py +0 -0
  141. prefect/_vendor/fastapi/dependencies/models.py +0 -64
  142. prefect/_vendor/fastapi/dependencies/utils.py +0 -877
  143. prefect/_vendor/fastapi/encoders.py +0 -177
  144. prefect/_vendor/fastapi/exception_handlers.py +0 -40
  145. prefect/_vendor/fastapi/exceptions.py +0 -46
  146. prefect/_vendor/fastapi/logger.py +0 -3
  147. prefect/_vendor/fastapi/middleware/__init__.py +0 -1
  148. prefect/_vendor/fastapi/middleware/asyncexitstack.py +0 -25
  149. prefect/_vendor/fastapi/middleware/cors.py +0 -3
  150. prefect/_vendor/fastapi/middleware/gzip.py +0 -3
  151. prefect/_vendor/fastapi/middleware/httpsredirect.py +0 -3
  152. prefect/_vendor/fastapi/middleware/trustedhost.py +0 -3
  153. prefect/_vendor/fastapi/middleware/wsgi.py +0 -3
  154. prefect/_vendor/fastapi/openapi/__init__.py +0 -0
  155. prefect/_vendor/fastapi/openapi/constants.py +0 -2
  156. prefect/_vendor/fastapi/openapi/docs.py +0 -203
  157. prefect/_vendor/fastapi/openapi/models.py +0 -480
  158. prefect/_vendor/fastapi/openapi/utils.py +0 -485
  159. prefect/_vendor/fastapi/param_functions.py +0 -340
  160. prefect/_vendor/fastapi/params.py +0 -453
  161. prefect/_vendor/fastapi/requests.py +0 -4
  162. prefect/_vendor/fastapi/responses.py +0 -40
  163. prefect/_vendor/fastapi/routing.py +0 -1331
  164. prefect/_vendor/fastapi/security/__init__.py +0 -15
  165. prefect/_vendor/fastapi/security/api_key.py +0 -98
  166. prefect/_vendor/fastapi/security/base.py +0 -6
  167. prefect/_vendor/fastapi/security/http.py +0 -172
  168. prefect/_vendor/fastapi/security/oauth2.py +0 -227
  169. prefect/_vendor/fastapi/security/open_id_connect_url.py +0 -34
  170. prefect/_vendor/fastapi/security/utils.py +0 -10
  171. prefect/_vendor/fastapi/staticfiles.py +0 -1
  172. prefect/_vendor/fastapi/templating.py +0 -3
  173. prefect/_vendor/fastapi/testclient.py +0 -1
  174. prefect/_vendor/fastapi/types.py +0 -3
  175. prefect/_vendor/fastapi/utils.py +0 -235
  176. prefect/_vendor/fastapi/websockets.py +0 -7
  177. prefect/_vendor/starlette/__init__.py +0 -1
  178. prefect/_vendor/starlette/_compat.py +0 -28
  179. prefect/_vendor/starlette/_exception_handler.py +0 -80
  180. prefect/_vendor/starlette/_utils.py +0 -88
  181. prefect/_vendor/starlette/applications.py +0 -261
  182. prefect/_vendor/starlette/authentication.py +0 -159
  183. prefect/_vendor/starlette/background.py +0 -43
  184. prefect/_vendor/starlette/concurrency.py +0 -59
  185. prefect/_vendor/starlette/config.py +0 -151
  186. prefect/_vendor/starlette/convertors.py +0 -87
  187. prefect/_vendor/starlette/datastructures.py +0 -707
  188. prefect/_vendor/starlette/endpoints.py +0 -130
  189. prefect/_vendor/starlette/exceptions.py +0 -60
  190. prefect/_vendor/starlette/formparsers.py +0 -276
  191. prefect/_vendor/starlette/middleware/__init__.py +0 -17
  192. prefect/_vendor/starlette/middleware/authentication.py +0 -52
  193. prefect/_vendor/starlette/middleware/base.py +0 -220
  194. prefect/_vendor/starlette/middleware/cors.py +0 -176
  195. prefect/_vendor/starlette/middleware/errors.py +0 -265
  196. prefect/_vendor/starlette/middleware/exceptions.py +0 -74
  197. prefect/_vendor/starlette/middleware/gzip.py +0 -113
  198. prefect/_vendor/starlette/middleware/httpsredirect.py +0 -19
  199. prefect/_vendor/starlette/middleware/sessions.py +0 -82
  200. prefect/_vendor/starlette/middleware/trustedhost.py +0 -64
  201. prefect/_vendor/starlette/middleware/wsgi.py +0 -147
  202. prefect/_vendor/starlette/requests.py +0 -328
  203. prefect/_vendor/starlette/responses.py +0 -347
  204. prefect/_vendor/starlette/routing.py +0 -933
  205. prefect/_vendor/starlette/schemas.py +0 -154
  206. prefect/_vendor/starlette/staticfiles.py +0 -248
  207. prefect/_vendor/starlette/status.py +0 -199
  208. prefect/_vendor/starlette/templating.py +0 -231
  209. prefect/_vendor/starlette/testclient.py +0 -804
  210. prefect/_vendor/starlette/types.py +0 -30
  211. prefect/_vendor/starlette/websockets.py +0 -193
  212. prefect/agent.py +0 -698
  213. prefect/deployments/deployments.py +0 -1042
  214. prefect/deprecated/__init__.py +0 -0
  215. prefect/deprecated/data_documents.py +0 -350
  216. prefect/deprecated/packaging/__init__.py +0 -12
  217. prefect/deprecated/packaging/base.py +0 -96
  218. prefect/deprecated/packaging/docker.py +0 -146
  219. prefect/deprecated/packaging/file.py +0 -92
  220. prefect/deprecated/packaging/orion.py +0 -80
  221. prefect/deprecated/packaging/serializers.py +0 -171
  222. prefect/events/instrument.py +0 -135
  223. prefect/infrastructure/base.py +0 -323
  224. prefect/infrastructure/container.py +0 -818
  225. prefect/infrastructure/kubernetes.py +0 -920
  226. prefect/infrastructure/process.py +0 -289
  227. prefect/new_task_engine.py +0 -423
  228. prefect/pydantic/__init__.py +0 -76
  229. prefect/pydantic/main.py +0 -39
  230. prefect/software/__init__.py +0 -2
  231. prefect/software/base.py +0 -50
  232. prefect/software/conda.py +0 -199
  233. prefect/software/pip.py +0 -122
  234. prefect/software/python.py +0 -52
  235. prefect/workers/block.py +0 -218
  236. prefect_client-2.19.2.dist-info/RECORD +0 -292
  237. {prefect_client-2.19.2.dist-info → prefect_client-3.0.0rc1.dist-info}/LICENSE +0 -0
  238. {prefect_client-2.19.2.dist-info → prefect_client-3.0.0rc1.dist-info}/WHEEL +0 -0
  239. {prefect_client-2.19.2.dist-info → prefect_client-3.0.0rc1.dist-info}/top_level.txt +0 -0
prefect/task_runners.py CHANGED
@@ -1,365 +1,333 @@
- """
- Interface and implementations of various task runners.
-
- [Task Runners](/concepts/task-runners/) in Prefect are responsible for managing the execution of Prefect task runs. Generally speaking, users are not expected to interact with task runners outside of configuring and initializing them for a flow.
-
- Example:
- ```
- >>> from prefect import flow, task
- >>> from prefect.task_runners import SequentialTaskRunner
- >>> from typing import List
- >>>
- >>> @task
- >>> def say_hello(name):
- ...     print(f"hello {name}")
- >>>
- >>> @task
- >>> def say_goodbye(name):
- ...     print(f"goodbye {name}")
- >>>
- >>> @flow(task_runner=SequentialTaskRunner())
- >>> def greetings(names: List[str]):
- ...     for name in names:
- ...         say_hello(name)
- ...         say_goodbye(name)
- >>>
- >>> greetings(["arthur", "trillian", "ford", "marvin"])
- hello arthur
- goodbye arthur
- hello trillian
- goodbye trillian
- hello ford
- goodbye ford
- hello marvin
- goodbye marvin
- ```
-
- Switching to a `DaskTaskRunner`:
- ```
- >>> from prefect_dask.task_runners import DaskTaskRunner
- >>> flow.task_runner = DaskTaskRunner()
- >>> greetings(["arthur", "trillian", "ford", "marvin"])
- hello arthur
- goodbye arthur
- hello trillian
- hello ford
- goodbye marvin
- hello marvin
- goodbye ford
- goodbye trillian
- ```
-
- For usage details, see the [Task Runners](/concepts/task-runners/) documentation.
- """
  import abc
- from contextlib import AsyncExitStack, asynccontextmanager
- from typing import (
-     TYPE_CHECKING,
-     Any,
-     AsyncIterator,
-     Awaitable,
-     Callable,
-     Dict,
-     Optional,
-     Set,
-     TypeVar,
+ import asyncio
+ import uuid
+ from concurrent.futures import ThreadPoolExecutor
+ from contextvars import copy_context
+ from typing import TYPE_CHECKING, Any, Dict, Generic, Iterable, Optional, Set
+
+ from typing_extensions import ParamSpec, Self, TypeVar
+
+ from prefect.client.schemas.objects import TaskRunInput
+ from prefect.exceptions import MappingLengthMismatch, MappingMissingIterable
+ from prefect.futures import (
+     PrefectConcurrentFuture,
+     PrefectDistributedFuture,
+     PrefectFuture,
  )
- from uuid import UUID
-
- import anyio
-
- from prefect._internal.concurrency.primitives import Event
- from prefect.client.schemas.objects import State
- from prefect.logging import get_logger
- from prefect.states import exception_to_crashed_state
- from prefect.utilities.collections import AutoEnum
+ from prefect.logging.loggers import get_logger, get_run_logger
+ from prefect.utilities.annotations import allow_failure, quote, unmapped
+ from prefect.utilities.callables import (
+     collapse_variadic_parameters,
+     explode_variadic_parameter,
+     get_parameter_defaults,
+ )
+ from prefect.utilities.collections import isiterable

  if TYPE_CHECKING:
-     import anyio.abc
+     from prefect.tasks import Task

+ P = ParamSpec("P")
+ T = TypeVar("T")
+ F = TypeVar("F", bound=PrefectFuture)

- T = TypeVar("T", bound="BaseTaskRunner")
- R = TypeVar("R")
-
-
- class TaskConcurrencyType(AutoEnum):
-     SEQUENTIAL = AutoEnum.auto()
-     CONCURRENT = AutoEnum.auto()
-     PARALLEL = AutoEnum.auto()

+ class TaskRunner(abc.ABC, Generic[F]):
+     """
+     Abstract base class for task runners.

- CONCURRENCY_MESSAGES = {
-     TaskConcurrencyType.SEQUENTIAL: "sequentially",
-     TaskConcurrencyType.CONCURRENT: "concurrently",
-     TaskConcurrencyType.PARALLEL: "in parallel",
- }
+     A task runner is responsible for submitting tasks to the task run engine running
+     in an execution environment. Submitted tasks are non-blocking and return a future
+     object that can be used to wait for the task to complete and retrieve the result.

+     Task runners are context managers and should be used in a `with` block to ensure
+     proper cleanup of resources.
+     """

- class BaseTaskRunner(metaclass=abc.ABCMeta):
-     def __init__(self) -> None:
+     def __init__(self):
          self.logger = get_logger(f"task_runner.{self.name}")
-         self._started: bool = False
-
-     @property
-     @abc.abstractmethod
-     def concurrency_type(self) -> TaskConcurrencyType:
-         pass  # noqa
+         self._started = False

      @property
      def name(self):
+         """The name of this task runner"""
          return type(self).__name__.lower().replace("taskrunner", "")

-     def duplicate(self):
-         """
-         Return a new task runner instance with the same options.
-         """
-         # The base class returns `NotImplemented` to indicate that this is not yet
-         # implemented by a given task runner.
-         return NotImplemented
-
-     def __eq__(self, other: object) -> bool:
-         """
-         Returns true if the task runners use the same options.
-         """
-         if type(other) == type(self) and (
-             # Compare public attributes for naive equality check
-             # Subclasses should implement this method with a check init option equality
-             {k: v for k, v in self.__dict__.items() if not k.startswith("_")}
-             == {k: v for k, v in other.__dict__.items() if not k.startswith("_")}
-         ):
-             return True
-         else:
-             return NotImplemented
+     @abc.abstractmethod
+     def duplicate(self) -> Self:
+         """Return a new instance of this task runner with the same configuration."""
+         ...

      @abc.abstractmethod
-     async def submit(
+     def submit(
          self,
-         key: UUID,
-         call: Callable[..., Awaitable[State[R]]],
-     ) -> None:
+         task: "Task",
+         parameters: Dict[str, Any],
+         wait_for: Optional[Iterable[PrefectFuture]] = None,
+         dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+     ) -> F:
          """
-         Submit a call for execution and return a `PrefectFuture` that can be used to
-         get the call result.
+         Submit a task to the task run engine.

          Args:
-             task_run: The task run being submitted.
-             task_key: A unique key for this orchestration run of the task. Can be used
-                 for caching.
-             call: The function to be executed
-             run_kwargs: A dict of keyword arguments to pass to `call`
+             task: The task to submit.
+             parameters: The parameters to use when running the task.
+             wait_for: A list of futures that the task depends on.

          Returns:
-             A future representing the result of `call` execution
+             A future object that can be used to wait for the task to complete and
+             retrieve the result.
          """
-         raise NotImplementedError()
+         ...

-     @abc.abstractmethod
-     async def wait(self, key: UUID, timeout: float = None) -> Optional[State]:
-         """
-         Given a `PrefectFuture`, wait for its return state up to `timeout` seconds.
-         If it is not finished after the timeout expires, `None` should be returned.
-
-         Implementers should be careful to ensure that this function never returns or
-         raises an exception.
-         """
-         raise NotImplementedError()
-
-     @asynccontextmanager
-     async def start(
-         self: T,
-     ) -> AsyncIterator[T]:
+     def map(
+         self,
+         task: "Task",
+         parameters: Dict[str, Any],
+         wait_for: Optional[Iterable[PrefectFuture]] = None,
+     ) -> Iterable[F]:
          """
-         Start the task runner, preparing any resources necessary for task submission.
+         Submit multiple tasks to the task run engine.

-         Children should implement `_start` to prepare and clean up resources.
-
-         Yields:
-             The prepared task runner
-         """
-         if self._started:
-             raise RuntimeError("The task runner is already started!")
-
-         async with AsyncExitStack() as exit_stack:
-             self.logger.debug("Starting task runner...")
-             try:
-                 await self._start(exit_stack)
-                 self._started = True
-                 yield self
-             finally:
-                 self.logger.debug("Shutting down task runner...")
-                 self._started = False
-
-     async def _start(self, exit_stack: AsyncExitStack) -> None:
-         """
-         Create any resources required for this task runner to submit work.
+         Args:
+             task: The task to submit.
+             parameters: The parameters to use when running the task.
+             wait_for: A list of futures that the task depends on.

-         Cleanup of resources should be submitted to the `exit_stack`.
+         Returns:
+             An iterable of future objects that can be used to wait for the tasks to
+             complete and retrieve the results.
          """
-         pass  # noqa
+         if not self._started:
+             raise RuntimeError(
+                 "The task runner must be started before submitting work."
+             )

-     def __str__(self) -> str:
-         return type(self).__name__
+         from prefect.utilities.engine import (
+             collect_task_run_inputs_sync,
+             resolve_inputs_sync,
+         )

+         # We need to resolve some futures to map over their data, collect the upstream
+         # links beforehand to retain relationship tracking.
+         task_inputs = {
+             k: collect_task_run_inputs_sync(v, max_depth=0)
+             for k, v in parameters.items()
+         }
+
+         # Resolve the top-level parameters in order to get mappable data of a known length.
+         # Nested parameters will be resolved in each mapped child where their relationships
+         # will also be tracked.
+         parameters = resolve_inputs_sync(parameters, max_depth=0)
+
+         # Ensure that any parameters in kwargs are expanded before this check
+         parameters = explode_variadic_parameter(task.fn, parameters)
+
+         iterable_parameters = {}
+         static_parameters = {}
+         annotated_parameters = {}
+         for key, val in parameters.items():
+             if isinstance(val, (allow_failure, quote)):
+                 # Unwrap annotated parameters to determine if they are iterable
+                 annotated_parameters[key] = val
+                 val = val.unwrap()
+
+             if isinstance(val, unmapped):
+                 static_parameters[key] = val.value
+             elif isiterable(val):
+                 iterable_parameters[key] = list(val)
+             else:
+                 static_parameters[key] = val
+
+         if not len(iterable_parameters):
+             raise MappingMissingIterable(
+                 "No iterable parameters were received. Parameters for map must "
+                 f"include at least one iterable. Parameters: {parameters}"
+             )

- class SequentialTaskRunner(BaseTaskRunner):
-     """
-     A simple task runner that executes calls as they are submitted.
+         iterable_parameter_lengths = {
+             key: len(val) for key, val in iterable_parameters.items()
+         }
+         lengths = set(iterable_parameter_lengths.values())
+         if len(lengths) > 1:
+             raise MappingLengthMismatch(
+                 "Received iterable parameters with different lengths. Parameters for map"
+                 f" must all be the same length. Got lengths: {iterable_parameter_lengths}"
+             )

-     If writing synchronous tasks, this runner will always execute tasks sequentially.
-     If writing async tasks, this runner will execute tasks sequentially unless grouped
-     using `anyio.create_task_group` or `asyncio.gather`.
-     """
+         map_length = list(lengths)[0]

-     def __init__(self) -> None:
-         super().__init__()
-         self._results: Dict[str, State] = {}
+         futures = []
+         for i in range(map_length):
+             call_parameters = {
+                 key: value[i] for key, value in iterable_parameters.items()
+             }
+             call_parameters.update(
+                 {key: value for key, value in static_parameters.items()}
+             )

-     @property
-     def concurrency_type(self) -> TaskConcurrencyType:
-         return TaskConcurrencyType.SEQUENTIAL
+             # Add default values for parameters; these are skipped earlier since they should
+             # not be mapped over
+             for key, value in get_parameter_defaults(task.fn).items():
+                 call_parameters.setdefault(key, value)
+
+             # Re-apply annotations to each key again
+             for key, annotation in annotated_parameters.items():
+                 call_parameters[key] = annotation.rewrap(call_parameters[key])
+
+             # Collapse any previously exploded kwargs
+             call_parameters = collapse_variadic_parameters(task.fn, call_parameters)
+
+             futures.append(
+                 self.submit(
+                     task=task,
+                     parameters=call_parameters,
+                     wait_for=wait_for,
+                     dependencies=task_inputs,
+                 )
+             )

-     def duplicate(self):
-         return type(self)()
+         return futures

-     async def submit(
-         self,
-         key: UUID,
-         call: Callable[..., Awaitable[State[R]]],
-     ) -> None:
-         # Run the function immediately and store the result in memory
-         try:
-             result = await call()
-         except BaseException as exc:
-             result = await exception_to_crashed_state(exc)
+     def __enter__(self):
+         if self._started:
+             raise RuntimeError("This task runner is already started")

-         self._results[key] = result
+         self.logger.debug("Starting task runner")
+         self._started = True
+         return self

-     async def wait(self, key: UUID, timeout: float = None) -> Optional[State]:
-         return self._results[key]
+     def __exit__(self, exc_type, exc_value, traceback):
+         self.logger.debug("Stopping task runner")
+         self._started = False


- class ConcurrentTaskRunner(BaseTaskRunner):
-     """
-     A concurrent task runner that allows tasks to switch when blocking on IO.
-     Synchronous tasks will be submitted to a thread pool maintained by `anyio`.
-
-     Example:
-     ```
-     Using a thread for concurrency:
-     >>> from prefect import flow
-     >>> from prefect.task_runners import ConcurrentTaskRunner
-     >>> @flow(task_runner=ConcurrentTaskRunner)
-     >>> def my_flow():
-     >>>     ...
-     ```
-     """
-
+ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture]):
      def __init__(self):
-         # TODO: Consider adding `max_workers` support using anyio capacity limiters
-
-         # Runtime attributes
-         self._task_group: anyio.abc.TaskGroup = None
-         self._result_events: Dict[UUID, Event] = {}
-         self._results: Dict[UUID, Any] = {}
-         self._keys: Set[UUID] = set()
-
          super().__init__()
+         self._executor: Optional[ThreadPoolExecutor] = None

-     @property
-     def concurrency_type(self) -> TaskConcurrencyType:
-         return TaskConcurrencyType.CONCURRENT
-
-     def duplicate(self):
+     def duplicate(self) -> "ThreadPoolTaskRunner":
          return type(self)()

-     async def submit(
+     def submit(
          self,
-         key: UUID,
-         call: Callable[[], Awaitable[State[R]]],
-     ) -> None:
-         if not self._started:
-             raise RuntimeError(
-                 "The task runner must be started before submitting work."
-             )
+         task: "Task",
+         parameters: Dict[str, Any],
+         wait_for: Optional[Iterable[PrefectFuture]] = None,
+         dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+     ) -> PrefectConcurrentFuture:
+         """
+         Submit a task to the task run engine running in a separate thread.

-         if not self._task_group:
-             raise RuntimeError(
-                 "The concurrent task runner cannot be used to submit work after "
-                 "serialization."
-             )
+         Args:
+             task: The task to submit.
+             parameters: The parameters to use when running the task.
+             wait_for: A list of futures that the task depends on.

-         # Create an event to set on completion
-         self._result_events[key] = Event()
+         Returns:
+             A future object that can be used to wait for the task to complete and
+             retrieve the result.
+         """
+         if not self._started or self._executor is None:
+             raise RuntimeError("Task runner is not started")

-         # Rely on the event loop for concurrency
-         self._task_group.start_soon(self._run_and_store_result, key, call)
+         from prefect.context import FlowRunContext
+         from prefect.task_engine import run_task_async, run_task_sync

-     async def wait(
-         self,
-         key: UUID,
-         timeout: float = None,
-     ) -> Optional[State]:
-         if not self._task_group:
-             raise RuntimeError(
-                 "The concurrent task runner cannot be used to wait for work after "
-                 "serialization."
+         task_run_id = uuid.uuid4()
+         context = copy_context()
+
+         flow_run_ctx = FlowRunContext.get()
+         if flow_run_ctx:
+             get_run_logger(flow_run_ctx).info(
+                 f"Submitting task {task.name} to thread pool executor..."
+             )
+         else:
+             self.logger.info(f"Submitting task {task.name} to thread pool executor...")
+
+         if task.isasync:
+             # TODO: Explore possibly using a long-lived thread with an event loop
+             # for better performance
+             future = self._executor.submit(
+                 context.run,
+                 asyncio.run,
+                 run_task_async(
+                     task=task,
+                     task_run_id=task_run_id,
+                     parameters=parameters,
+                     wait_for=wait_for,
+                     return_type="state",
+                     dependencies=dependencies,
+                 ),
+             )
+         else:
+             future = self._executor.submit(
+                 context.run,
+                 run_task_sync,
+                 task=task,
+                 task_run_id=task_run_id,
+                 parameters=parameters,
+                 wait_for=wait_for,
+                 return_type="state",
+                 dependencies=dependencies,
              )
+         prefect_future = PrefectConcurrentFuture(
+             task_run_id=task_run_id, wrapped_future=future
+         )
+         return prefect_future

-         return await self._get_run_result(key, timeout)
+     def __enter__(self):
+         super().__enter__()
+         self._executor = ThreadPoolExecutor()
+         return self

-     async def _run_and_store_result(
-         self, key: UUID, call: Callable[[], Awaitable[State[R]]]
-     ):
-         """
-         Simple utility to store the orchestration result in memory on completion
+     def __exit__(self, exc_type, exc_value, traceback):
+         if self._executor is not None:
+             self._executor.shutdown()
+             self._executor = None
+         super().__exit__(exc_type, exc_value, traceback)

-         Since this run is occurring on the main thread, we capture exceptions to prevent
-         task crashes from crashing the flow run.
-         """
-         try:
-             result = await call()
-         except BaseException as exc:
-             result = await exception_to_crashed_state(exc)

-         self._results[key] = result
-         self._result_events[key].set()
+ class PrefectTaskRunner(TaskRunner[PrefectDistributedFuture]):
+     def __init__(self):
+         super().__init__()

-     async def _get_run_result(
-         self, key: UUID, timeout: float = None
-     ) -> Optional[State]:
-         """
-         Block until the run result has been populated.
-         """
-         result = None  # retval on timeout
+     def duplicate(self) -> "PrefectTaskRunner":
+         return type(self)()

-         # Note we do not use `asyncio.wrap_future` and instead use an `Event` to avoid
-         # stdlib behavior where the wrapped future is cancelled if the parent future is
-         # cancelled (as it would be during a timeout here)
-         with anyio.move_on_after(timeout):
-             await self._result_events[key].wait()
-             result = self._results[key]
+     def submit(
+         self,
+         task: "Task",
+         parameters: Dict[str, Any],
+         wait_for: Optional[Iterable[PrefectFuture]] = None,
+         dependencies: Optional[Dict[str, Set[TaskRunInput]]] = None,
+     ) -> PrefectDistributedFuture:
+         """
+         Submit a task to the task run engine running in a separate thread.

-         return result  # timeout reached
+         Args:
+             task: The task to submit.
+             parameters: The parameters to use when running the task.
+             wait_for: A list of futures that the task depends on.

-     async def _start(self, exit_stack: AsyncExitStack):
-         """
-         Start the process pool
+         Returns:
+             A future object that can be used to wait for the task to complete and
+             retrieve the result.
          """
-         self._task_group = await exit_stack.enter_async_context(
-             anyio.create_task_group()
-         )
+         if not self._started:
+             raise RuntimeError("Task runner is not started")
+         from prefect.context import FlowRunContext

-     def __getstate__(self):
-         """
-         Allow the `ConcurrentTaskRunner` to be serialized by dropping the task group.
-         """
-         data = self.__dict__.copy()
-         data.update({k: None for k in {"_task_group"}})
-         return data
+         flow_run_ctx = FlowRunContext.get()
+         if flow_run_ctx:
+             get_run_logger(flow_run_ctx).info(
+                 f"Submitting task {task.name} to for execution by a Prefect task server..."
+             )
+         else:
+             self.logger.info(
+                 f"Submitting task {task.name} to for execution by a Prefect task server..."
+             )

-     def __setstate__(self, data: dict):
-         """
-         When deserialized, we will no longer have a reference to the task group.
-         """
-         self.__dict__.update(data)
-         self._task_group = None
+         return task.apply_async(
+             kwargs=parameters, wait_for=wait_for, dependencies=dependencies
+         )
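
The module docstring removed above was the only usage example in this file. As a rough sketch of how the replacement API is typically driven from user code (assuming `ThreadPoolTaskRunner` is importable from `prefect.task_runners` as this diff shows, and that `Task.submit` still routes work through the active flow's task runner and returns a future with a `.result()` method, as in 2.x):

```
from typing import List

from prefect import flow, task
from prefect.task_runners import ThreadPoolTaskRunner


@task
def say_hello(name: str) -> str:
    # Each submitted run executes on a worker thread owned by the runner.
    return f"hello {name}"


@flow(task_runner=ThreadPoolTaskRunner())
def greetings(names: List[str]) -> None:
    # `submit` is non-blocking and returns a future wrapping the task run.
    futures = [say_hello.submit(name) for name in names]
    for future in futures:
        # Block until the run finishes and fetch its return value.
        print(future.result())


if __name__ == "__main__":
    greetings(["arthur", "trillian", "ford", "marvin"])
```

Compared to the removed `ConcurrentTaskRunner`, the new runner hands each submission to a `concurrent.futures.ThreadPoolExecutor` and wraps the resulting future in a `PrefectConcurrentFuture`; async tasks are driven with `asyncio.run` inside the worker thread, as the `submit` implementation in the diff shows.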