prefect-client 2.16.8__py3-none-any.whl → 2.17.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (89)
  1. prefect/__init__.py +0 -18
  2. prefect/_internal/compatibility/deprecated.py +108 -5
  3. prefect/_internal/compatibility/experimental.py +9 -8
  4. prefect/_internal/concurrency/api.py +23 -42
  5. prefect/_internal/concurrency/waiters.py +25 -22
  6. prefect/_internal/pydantic/__init__.py +16 -3
  7. prefect/_internal/pydantic/_base_model.py +39 -4
  8. prefect/_internal/pydantic/_compat.py +69 -452
  9. prefect/_internal/pydantic/_flags.py +5 -0
  10. prefect/_internal/pydantic/_types.py +8 -0
  11. prefect/_internal/pydantic/utilities/__init__.py +0 -0
  12. prefect/_internal/pydantic/utilities/config_dict.py +72 -0
  13. prefect/_internal/pydantic/utilities/field_validator.py +135 -0
  14. prefect/_internal/pydantic/utilities/model_construct.py +56 -0
  15. prefect/_internal/pydantic/utilities/model_copy.py +55 -0
  16. prefect/_internal/pydantic/utilities/model_dump.py +136 -0
  17. prefect/_internal/pydantic/utilities/model_dump_json.py +112 -0
  18. prefect/_internal/pydantic/utilities/model_fields.py +50 -0
  19. prefect/_internal/pydantic/utilities/model_fields_set.py +29 -0
  20. prefect/_internal/pydantic/utilities/model_json_schema.py +82 -0
  21. prefect/_internal/pydantic/utilities/model_rebuild.py +80 -0
  22. prefect/_internal/pydantic/utilities/model_validate.py +75 -0
  23. prefect/_internal/pydantic/utilities/model_validate_json.py +68 -0
  24. prefect/_internal/pydantic/utilities/model_validator.py +79 -0
  25. prefect/_internal/pydantic/utilities/type_adapter.py +71 -0
  26. prefect/_internal/schemas/bases.py +1 -17
  27. prefect/_internal/schemas/validators.py +425 -4
  28. prefect/agent.py +1 -1
  29. prefect/blocks/kubernetes.py +7 -3
  30. prefect/blocks/notifications.py +18 -18
  31. prefect/blocks/webhook.py +1 -1
  32. prefect/client/base.py +7 -0
  33. prefect/client/cloud.py +1 -1
  34. prefect/client/orchestration.py +51 -11
  35. prefect/client/schemas/actions.py +367 -297
  36. prefect/client/schemas/filters.py +28 -28
  37. prefect/client/schemas/objects.py +78 -147
  38. prefect/client/schemas/responses.py +240 -60
  39. prefect/client/schemas/schedules.py +6 -8
  40. prefect/concurrency/events.py +2 -2
  41. prefect/context.py +4 -2
  42. prefect/deployments/base.py +6 -13
  43. prefect/deployments/deployments.py +34 -9
  44. prefect/deployments/runner.py +9 -27
  45. prefect/deprecated/packaging/base.py +5 -6
  46. prefect/deprecated/packaging/docker.py +19 -25
  47. prefect/deprecated/packaging/file.py +10 -5
  48. prefect/deprecated/packaging/orion.py +9 -4
  49. prefect/deprecated/packaging/serializers.py +8 -58
  50. prefect/engine.py +55 -618
  51. prefect/events/actions.py +16 -1
  52. prefect/events/clients.py +45 -13
  53. prefect/events/filters.py +19 -2
  54. prefect/events/related.py +4 -4
  55. prefect/events/schemas/automations.py +13 -2
  56. prefect/events/schemas/deployment_triggers.py +73 -5
  57. prefect/events/schemas/events.py +1 -1
  58. prefect/events/utilities.py +12 -4
  59. prefect/events/worker.py +26 -8
  60. prefect/exceptions.py +3 -8
  61. prefect/filesystems.py +7 -7
  62. prefect/flows.py +7 -3
  63. prefect/infrastructure/provisioners/ecs.py +1 -0
  64. prefect/logging/configuration.py +2 -2
  65. prefect/manifests.py +1 -8
  66. prefect/profiles.toml +1 -1
  67. prefect/pydantic/__init__.py +74 -2
  68. prefect/pydantic/main.py +26 -2
  69. prefect/serializers.py +6 -31
  70. prefect/settings.py +72 -26
  71. prefect/software/python.py +3 -5
  72. prefect/task_server.py +2 -2
  73. prefect/utilities/callables.py +1 -1
  74. prefect/utilities/collections.py +2 -1
  75. prefect/utilities/dispatch.py +1 -0
  76. prefect/utilities/engine.py +629 -0
  77. prefect/utilities/pydantic.py +1 -1
  78. prefect/utilities/schema_tools/validation.py +2 -2
  79. prefect/utilities/visualization.py +1 -1
  80. prefect/variables.py +88 -12
  81. prefect/workers/base.py +20 -11
  82. prefect/workers/block.py +4 -8
  83. prefect/workers/process.py +2 -5
  84. {prefect_client-2.16.8.dist-info → prefect_client-2.17.0.dist-info}/METADATA +4 -3
  85. {prefect_client-2.16.8.dist-info → prefect_client-2.17.0.dist-info}/RECORD +88 -72
  86. prefect/_internal/schemas/transformations.py +0 -106
  87. {prefect_client-2.16.8.dist-info → prefect_client-2.17.0.dist-info}/LICENSE +0 -0
  88. {prefect_client-2.16.8.dist-info → prefect_client-2.17.0.dist-info}/WHEEL +0 -0
  89. {prefect_client-2.16.8.dist-info → prefect_client-2.17.0.dist-info}/top_level.txt +0 -0
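The most visible change in this release is the extraction of engine helper functions into the new prefect/utilities/engine.py module (+629 lines, roughly matching the 618 lines removed from prefect/engine.py below). As a hedged illustration only (not an official compatibility recipe), code that imported helpers such as resolve_inputs or propose_state from prefect.engine could guard against the move like this, assuming the 2.17.0 import layout shown in the engine.py diff below:

try:
    # 2.17.0+: canonical location per the import block in the engine.py diff below
    from prefect.utilities.engine import propose_state, resolve_inputs
except ImportError:
    # 2.16.x: the helpers were defined directly in prefect.engine
    from prefect.engine import propose_state, resolve_inputs  # type: ignore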
prefect/engine.py CHANGED
@@ -82,11 +82,9 @@ Client-side execution and orchestration of flows and tasks.
  """

  import asyncio
- import contextlib
  import logging
  import os
  import random
- import signal
  import sys
  import threading
  import time
@@ -95,7 +93,6 @@ from functools import partial
  from typing import (
  Any,
  Awaitable,
- Callable,
  Dict,
  Iterable,
  List,
@@ -120,16 +117,15 @@ from prefect._internal.compatibility.deprecated import deprecated_parameter
  from prefect._internal.compatibility.experimental import experimental_parameter
  from prefect._internal.concurrency.api import create_call, from_async, from_sync
  from prefect._internal.concurrency.calls import get_current_call
- from prefect._internal.concurrency.cancellation import CancelledError, get_deadline
+ from prefect._internal.concurrency.cancellation import CancelledError
  from prefect._internal.concurrency.threads import wait_for_global_loop_exit
  from prefect.client.orchestration import PrefectClient, get_client
- from prefect.client.schemas import FlowRun, OrchestrationResult, TaskRun
+ from prefect.client.schemas import FlowRun, TaskRun
  from prefect.client.schemas.filters import FlowRunFilter
  from prefect.client.schemas.objects import (
  StateDetails,
  StateType,
  TaskRunInput,
- TaskRunResult,
  )
  from prefect.client.schemas.responses import SetStateStatus
  from prefect.client.schemas.sorting import FlowRunSort
@@ -141,7 +137,6 @@ from prefect.context import (
  TaskRunContext,
  )
  from prefect.deployments import load_flow_from_flow_run
- from prefect.events import Event, emit_event
  from prefect.exceptions import (
  Abort,
  FlowPauseTimeout,
@@ -150,8 +145,6 @@ from prefect.exceptions import (
  NotPausedError,
  Pause,
  PausedRun,
- PrefectException,
- TerminationSignal,
  UpstreamTaskError,
  )
  from prefect.flows import Flow, load_flow_from_entrypoint
@@ -167,11 +160,9 @@ from prefect.logging.loggers import (
  patch_print,
  task_run_logger,
  )
- from prefect.results import BaseResult, ResultFactory, UnknownResult
+ from prefect.results import ResultFactory, UnknownResult
  from prefect.settings import (
  PREFECT_DEBUG_MODE,
- PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING,
- PREFECT_LOGGING_LOG_PRINTS,
  PREFECT_TASK_INTROSPECTION_WARN_THRESHOLD,
  PREFECT_TASKS_REFRESH_CACHE,
  PREFECT_UI_URL,
@@ -186,8 +177,6 @@ from prefect.states import (
  Suspended,
  exception_to_crashed_state,
  exception_to_failed_state,
- get_state_exception,
- is_state,
  return_value_to_state,
  )
  from prefect.task_runners import (
@@ -208,9 +197,22 @@ from prefect.utilities.callables import (
  get_parameter_defaults,
  parameters_to_args_kwargs,
  )
- from prefect.utilities.collections import StopVisiting, isiterable, visit_collection
- from prefect.utilities.pydantic import PartialModel
- from prefect.utilities.text import truncated_to
+ from prefect.utilities.collections import isiterable
+ from prefect.utilities.engine import (
+ _dynamic_key_for_task_run,
+ _get_hook_name,
+ _observed_flow_pauses,
+ _resolve_custom_flow_run_name,
+ _resolve_custom_task_run_name,
+ capture_sigterm,
+ check_api_reachable,
+ collect_task_run_inputs,
+ emit_task_run_state_change_event,
+ propose_state,
+ resolve_inputs,
+ should_log_prints,
+ wait_for_task_runs_and_report_crashes,
+ )

  R = TypeVar("R")
  T = TypeVar("T")
@@ -218,8 +220,6 @@ EngineReturnType = Literal["future", "state", "result"]

  NUM_CHARS_DYNAMIC_KEY = 8

- API_HEALTHCHECKS = {}
- UNTRACKABLE_TYPES = {bool, type(None), type(...), type(NotImplemented)}
  engine_logger = get_logger("engine")

@@ -246,12 +246,6 @@ def enter_flow_run_engine_from_flow_call(
  )
  return None

- if TaskRunContext.get():
- raise RuntimeError(
- "Flows cannot be run from within tasks. Did you mean to call this "
- "flow in a flow?"
- )
-
  parent_flow_run_context = FlowRunContext.get()
  is_subflow_run = parent_flow_run_context is not None
@@ -509,7 +503,7 @@ async def begin_flow_run(
  logger = flow_run_logger(flow_run, flow)

  log_prints = should_log_prints(flow)
- flow_run_context = PartialModel(FlowRunContext, log_prints=log_prints)
+ flow_run_context = FlowRunContext.construct(log_prints=log_prints)

  async with AsyncExitStack() as stack:
  await stack.enter_async_context(
@@ -622,13 +616,21 @@ async def create_and_begin_subflow_run(
  if wait_for:
  task_inputs["wait_for"] = await collect_task_run_inputs(wait_for)

- rerunning = parent_flow_run_context.flow_run.run_count > 1
+ rerunning = (
+ parent_flow_run_context.flow_run.run_count > 1
+ if getattr(parent_flow_run_context, "flow_run", None)
+ else False
+ )

  # Generate a task in the parent flow run to represent the result of the subflow run
  dummy_task = Task(name=flow.name, fn=flow.fn, version=flow.version)
  parent_task_run = await client.create_task_run(
  task=dummy_task,
- flow_run_id=parent_flow_run_context.flow_run.id,
+ flow_run_id=(
+ parent_flow_run_context.flow_run.id
+ if getattr(parent_flow_run_context, "flow_run", None)
+ else None
+ ),
  dynamic_key=_dynamic_key_for_task_run(parent_flow_run_context, dummy_task),
  task_inputs=task_inputs,
  state=Pending(),
@@ -722,8 +724,7 @@ async def create_and_begin_subflow_run(
  # interruptible as well
  interruptible=parent_flow_run_context.timeout_scope is not None,
  client=client,
- partial_flow_run_context=PartialModel(
- FlowRunContext,
+ partial_flow_run_context=FlowRunContext.construct(
  sync_portal=parent_flow_run_context.sync_portal,
  task_runner=task_runner,
  background_tasks=parent_flow_run_context.background_tasks,
@@ -758,7 +759,7 @@ async def orchestrate_flow_run(
  wait_for: Optional[Iterable[PrefectFuture]],
  interruptible: bool,
  client: PrefectClient,
- partial_flow_run_context: PartialModel[FlowRunContext],
+ partial_flow_run_context: FlowRunContext,
  user_thread: threading.Thread,
  ) -> State:
  """
@@ -809,11 +810,16 @@ async def orchestrate_flow_run(
  # Update the flow run to the latest data
  flow_run = await client.read_flow_run(flow_run.id)
  try:
- with partial_flow_run_context.finalize(
- flow=flow,
- flow_run=flow_run,
- client=client,
- parameters=parameters,
+ with FlowRunContext(
+ **{
+ **partial_flow_run_context.dict(),
+ **{
+ "flow_run": flow_run,
+ "flow": flow,
+ "client": client,
+ "parameters": parameters,
+ },
+ }
  ) as flow_run_context:
  # update flow run name
  if not run_name_set and flow.flow_run_name:
@@ -852,12 +858,14 @@ async def orchestrate_flow_run(
  if parent_call and (
  not parent_flow_run_context
  or (
- parent_flow_run_context
+ getattr(parent_flow_run_context, "flow", None)
  and parent_flow_run_context.flow.isasync == flow.isasync
  )
  ):
  from_async.call_soon_in_waiting_thread(
- flow_call, thread=user_thread, timeout=flow.timeout_seconds
+ flow_call,
+ thread=user_thread,
+ timeout=flow.timeout_seconds,
  )
  else:
  from_async.call_soon_in_new_thread(
@@ -1371,17 +1379,11 @@ def enter_task_run_engine(
  flow_run_context = FlowRunContext.get()

  if not flow_run_context:
- if (
- not PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING.value()
- or return_type == "future"
- or mapped
- ):
+ if return_type == "future" or mapped:
  raise RuntimeError(
- "Tasks cannot be run outside of a flow by default."
- " If you meant to submit an autonomous task, you need to set"
+ " If you meant to submit a background task, you need to set"
  " `prefect config set PREFECT_EXPERIMENTAL_ENABLE_TASK_SCHEDULING=true`"
  " and use `your_task.submit()` instead of `your_task()`."
- " Mapping autonomous tasks is not yet supported."
  )
  from prefect.task_engine import submit_autonomous_task_run_to_engine

@@ -1395,12 +1397,6 @@ def enter_task_run_engine(
  client=get_client(),
  )

- if TaskRunContext.get():
- raise RuntimeError(
- "Tasks cannot be run from within tasks. Did you mean to call this "
- "task in a flow?"
- )
-
  if flow_run_context.timeout_scope and flow_run_context.timeout_scope.cancel_called:
  raise TimeoutError("Flow run timed out")
@@ -1528,52 +1524,6 @@ async def begin_task_map(
  return await gather(*task_runs)


- async def collect_task_run_inputs(expr: Any, max_depth: int = -1) -> Set[TaskRunInput]:
- """
- This function recurses through an expression to generate a set of any discernible
- task run inputs it finds in the data structure. It produces a set of all inputs
- found.
-
- Examples:
- >>> task_inputs = {
- >>> k: await collect_task_run_inputs(v) for k, v in parameters.items()
- >>> }
- """
- # TODO: This function needs to be updated to detect parameters and constants
-
- inputs = set()
- futures = set()
-
- def add_futures_and_states_to_inputs(obj):
- if isinstance(obj, PrefectFuture):
- # We need to wait for futures to be submitted before we can get the task
- # run id but we want to do so asynchronously
- futures.add(obj)
- elif is_state(obj):
- if obj.state_details.task_run_id:
- inputs.add(TaskRunResult(id=obj.state_details.task_run_id))
- # Expressions inside quotes should not be traversed
- elif isinstance(obj, quote):
- raise StopVisiting
- else:
- state = get_state_for_result(obj)
- if state and state.state_details.task_run_id:
- inputs.add(TaskRunResult(id=state.state_details.task_run_id))
-
- visit_collection(
- expr,
- visit_fn=add_futures_and_states_to_inputs,
- return_data=False,
- max_depth=max_depth,
- )
-
- await asyncio.gather(*[future._wait_for_submission() for future in futures])
- for future in futures:
- inputs.add(TaskRunResult(id=future.task_run.id))
-
- return inputs
-
-
  async def get_task_call_return_value(
  task: Task,
  flow_run_context: FlowRunContext,
@@ -1941,8 +1891,7 @@ async def orchestrate_task_run(
  flow_run = await client.read_flow_run(task_run.flow_run_id)
  logger = task_run_logger(task_run, task=task, flow_run=flow_run)

- partial_task_run_context = PartialModel(
- TaskRunContext,
+ partial_task_run_context = TaskRunContext.construct(
  task_run=task_run,
  task=task,
  client=client,
@@ -1982,17 +1931,20 @@ async def orchestrate_task_run(

  # Generate the cache key to attach to proposed states
  # The cache key uses a TaskRunContext that does not include a `timeout_context``
+
+ task_run_context = TaskRunContext(
+ **partial_task_run_context.dict(), parameters=resolved_parameters
+ )
+
  cache_key = (
  task.cache_key_fn(
- partial_task_run_context.finalize(parameters=resolved_parameters),
+ task_run_context,
  resolved_parameters,
  )
  if task.cache_key_fn
  else None
  )

- task_run_context = partial_task_run_context.finalize(parameters=resolved_parameters)
-
  # Ignore the cached results for a cache key, default = false
  # Setting on task level overrules the Prefect setting (env var)
  refresh_cache = (
@@ -2242,85 +2194,6 @@ async def orchestrate_task_run(
  return state


- async def wait_for_task_runs_and_report_crashes(
- task_run_futures: Iterable[PrefectFuture], client: PrefectClient
- ) -> Literal[True]:
- crash_exceptions = []
-
- # Gather states concurrently first
- states = await gather(*(future._wait for future in task_run_futures))
-
- for future, state in zip(task_run_futures, states):
- logger = task_run_logger(future.task_run)
-
- if not state.type == StateType.CRASHED:
- continue
-
- # We use this utility instead of `state.result` for type checking
- exception = await get_state_exception(state)
-
- task_run = await client.read_task_run(future.task_run.id)
- if not task_run.state.is_crashed():
- logger.info(f"Crash detected! {state.message}")
- logger.debug("Crash details:", exc_info=exception)
-
- # Update the state of the task run
- result = await client.set_task_run_state(
- task_run_id=future.task_run.id, state=state, force=True
- )
- if result.status == SetStateStatus.ACCEPT:
- engine_logger.debug(
- f"Reported crashed task run {future.name!r} successfully."
- )
- else:
- engine_logger.warning(
- f"Failed to report crashed task run {future.name!r}. "
- f"Orchestrator did not accept state: {result!r}"
- )
- else:
- # Populate the state details on the local state
- future._final_state.state_details = task_run.state.state_details
-
- crash_exceptions.append(exception)
-
- # Now that we've finished reporting crashed tasks, reraise any exit exceptions
- for exception in crash_exceptions:
- if isinstance(exception, (KeyboardInterrupt, SystemExit)):
- raise exception
-
- return True
-
-
- @contextlib.contextmanager
- def capture_sigterm():
- def cancel_flow_run(*args):
- raise TerminationSignal(signal=signal.SIGTERM)
-
- original_term_handler = None
- try:
- original_term_handler = signal.signal(signal.SIGTERM, cancel_flow_run)
- except ValueError:
- # Signals only work in the main thread
- pass
-
- try:
- yield
- except TerminationSignal as exc:
- # Termination signals are swapped out during a flow run to perform
- # a graceful shutdown and raise this exception. This `os.kill` call
- # ensures that the previous handler, likely the Python default,
- # gets called as well.
- if original_term_handler is not None:
- signal.signal(exc.signal, original_term_handler)
- os.kill(os.getpid(), exc.signal)
-
- raise
-
- finally:
- if original_term_handler is not None:
- signal.signal(signal.SIGTERM, original_term_handler)
-
-
  @asynccontextmanager
  async def report_flow_run_crashes(flow_run: FlowRun, client: PrefectClient, flow: Flow):
  """
@@ -2391,370 +2264,6 @@ async def report_task_run_crashes(task_run: TaskRun, client: PrefectClient):
  raise


- async def resolve_inputs(
- parameters: Dict[str, Any], return_data: bool = True, max_depth: int = -1
- ) -> Dict[str, Any]:
- """
- Resolve any `Quote`, `PrefectFuture`, or `State` types nested in parameters into
- data.
-
- Returns:
- A copy of the parameters with resolved data
-
- Raises:
- UpstreamTaskError: If any of the upstream states are not `COMPLETED`
- """
-
- futures = set()
- states = set()
- result_by_state = {}
-
- if not parameters:
- return {}
-
- def collect_futures_and_states(expr, context):
- # Expressions inside quotes should not be traversed
- if isinstance(context.get("annotation"), quote):
- raise StopVisiting()
-
- if isinstance(expr, PrefectFuture):
- futures.add(expr)
- if is_state(expr):
- states.add(expr)
-
- return expr
-
- visit_collection(
- parameters,
- visit_fn=collect_futures_and_states,
- return_data=False,
- max_depth=max_depth,
- context={},
- )
-
- # Wait for all futures so we do not block when we retrieve the state in `resolve_input`
- states.update(await asyncio.gather(*[future._wait() for future in futures]))
-
- # Only retrieve the result if requested as it may be expensive
- if return_data:
- finished_states = [state for state in states if state.is_final()]
-
- state_results = await asyncio.gather(
- *[
- state.result(raise_on_failure=False, fetch=True)
- for state in finished_states
- ]
- )
-
- for state, result in zip(finished_states, state_results):
- result_by_state[state] = result
-
- def resolve_input(expr, context):
- state = None
-
- # Expressions inside quotes should not be modified
- if isinstance(context.get("annotation"), quote):
- raise StopVisiting()
-
- if isinstance(expr, PrefectFuture):
- state = expr._final_state
- elif is_state(expr):
- state = expr
- else:
- return expr
-
- # Do not allow uncompleted upstreams except failures when `allow_failure` has
- # been used
- if not state.is_completed() and not (
- # TODO: Note that the contextual annotation here is only at the current level
- # if `allow_failure` is used then another annotation is used, this will
- # incorrectly evaluate to false — to resolve this, we must track all
- # annotations wrapping the current expression but this is not yet
- # implemented.
- isinstance(context.get("annotation"), allow_failure) and state.is_failed()
- ):
- raise UpstreamTaskError(
- f"Upstream task run '{state.state_details.task_run_id}' did not reach a"
- " 'COMPLETED' state."
- )
-
- return result_by_state.get(state)
-
- resolved_parameters = {}
- for parameter, value in parameters.items():
- try:
- resolved_parameters[parameter] = visit_collection(
- value,
- visit_fn=resolve_input,
- return_data=return_data,
- # we're manually going 1 layer deeper here
- max_depth=max_depth - 1,
- remove_annotations=True,
- context={},
- )
- except UpstreamTaskError:
- raise
- except Exception as exc:
- raise PrefectException(
- f"Failed to resolve inputs in parameter {parameter!r}. If your"
- " parameter type is not supported, consider using the `quote`"
- " annotation to skip resolution of inputs."
- ) from exc
-
- return resolved_parameters
-
-
- async def propose_state(
- client: PrefectClient,
- state: State,
- force: bool = False,
- task_run_id: UUID = None,
- flow_run_id: UUID = None,
- ) -> State:
- """
- Propose a new state for a flow run or task run, invoking Prefect orchestration logic.
-
- If the proposed state is accepted, the provided `state` will be augmented with
- details and returned.
-
- If the proposed state is rejected, a new state returned by the Prefect API will be
- returned.
-
- If the proposed state results in a WAIT instruction from the Prefect API, the
- function will sleep and attempt to propose the state again.
-
- If the proposed state results in an ABORT instruction from the Prefect API, an
- error will be raised.
-
- Args:
- state: a new state for the task or flow run
- task_run_id: an optional task run id, used when proposing task run states
- flow_run_id: an optional flow run id, used when proposing flow run states
-
- Returns:
- a [State model][prefect.client.schemas.objects.State] representation of the
- flow or task run state
-
- Raises:
- ValueError: if neither task_run_id or flow_run_id is provided
- prefect.exceptions.Abort: if an ABORT instruction is received from
- the Prefect API
- """
-
- # Determine if working with a task run or flow run
- if not task_run_id and not flow_run_id:
- raise ValueError("You must provide either a `task_run_id` or `flow_run_id`")
-
- # Handle task and sub-flow tracing
- if state.is_final():
- if isinstance(state.data, BaseResult) and state.data.has_cached_object():
- # Avoid fetching the result unless it is cached, otherwise we defeat
- # the purpose of disabling `cache_result_in_memory`
- result = await state.result(raise_on_failure=False, fetch=True)
- else:
- result = state.data
-
- link_state_to_result(state, result)
-
- # Handle repeated WAITs in a loop instead of recursively, to avoid
- # reaching max recursion depth in extreme cases.
- async def set_state_and_handle_waits(set_state_func) -> OrchestrationResult:
- response = await set_state_func()
- while response.status == SetStateStatus.WAIT:
- engine_logger.debug(
- f"Received wait instruction for {response.details.delay_seconds}s: "
- f"{response.details.reason}"
- )
- await anyio.sleep(response.details.delay_seconds)
- response = await set_state_func()
- return response
-
- # Attempt to set the state
- if task_run_id:
- set_state = partial(client.set_task_run_state, task_run_id, state, force=force)
- response = await set_state_and_handle_waits(set_state)
- elif flow_run_id:
- set_state = partial(client.set_flow_run_state, flow_run_id, state, force=force)
- response = await set_state_and_handle_waits(set_state)
- else:
- raise ValueError(
- "Neither flow run id or task run id were provided. At least one must "
- "be given."
- )
-
- # Parse the response to return the new state
- if response.status == SetStateStatus.ACCEPT:
- # Update the state with the details if provided
- state.id = response.state.id
- state.timestamp = response.state.timestamp
- if response.state.state_details:
- state.state_details = response.state.state_details
- return state
-
- elif response.status == SetStateStatus.ABORT:
- raise prefect.exceptions.Abort(response.details.reason)
-
- elif response.status == SetStateStatus.REJECT:
- if response.state.is_paused():
- raise Pause(response.details.reason, state=response.state)
- return response.state
-
- else:
- raise ValueError(
- f"Received unexpected `SetStateStatus` from server: {response.status!r}"
- )
-
-
- def _dynamic_key_for_task_run(context: FlowRunContext, task: Task) -> int:
- if context.flow_run is None: # this is an autonomous task run
- context.task_run_dynamic_keys[task.task_key] = getattr(
- task, "dynamic_key", str(uuid4())
- )
-
- elif task.task_key not in context.task_run_dynamic_keys:
- context.task_run_dynamic_keys[task.task_key] = 0
- else:
- context.task_run_dynamic_keys[task.task_key] += 1
-
- return context.task_run_dynamic_keys[task.task_key]
-
-
- def _observed_flow_pauses(context: FlowRunContext) -> int:
- if "counter" not in context.observed_flow_pauses:
- context.observed_flow_pauses["counter"] = 1
- else:
- context.observed_flow_pauses["counter"] += 1
- return context.observed_flow_pauses["counter"]
-
-
- def get_state_for_result(obj: Any) -> Optional[State]:
- """
- Get the state related to a result object.
-
- `link_state_to_result` must have been called first.
- """
- flow_run_context = FlowRunContext.get()
- if flow_run_context:
- return flow_run_context.task_run_results.get(id(obj))
-
-
- def link_state_to_result(state: State, result: Any) -> None:
- """
- Caches a link between a state and a result and its components using
- the `id` of the components to map to the state. The cache is persisted to the
- current flow run context since task relationships are limited to within a flow run.
-
- This allows dependency tracking to occur when results are passed around.
- Note: Because `id` is used, we cannot cache links between singleton objects.
-
- We only cache the relationship between components 1-layer deep.
- Example:
- Given the result [1, ["a","b"], ("c",)], the following elements will be
- mapped to the state:
- - [1, ["a","b"], ("c",)]
- - ["a","b"]
- - ("c",)
-
- Note: the int `1` will not be mapped to the state because it is a singleton.
-
- Other Notes:
- We do not hash the result because:
- - If changes are made to the object in the flow between task calls, we can still
- track that they are related.
- - Hashing can be expensive.
- - Not all objects are hashable.
-
- We do not set an attribute, e.g. `__prefect_state__`, on the result because:
-
- - Mutating user's objects is dangerous.
- - Unrelated equality comparisons can break unexpectedly.
- - The field can be preserved on copy.
- - We cannot set this attribute on Python built-ins.
- """
-
- flow_run_context = FlowRunContext.get()
-
- def link_if_trackable(obj: Any) -> None:
- """Track connection between a task run result and its associated state if it has a unique ID.
-
- We cannot track booleans, Ellipsis, None, NotImplemented, or the integers from -5 to 256
- because they are singletons.
-
- This function will mutate the State if the object is an untrackable type by setting the value
- for `State.state_details.untrackable_result` to `True`.
-
- """
- if (type(obj) in UNTRACKABLE_TYPES) or (
- isinstance(obj, int) and (-5 <= obj <= 256)
- ):
- state.state_details.untrackable_result = True
- return
- flow_run_context.task_run_results[id(obj)] = state
-
- if flow_run_context:
- visit_collection(expr=result, visit_fn=link_if_trackable, max_depth=1)
-
-
- def should_log_prints(flow_or_task: Union[Flow, Task]) -> bool:
- flow_run_context = FlowRunContext.get()
-
- if flow_or_task.log_prints is None:
- if flow_run_context:
- return flow_run_context.log_prints
- else:
- return PREFECT_LOGGING_LOG_PRINTS.value()
-
- return flow_or_task.log_prints
-
-
- def _resolve_custom_flow_run_name(flow: Flow, parameters: Dict[str, Any]) -> str:
- if callable(flow.flow_run_name):
- flow_run_name = flow.flow_run_name()
- if not isinstance(flow_run_name, str):
- raise TypeError(
- f"Callable {flow.flow_run_name} for 'flow_run_name' returned type"
- f" {type(flow_run_name).__name__} but a string is required."
- )
- elif isinstance(flow.flow_run_name, str):
- flow_run_name = flow.flow_run_name.format(**parameters)
- else:
- raise TypeError(
- "Expected string or callable for 'flow_run_name'; got"
- f" {type(flow.flow_run_name).__name__} instead."
- )
-
- return flow_run_name
-
-
- def _resolve_custom_task_run_name(task: Task, parameters: Dict[str, Any]) -> str:
- if callable(task.task_run_name):
- task_run_name = task.task_run_name()
- if not isinstance(task_run_name, str):
- raise TypeError(
- f"Callable {task.task_run_name} for 'task_run_name' returned type"
- f" {type(task_run_name).__name__} but a string is required."
- )
- elif isinstance(task.task_run_name, str):
- task_run_name = task.task_run_name.format(**parameters)
- else:
- raise TypeError(
- "Expected string or callable for 'task_run_name'; got"
- f" {type(task.task_run_name).__name__} instead."
- )
-
- return task_run_name
-
-
- def _get_hook_name(hook: Callable) -> str:
- return (
- hook.__name__
- if hasattr(hook, "__name__")
- else (
- hook.func.__name__ if isinstance(hook, partial) else hook.__class__.__name__
- )
- )
-
-
  async def _run_task_hooks(task: Task, task_run: TaskRun, state: State) -> None:
  """Run the on_failure and on_completion hooks for a task, making sure to
  catch and log any errors that occur.
@@ -2882,78 +2391,6 @@ async def _run_flow_hooks(flow: Flow, flow_run: FlowRun, state: State) -> None:
  logger.info(f"Hook {hook_name!r} finished running successfully")


- async def check_api_reachable(client: PrefectClient, fail_message: str):
- # Do not perform a healthcheck if it exists and is not expired
- api_url = str(client.api_url)
- if api_url in API_HEALTHCHECKS:
- expires = API_HEALTHCHECKS[api_url]
- if expires > time.monotonic():
- return
-
- connect_error = await client.api_healthcheck()
- if connect_error:
- raise RuntimeError(
- f"{fail_message}. Failed to reach API at {api_url}."
- ) from connect_error
-
- # Create a 10 minute cache for the healthy response
- API_HEALTHCHECKS[api_url] = get_deadline(60 * 10)
-
-
- def emit_task_run_state_change_event(
- task_run: TaskRun,
- initial_state: Optional[State],
- validated_state: State,
- follows: Optional[Event] = None,
- ) -> Event:
- state_message_truncation_length = 100_000
-
- return emit_event(
- id=validated_state.id,
- occurred=validated_state.timestamp,
- event=f"prefect.task-run.{validated_state.name}",
- payload={
- "intended": {
- "from": str(initial_state.type.value) if initial_state else None,
- "to": str(validated_state.type.value) if validated_state else None,
- },
- "initial_state": (
- {
- "type": str(initial_state.type.value),
- "name": initial_state.name,
- "message": truncated_to(
- state_message_truncation_length, initial_state.message
- ),
- }
- if initial_state
- else None
- ),
- "validated_state": {
- "type": str(validated_state.type.value),
- "name": validated_state.name,
- "message": truncated_to(
- state_message_truncation_length, validated_state.message
- ),
- },
- },
- resource={
- "prefect.resource.id": f"prefect.task-run.{task_run.id}",
- "prefect.resource.name": task_run.name,
- "prefect.state-message": truncated_to(
- state_message_truncation_length, validated_state.message
- ),
- "prefect.state-name": validated_state.name or "",
- "prefect.state-timestamp": (
- validated_state.timestamp.isoformat()
- if validated_state and validated_state.timestamp
- else ""
- ),
- "prefect.state-type": str(validated_state.type.value),
- },
- follows=follows,
- )
-
-
  async def create_autonomous_task_run(task: Task, parameters: Dict[str, Any]) -> TaskRun:
  """Create a task run in the API for an autonomous task submission and store
  the provided parameters using the existing result storage mechanism.
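Several hunks above replace Prefect's internal PartialModel helper with pydantic's own two-step idiom: Model.construct(...) builds an unvalidated, partially populated context up front, and the full model is later instantiated from .dict() plus the late-bound fields. The following is a minimal, self-contained sketch of that idiom; RunContext is a hypothetical stand-in for FlowRunContext/TaskRunContext, not the real class, and its fields are illustrative only.

from typing import Optional

from pydantic import BaseModel  # prefect 2.x targets the pydantic v1-style API


class RunContext(BaseModel):
    """Hypothetical stand-in for FlowRunContext/TaskRunContext."""

    log_prints: bool = False
    parameters: Optional[dict] = None


# 2.16.x style: PartialModel(RunContext, log_prints=True).finalize(parameters={...})
# 2.17.0 style: build an unvalidated partial context, then finalize it by constructing
# the real model from its dict plus the fields that only become known later.
# (`construct` and `dict` are v1 methods; on pydantic 2 they remain as deprecated
# aliases of `model_construct` and `model_dump`.)
partial_ctx = RunContext.construct(log_prints=True)  # no validation performed here
full_ctx = RunContext(**{**partial_ctx.dict(), "parameters": {"x": 1}})  # validated now
print(full_ctx)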