prefect-client 3.0.0rc9__py3-none-any.whl → 3.0.0rc11__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (49)
  1. prefect/_internal/compatibility/migration.py +48 -8
  2. prefect/_internal/concurrency/api.py +1 -1
  3. prefect/_internal/retries.py +61 -0
  4. prefect/agent.py +6 -0
  5. prefect/client/cloud.py +1 -1
  6. prefect/client/schemas/objects.py +3 -4
  7. prefect/concurrency/asyncio.py +3 -3
  8. prefect/concurrency/events.py +1 -1
  9. prefect/concurrency/services.py +3 -2
  10. prefect/concurrency/sync.py +19 -5
  11. prefect/context.py +14 -2
  12. prefect/deployments/__init__.py +28 -15
  13. prefect/deployments/schedules.py +5 -2
  14. prefect/deployments/steps/pull.py +7 -0
  15. prefect/events/schemas/automations.py +3 -3
  16. prefect/exceptions.py +4 -1
  17. prefect/filesystems.py +4 -3
  18. prefect/flow_engine.py +76 -14
  19. prefect/flows.py +222 -64
  20. prefect/futures.py +53 -7
  21. prefect/infrastructure/__init__.py +6 -0
  22. prefect/infrastructure/base.py +6 -0
  23. prefect/logging/loggers.py +1 -1
  24. prefect/results.py +50 -67
  25. prefect/runner/runner.py +93 -20
  26. prefect/runner/server.py +20 -22
  27. prefect/runner/submit.py +0 -8
  28. prefect/runtime/flow_run.py +38 -3
  29. prefect/serializers.py +3 -3
  30. prefect/settings.py +15 -45
  31. prefect/task_engine.py +77 -21
  32. prefect/task_runners.py +28 -16
  33. prefect/task_worker.py +6 -4
  34. prefect/tasks.py +30 -5
  35. prefect/transactions.py +18 -2
  36. prefect/utilities/asyncutils.py +9 -3
  37. prefect/utilities/engine.py +34 -1
  38. prefect/utilities/importtools.py +1 -1
  39. prefect/utilities/timeout.py +20 -5
  40. prefect/workers/base.py +98 -208
  41. prefect/workers/block.py +6 -0
  42. prefect/workers/cloud.py +6 -0
  43. prefect/workers/process.py +262 -4
  44. prefect/workers/server.py +27 -9
  45. {prefect_client-3.0.0rc9.dist-info → prefect_client-3.0.0rc11.dist-info}/METADATA +4 -4
  46. {prefect_client-3.0.0rc9.dist-info → prefect_client-3.0.0rc11.dist-info}/RECORD +49 -44
  47. {prefect_client-3.0.0rc9.dist-info → prefect_client-3.0.0rc11.dist-info}/LICENSE +0 -0
  48. {prefect_client-3.0.0rc9.dist-info → prefect_client-3.0.0rc11.dist-info}/WHEEL +0 -0
  49. {prefect_client-3.0.0rc9.dist-info → prefect_client-3.0.0rc11.dist-info}/top_level.txt +0 -0
prefect/flow_engine.py CHANGED
@@ -7,7 +7,6 @@ from dataclasses import dataclass, field
 from typing import (
     Any,
     AsyncGenerator,
-    Callable,
     Coroutine,
     Dict,
     Generator,
@@ -16,6 +15,7 @@ from typing import (
     Literal,
     Optional,
     Tuple,
+    Type,
     TypeVar,
     Union,
     cast,
@@ -30,7 +30,13 @@ from prefect.client.schemas import FlowRun, TaskRun
 from prefect.client.schemas.filters import FlowRunFilter
 from prefect.client.schemas.sorting import FlowRunSort
 from prefect.context import ClientContext, FlowRunContext, TagsContext
-from prefect.exceptions import Abort, Pause, PrefectException, UpstreamTaskError
+from prefect.exceptions import (
+    Abort,
+    Pause,
+    PrefectException,
+    TerminationSignal,
+    UpstreamTaskError,
+)
 from prefect.flows import Flow, load_flow_from_entrypoint, load_flow_from_flow_run
 from prefect.futures import PrefectFuture, resolve_futures_to_states
 from prefect.logging.loggers import (
@@ -39,7 +45,7 @@ from prefect.logging.loggers import (
     get_run_logger,
     patch_print,
 )
-from prefect.results import ResultFactory
+from prefect.results import BaseResult, ResultFactory
 from prefect.settings import PREFECT_DEBUG_MODE
 from prefect.states import (
     Failed,
@@ -50,6 +56,7 @@ from prefect.states import (
     exception_to_failed_state,
     return_value_to_state,
 )
+from prefect.utilities.annotations import NotSet
 from prefect.utilities.asyncutils import run_coro_as_sync
 from prefect.utilities.callables import (
     call_with_parameters,
@@ -72,6 +79,10 @@ P = ParamSpec("P")
 R = TypeVar("R")
 
 
+class FlowRunTimeoutError(TimeoutError):
+    """Raised when a flow run exceeds its defined timeout."""
+
+
 def load_flow_and_flow_run(flow_run_id: UUID) -> Tuple[FlowRun, Flow]:
     ## TODO: add error handling to update state and log tracebacks
     entrypoint = os.environ.get("PREFECT__FLOW_ENTRYPOINT")
@@ -95,6 +106,10 @@ class FlowRunEngine(Generic[P, R]):
     flow_run_id: Optional[UUID] = None
     logger: logging.Logger = field(default_factory=lambda: get_logger("engine"))
     wait_for: Optional[Iterable[PrefectFuture]] = None
+    # holds the return value from the user code
+    _return_value: Union[R, Type[NotSet]] = NotSet
+    # holds the exception raised by the user code, if any
+    _raised: Union[Exception, Type[NotSet]] = NotSet
     _is_started: bool = False
     _client: Optional[SyncPrefectClient] = None
     short_circuit: bool = False
@@ -208,6 +223,30 @@ class FlowRunEngine(Generic[P, R]):
         return state
 
     def result(self, raise_on_failure: bool = True) -> "Union[R, State, None]":
+        if self._return_value is not NotSet and not isinstance(
+            self._return_value, State
+        ):
+            if isinstance(self._return_value, BaseResult):
+                _result = self._return_value.get()
+            else:
+                _result = self._return_value
+
+            if inspect.isawaitable(_result):
+                # getting the value for a BaseResult may return an awaitable
+                # depending on whether the parent frame is sync or not
+                _result = run_coro_as_sync(_result)
+            return _result
+
+        if self._raised is not NotSet:
+            if raise_on_failure:
+                raise self._raised
+            return self._raised
+
+        # This is a fall through case which leans on the existing state result mechanics to get the
+        # return value. This is necessary because we currently will return a State object if the
+        # the State was Prefect-created.
+        # TODO: Remove the need to get the result from a State except in cases where the return value
+        # is a State object.
         _result = self.state.result(raise_on_failure=raise_on_failure, fetch=True)  # type: ignore
         # state.result is a `sync_compatible` function that may or may not return an awaitable
         # depending on whether the parent frame is sync or not
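The new `_return_value` and `_raised` fields above let `result()` answer from memory before falling back to the persisted `State`. A simplified, standalone sketch of that lookup order, using stand-in types rather than the real engine classes:

```python
# Simplified sketch of the result() precedence introduced above: an in-memory
# return value wins, then a captured exception, and only then do we fall back
# to whatever the persisted state holds. `NotSet` here is a stand-in sentinel.
from typing import Any

NotSet = object()  # stand-in for prefect.utilities.annotations.NotSet

class MiniEngine:
    def __init__(self) -> None:
        self._return_value: Any = NotSet
        self._raised: Any = NotSet
        self._state_result: Any = "value recovered from the persisted State"

    def result(self, raise_on_failure: bool = True) -> Any:
        if self._return_value is not NotSet:
            return self._return_value    # fast path: no State round-trip
        if self._raised is not NotSet:
            if raise_on_failure:
                raise self._raised
            return self._raised          # surface the exception as a value
        return self._state_result        # fall-through: ask the State

engine = MiniEngine()
engine._return_value = 42
print(engine.result())  # 42, without consulting the State
```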
@@ -219,13 +258,15 @@ class FlowRunEngine(Generic[P, R]):
         result_factory = getattr(FlowRunContext.get(), "result_factory", None)
         if result_factory is None:
             raise ValueError("Result factory is not set")
+        resolved_result = resolve_futures_to_states(result)
         terminal_state = run_coro_as_sync(
             return_value_to_state(
-                resolve_futures_to_states(result),
+                resolved_result,
                 result_factory=result_factory,
             )
         )
         self.set_state(terminal_state)
+        self._return_value = resolved_result
         return result
 
     def handle_exception(
@@ -252,10 +293,16 @@ class FlowRunEngine(Generic[P, R]):
                 ),
             )
             state = self.set_state(Running())
+        self._raised = exc
         return state
 
     def handle_timeout(self, exc: TimeoutError) -> None:
-        message = f"Flow run exceeded timeout of {self.flow.timeout_seconds} seconds"
+        if isinstance(exc, FlowRunTimeoutError):
+            message = (
+                f"Flow run exceeded timeout of {self.flow.timeout_seconds} second(s)"
+            )
+        else:
+            message = f"Flow run failed due to timeout: {exc!r}"
         self.logger.error(message)
         state = Failed(
             data=exc,
@@ -263,12 +310,14 @@ class FlowRunEngine(Generic[P, R]):
             name="TimedOut",
         )
         self.set_state(state)
+        self._raised = exc
 
     def handle_crash(self, exc: BaseException) -> None:
         state = run_coro_as_sync(exception_to_crashed_state(exc))
         self.logger.error(f"Crash detected! {state.message}")
         self.logger.debug("Crash details:", exc_info=exc)
         self.set_state(state, force=True)
+        self._raised = exc
 
     def load_subflow_run(
         self,
@@ -315,7 +364,9 @@ class FlowRunEngine(Generic[P, R]):
             limit=1,
         )
         if flow_runs:
-            return flow_runs[-1]
+            loaded_flow_run = flow_runs[-1]
+            self._return_value = loaded_flow_run.state
+            return loaded_flow_run
 
     def create_flow_run(self, client: SyncPrefectClient) -> FlowRun:
         flow_run_ctx = FlowRunContext.get()
@@ -363,7 +414,7 @@ class FlowRunEngine(Generic[P, R]):
 
         return flow_run
 
-    def call_hooks(self, state: State = None) -> Iterable[Callable]:
+    def call_hooks(self, state: Optional[State] = None):
         if state is None:
             state = self.state
         flow = self.flow
@@ -504,6 +555,11 @@ class FlowRunEngine(Generic[P, R]):
             )
             try:
                 yield self
+
+            except TerminationSignal as exc:
+                self.cancel_all_tasks()
+                self.handle_crash(exc)
+                raise
             except Exception:
                 # regular exceptions are caught and re-raised to the user
                 raise
@@ -539,6 +595,10 @@ class FlowRunEngine(Generic[P, R]):
             return False  # TODO: handle this differently?
         return getattr(self, "flow_run").state.is_pending()
 
+    def cancel_all_tasks(self):
+        if hasattr(self.flow.task_runner, "cancel_all"):
+            self.flow.task_runner.cancel_all()  # type: ignore
+
     # --------------------------
     #
     # The following methods compose the main task run loop
@@ -552,11 +612,7 @@ class FlowRunEngine(Generic[P, R]):
 
         if self.state.is_running():
             self.call_hooks()
-        try:
-            yield
-        finally:
-            if self.state.is_final() or self.state.is_cancelling():
-                self.call_hooks()
+        yield
 
     @contextmanager
     def run_context(self):
@@ -564,7 +620,10 @@ class FlowRunEngine(Generic[P, R]):
         # reenter the run context to ensure it is up to date for every run
         with self.setup_run_context():
             try:
-                with timeout_context(seconds=self.flow.timeout_seconds):
+                with timeout_context(
+                    seconds=self.flow.timeout_seconds,
+                    timeout_exc_type=FlowRunTimeoutError,
+                ):
                     self.logger.debug(
                         f"Executing flow {self.flow.name!r} for flow run {self.flow_run.name!r}..."
                     )
@@ -572,8 +631,11 @@ class FlowRunEngine(Generic[P, R]):
             except TimeoutError as exc:
                 self.handle_timeout(exc)
             except Exception as exc:
-                self.logger.exception(f"Encountered exception during execution: {exc}")
+                self.logger.exception("Encountered exception during execution: %r", exc)
                 self.handle_exception(exc)
+            finally:
+                if self.state.is_final() or self.state.is_cancelling():
+                    self.call_hooks()
 
     def call_flow_fn(self) -> Union[R, Coroutine[Any, Any, R]]:
         """
prefect/flows.py CHANGED
@@ -5,6 +5,7 @@ Module containing the base workflow class and decorator - for most use cases, us
 # This file requires type-checking with pyright because mypy does not yet support PEP612
 # See https://github.com/python/mypy/issues/8645
 import ast
+import asyncio
 import datetime
 import importlib.util
 import inspect
@@ -28,6 +29,8 @@ from typing import (
     List,
     NoReturn,
     Optional,
+    Set,
+    Tuple,
     Type,
     TypeVar,
     Union,
@@ -44,7 +47,9 @@ from pydantic.v1.errors import ConfigError  # TODO
 from rich.console import Console
 from typing_extensions import Literal, ParamSpec, Self
 
-from prefect._internal.compatibility.deprecated import deprecated_parameter
+from prefect._internal.compatibility.deprecated import (
+    deprecated_parameter,
+)
 from prefect._internal.concurrency.api import create_call, from_async
 from prefect.blocks.core import Block
 from prefect.client.orchestration import get_client
@@ -60,6 +65,7 @@ from prefect.exceptions import (
     MissingFlowError,
     ObjectNotFound,
     ParameterTypeError,
+    ScriptError,
     UnspecifiedFlowError,
 )
 from prefect.filesystems import LocalFileSystem, ReadableDeploymentStorage
@@ -187,7 +193,7 @@ class Flow(Generic[P, R]):
         timeout_seconds: Union[int, float, None] = None,
         validate_parameters: bool = True,
         persist_result: Optional[bool] = None,
-        result_storage: Optional[ResultStorage] = None,
+        result_storage: Optional[Union[ResultStorage, str]] = None,
         result_serializer: Optional[ResultSerializer] = None,
         cache_result_in_memory: bool = True,
         log_prints: Optional[bool] = None,
@@ -335,7 +341,18 @@ class Flow(Generic[P, R]):
                 "Disable validation or change the argument names."
             ) from exc
 
+        # result persistence settings
+        if persist_result is None:
+            if result_storage is not None or result_serializer is not None:
+                persist_result = True
+
         self.persist_result = persist_result
+        if result_storage and not isinstance(result_storage, str):
+            if getattr(result_storage, "_block_document_id", None) is None:
+                raise TypeError(
+                    "Result storage configuration must be persisted server-side."
+                    " Please call `.save()` on your block before passing it in."
+                )
         self.result_storage = result_storage
         self.result_serializer = result_serializer
         self.cache_result_in_memory = cache_result_in_memory
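The result-persistence block above changes the default: leaving `persist_result` unset now implies persistence as soon as a result storage or serializer is configured, and an unsaved storage block is rejected up front. A simplified stand-in for that decision logic (not the actual `Flow.__init__`):

```python
# Simplified stand-in for the new default: persist_result=None becomes True
# whenever result_storage or result_serializer is supplied; an explicit value
# is left untouched.
from typing import Optional

def resolve_persist_result(
    persist_result: Optional[bool],
    result_storage: Optional[str] = None,
    result_serializer: Optional[str] = None,
) -> Optional[bool]:
    if persist_result is None and (
        result_storage is not None or result_serializer is not None
    ):
        return True
    return persist_result

print(resolve_persist_result(None))                            # None -> unchanged
print(resolve_persist_result(None, result_storage="s3/dev"))   # True
print(resolve_persist_result(False, result_storage="s3/dev"))  # False, explicit value wins
```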
@@ -767,8 +784,7 @@ class Flow(Generic[P, R]):
             self.on_failure_hooks.append(fn)
             return fn
 
-    @sync_compatible
-    async def serve(
+    def serve(
         self,
         name: Optional[str] = None,
         interval: Optional[
@@ -873,7 +889,7 @@ class Flow(Generic[P, R]):
            name = Path(name).stem
 
        runner = Runner(name=name, pause_on_shutdown=pause_on_shutdown, limit=limit)
-        deployment_id = await runner.add_flow(
+        deployment_id = runner.add_flow(
            self,
            name=name,
            triggers=triggers,
@@ -906,15 +922,27 @@ class Flow(Generic[P, R]):
 
            console = Console()
            console.print(help_message, soft_wrap=True)
-        await runner.start(webserver=webserver)
+
+        try:
+            loop = asyncio.get_running_loop()
+        except RuntimeError as exc:
+            if "no running event loop" in str(exc):
+                loop = None
+            else:
+                raise
+
+        if loop is not None:
+            loop.run_until_complete(runner.start(webserver=webserver))
+        else:
+            asyncio.run(runner.start(webserver=webserver))
 
     @classmethod
     @sync_compatible
     async def from_source(
-        cls: Type[F],
+        cls: Type["Flow[P, R]"],
         source: Union[str, "RunnerStorage", ReadableDeploymentStorage],
         entrypoint: str,
-    ) -> F:
+    ) -> "Flow[P, R]":
         """
         Loads a flow from a remote source.
 
@@ -960,6 +988,29 @@ class Flow(Generic[P, R]):
 
             my_flow()
             ```
+
+        Load a flow from a local directory:
+
+        ``` python
+        # from_local_source.py
+
+        from pathlib import Path
+        from prefect import flow
+
+        @flow(log_prints=True)
+        def my_flow(name: str = "world"):
+            print(f"Hello {name}! I'm a flow from a Python script!")
+
+        if __name__ == "__main__":
+            my_flow.from_source(
+                source=str(Path(__file__).parent),
+                entrypoint="from_local_source.py:my_flow",
+            ).deploy(
+                name="my-deployment",
+                parameters=dict(name="Marvin"),
+                work_pool_name="local",
+            )
+        ```
         """
 
         from prefect.runner.storage import (
@@ -969,7 +1020,9 @@ class Flow(Generic[P, R]):
             create_storage_from_source,
         )
 
-        if isinstance(source, str):
+        if isinstance(source, (Path, str)):
+            if isinstance(source, Path):
+                source = str(source)
             storage = create_storage_from_source(source)
         elif isinstance(source, RunnerStorage):
             storage = source
@@ -988,7 +1041,7 @@ class Flow(Generic[P, R]):
             await storage.pull_code()
 
         full_entrypoint = str(storage.destination / entrypoint)
-        flow: "Flow" = await from_async.wait_for_call_in_new_thread(
+        flow: Flow = await from_async.wait_for_call_in_new_thread(
             create_call(load_flow_from_entrypoint, full_entrypoint)
         )
         flow._storage = storage
@@ -1115,7 +1168,13 @@ class Flow(Generic[P, R]):
            )
            ```
        """
-        work_pool_name = work_pool_name or PREFECT_DEFAULT_WORK_POOL_NAME.value()
+        if not (
+            work_pool_name := work_pool_name or PREFECT_DEFAULT_WORK_POOL_NAME.value()
+        ):
+            raise ValueError(
+                "No work pool name provided. Please provide a `work_pool_name` or set the"
+                " `PREFECT_DEFAULT_WORK_POOL_NAME` environment variable."
+            )
 
        try:
            async with get_client() as client:
@@ -1146,9 +1205,9 @@ class Flow(Generic[P, R]):
            entrypoint_type=entrypoint_type,
        )
 
-        from prefect.deployments import runner
+        from prefect.deployments.runner import deploy
 
-        deployment_ids = await runner.deploy(
+        deployment_ids = await deploy(
            deployment,
            work_pool_name=work_pool_name,
            image=image,
@@ -1672,6 +1731,14 @@ def load_flow_from_entrypoint(
        raise MissingFlowError(
            f"Flow function with name {func_name!r} not found in {path!r}. "
        ) from exc
+    except ScriptError as exc:
+        # If the flow has dependencies that are not installed in the current
+        # environment, fallback to loading the flow via AST parsing. The
+        # drawback of this approach is that we're unable to actually load the
+        # function, so we create a placeholder flow that will re-raise this
+        # exception when called.
+
+        flow = load_placeholder_flow(entrypoint=entrypoint, raises=exc)
    if not isinstance(flow, Flow):
        raise MissingFlowError(
@@ -1682,14 +1749,13 @@
    return flow
 
 
-@sync_compatible
-async def serve(
+def serve(
    *args: "RunnerDeployment",
    pause_on_shutdown: bool = True,
    print_starting_message: bool = True,
    limit: Optional[int] = None,
    **kwargs,
-) -> NoReturn:
+):
    """
    Serve the provided list of deployments.
 
@@ -1739,7 +1805,7 @@ async def serve(
 
    runner = Runner(pause_on_shutdown=pause_on_shutdown, limit=limit, **kwargs)
    for deployment in args:
-        await runner.add_deployment(deployment)
+        runner.add_deployment(deployment)
 
    if print_starting_message:
        help_message_top = (
@@ -1770,7 +1836,18 @@ async def serve(
            Group(help_message_top, table, help_message_bottom), soft_wrap=True
        )
 
-    await runner.start()
+    try:
+        loop = asyncio.get_running_loop()
+    except RuntimeError as exc:
+        if "no running event loop" in str(exc):
+            loop = None
+        else:
+            raise
+
+    if loop is not None:
+        loop.run_until_complete(runner.start())
+    else:
+        asyncio.run(runner.start())
 
 
 @client_injector
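Both `serve()` and `Flow.serve()` drop `@sync_compatible` and become plain synchronous functions that drive the async runner themselves. The sketch below mirrors the event-loop dispatch from the diff, with a hypothetical `start_runner` coroutine standing in for `runner.start()`; note that `run_until_complete` on a loop that is already running only succeeds if that loop has been made re-entrant (e.g. via `nest_asyncio`):

```python
# Sketch of the event-loop dispatch now used by the synchronous serve():
# if a loop is already running, drive the coroutine on it; otherwise start a
# fresh loop with asyncio.run(). `start_runner` is a hypothetical stand-in.
import asyncio

async def start_runner() -> str:
    return "runner started"

def serve_sync() -> str:
    try:
        loop = asyncio.get_running_loop()
    except RuntimeError as exc:
        if "no running event loop" in str(exc):
            loop = None
        else:
            raise

    if loop is not None:
        # Mirrors the diff; only works on a re-entrant (patched) loop.
        return loop.run_until_complete(start_runner())
    return asyncio.run(start_runner())

print(serve_sync())  # called from sync code: falls through to asyncio.run()
```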
@@ -1852,24 +1929,138 @@ async def load_flow_from_flow_run(
    return flow
 
 
-def load_flow_argument_from_entrypoint(
-    entrypoint: str, arg: str = "name"
-) -> Optional[str]:
+def load_placeholder_flow(entrypoint: str, raises: Exception):
    """
-    Extract a flow argument from an entrypoint string.
+    Load a placeholder flow that is initialized with the same arguments as the
+    flow specified in the entrypoint. If called the flow will raise `raises`.
 
-    Loads the source code of the entrypoint and extracts the flow argument from the
-    `flow` decorator.
+    This is useful when a flow can't be loaded due to missing dependencies or
+    other issues but the base metadata defining the flow is still needed.
 
    Args:
-        entrypoint: a string in the format `<path_to_script>:<flow_func_name>` or a module path
-            to a flow function
+        entrypoint: a string in the format `<path_to_script>:<flow_func_name>`
+          or a module path to a flow function
+        raises: an exception to raise when the flow is called
+    """
+
+    def _base_placeholder():
+        raise raises
+
+    def sync_placeholder_flow(*args, **kwargs):
+        _base_placeholder()
+
+    async def async_placeholder_flow(*args, **kwargs):
+        _base_placeholder()
+
+    placeholder_flow = (
+        async_placeholder_flow
+        if is_entrypoint_async(entrypoint)
+        else sync_placeholder_flow
+    )
+
+    arguments = load_flow_arguments_from_entrypoint(entrypoint)
+    arguments["fn"] = placeholder_flow
+
+    return Flow(**arguments)
+
+
+def load_flow_arguments_from_entrypoint(
+    entrypoint: str, arguments: Optional[Union[List[str], Set[str]]] = None
+) -> dict[str, Any]:
+    """
+    Extract flow arguments from an entrypoint string.
+
+    Loads the source code of the entrypoint and extracts the flow arguments
+    from the `flow` decorator.
+
+    Args:
+        entrypoint: a string in the format `<path_to_script>:<flow_func_name>`
+          or a module path to a flow function
+    """
+
+    func_def, source_code = _entrypoint_definition_and_source(entrypoint)
+
+    if arguments is None:
+        # If no arguments are provided default to known arguments that are of
+        # built-in types.
+        arguments = {
+            "name",
+            "version",
+            "retries",
+            "retry_delay_seconds",
+            "description",
+            "timeout_seconds",
+            "validate_parameters",
+            "persist_result",
+            "cache_result_in_memory",
+            "log_prints",
+        }
+
+    result = {}
+
+    for decorator in func_def.decorator_list:
+        if (
+            isinstance(decorator, ast.Call)
+            and getattr(decorator.func, "id", "") == "flow"
+        ):
+            for keyword in decorator.keywords:
+                if keyword.arg not in arguments:
+                    continue
+
+                if isinstance(keyword.value, ast.Constant):
+                    # Use the string value of the argument
+                    result[keyword.arg] = str(keyword.value.value)
+                    continue
+
+                # if the arg value is not a raw str (i.e. a variable or expression),
+                # then attempt to evaluate it
+                namespace = safe_load_namespace(source_code)
+                literal_arg_value = ast.get_source_segment(source_code, keyword.value)
+                cleaned_value = (
+                    literal_arg_value.replace("\n", "") if literal_arg_value else ""
+                )
+
+                try:
+                    evaluated_value = eval(cleaned_value, namespace)  # type: ignore
+                    result[keyword.arg] = str(evaluated_value)
+                except Exception as e:
+                    logger.info(
+                        "Failed to parse @flow argument: `%s=%s` due to the following error. Ignoring and falling back to default behavior.",
+                        keyword.arg,
+                        literal_arg_value,
+                        exc_info=e,
+                    )
+                    # ignore the decorator arg and fallback to default behavior
+                    continue
+
+    if "name" in arguments and "name" not in result:
+        # If no matching decorator or keyword argument for `name' is found
+        # fallback to the function name.
+        result["name"] = func_def.name.replace("_", "-")
+
+    return result
+
+
+def is_entrypoint_async(entrypoint: str) -> bool:
+    """
+    Determine if the function specified in the entrypoint is asynchronous.
+
+    Args:
+        entrypoint: A string in the format `<path_to_script>:<func_name>` or
+            a module path to a function.
 
    Returns:
-        The flow argument value
+        True if the function is asynchronous, False otherwise.
    """
+    func_def, _ = _entrypoint_definition_and_source(entrypoint)
+    return isinstance(func_def, ast.AsyncFunctionDef)
+
+
+def _entrypoint_definition_and_source(
+    entrypoint: str,
+) -> Tuple[Union[ast.FunctionDef, ast.AsyncFunctionDef], str]:
    if ":" in entrypoint:
-        # split by the last colon once to handle Windows paths with drive letters i.e C:\path\to\file.py:do_stuff
+        # Split by the last colon once to handle Windows paths with drive letters i.e C:\path\to\file.py:do_stuff
        path, func_name = entrypoint.rsplit(":", maxsplit=1)
        source_code = Path(path).read_text()
    else:
@@ -1878,6 +2069,7 @@ def load_flow_argument_from_entrypoint(
        if not spec or not spec.origin:
            raise ValueError(f"Could not find module {path!r}")
        source_code = Path(spec.origin).read_text()
+
    parsed_code = ast.parse(source_code)
    func_def = next(
        (
@@ -1894,42 +2086,8 @@ def load_flow_argument_from_entrypoint(
        ),
        None,
    )
+
    if not func_def:
        raise ValueError(f"Could not find flow {func_name!r} in {path!r}")
-    for decorator in func_def.decorator_list:
-        if (
-            isinstance(decorator, ast.Call)
-            and getattr(decorator.func, "id", "") == "flow"
-        ):
-            for keyword in decorator.keywords:
-                if keyword.arg == arg:
-                    if isinstance(keyword.value, ast.Constant):
-                        return (
-                            keyword.value.value
-                        )  # Return the string value of the argument
-
-                    # if the arg value is not a raw str (i.e. a variable or expression),
-                    # then attempt to evaluate it
-                    namespace = safe_load_namespace(source_code)
-                    literal_arg_value = ast.get_source_segment(
-                        source_code, keyword.value
-                    )
-                    try:
-                        evaluated_value = eval(literal_arg_value, namespace)  # type: ignore
-                    except Exception as e:
-                        logger.info(
-                            "Failed to parse @flow argument: `%s=%s` due to the following error. Ignoring and falling back to default behavior.",
-                            arg,
-                            literal_arg_value,
-                            exc_info=e,
-                        )
-                        # ignore the decorator arg and fallback to default behavior
-                        break
-                    return str(evaluated_value)
-
-    if arg == "name":
-        return func_name.replace(
-            "_", "-"
-        )  # If no matching decorator or keyword argument is found
 
-    return None
+    return func_def, source_code
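The placeholder-flow machinery above leans on parsing the entrypoint's source rather than importing it, so flows with missing dependencies can still expose their metadata. A self-contained sketch of the same AST technique, using a hypothetical in-memory module as the source:

```python
# Standalone sketch of the AST-based extraction used by
# load_flow_arguments_from_entrypoint: parse the source, locate the decorated
# function, and read constant keyword arguments off the @flow(...) call
# without ever importing the module (so missing dependencies don't matter).
import ast

source = '''
from prefect import flow

@flow(name="etl", retries=2, log_prints=True)
def my_flow():
    ...
'''

tree = ast.parse(source)
func_def = next(
    node
    for node in ast.walk(tree)
    if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef))
    and node.name == "my_flow"
)

args = {}
for decorator in func_def.decorator_list:
    if isinstance(decorator, ast.Call) and getattr(decorator.func, "id", "") == "flow":
        for keyword in decorator.keywords:
            if isinstance(keyword.value, ast.Constant):
                args[keyword.arg] = str(keyword.value.value)

print(args)  # {'name': 'etl', 'retries': '2', 'log_prints': 'True'}
```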