prefect-client 2.19.8__py3-none-any.whl → 2.20.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -139,12 +139,40 @@ class MicrosoftTeamsWebhook(AppriseNotificationBlock):
  url: SecretStr = Field(
  ...,
  title="Webhook URL",
- description="The Teams incoming webhook URL used to send notifications.",
+ description="The Microsoft Power Automate (Workflows) URL used to send notifications to Teams.",
  examples=[
- "https://your-org.webhook.office.com/webhookb2/XXX/IncomingWebhook/YYY/ZZZ"
+ "https://prod-NO.LOCATION.logic.azure.com:443/workflows/WFID/triggers/manual/paths/invoke?sp=%2Ftriggers%2Fmanual%2Frun&sv=1.0&sig=SIGNATURE"
  ],
  )

+ include_image: bool = Field(
+ default=True,
+ description="Include an image with the notification.",
+ )
+
+ wrap: bool = Field(
+ default=True,
+ description="Wrap the notification text.",
+ )
+
+ def block_initialization(self) -> None:
+ """see https://github.com/caronc/apprise/pull/1172"""
+ from apprise.plugins.workflows import NotifyWorkflows
+
+ if not (
+ parsed_url := NotifyWorkflows.parse_native_url(self.url.get_secret_value())
+ ):
+ raise ValueError("Invalid Microsoft Teams Workflow URL provided.")
+
+ parsed_url.update(
+ {
+ "include_image": self.include_image,
+ "wrap": self.wrap,
+ }
+ )
+
+ self._start_apprise_client(SecretStr(NotifyWorkflows(**parsed_url).url()))
+

  class PagerDutyWebHook(AbstractAppriseNotificationBlock):
  """
@@ -17,6 +17,7 @@ import pendulum

  from prefect._internal.compatibility.deprecated import (
  DeprecatedInfraOverridesField,
+ deprecated_class,
  )
  from prefect._internal.pydantic import HAS_PYDANTIC_V2
  from prefect.types import NonNegativeInteger, PositiveInteger
@@ -917,6 +918,10 @@ class Flow(ObjectBaseModel):
  return raise_on_name_with_banned_characters(v)


+ @deprecated_class(
+ start_date="Jun 2024",
+ help="Will be removed in Prefect 3 in favor of prefect.client.schemas.actions.DeploymentScheduleCreate",
+ )
  class MinimalDeploymentSchedule(PrefectBaseModel):
  schedule: SCHEDULE_TYPES = Field(
  default=..., description="The schedule for the deployment."
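A rough migration sketch toward the replacement model named in the deprecation help; exact fields of DeploymentScheduleCreate may differ from this assumption:

from prefect.client.schemas.actions import DeploymentScheduleCreate
from prefect.client.schemas.schedules import CronSchedule

# Instead of the deprecated MinimalDeploymentSchedule, build the create-action model directly.
new_schedule = DeploymentScheduleCreate(
    schedule=CronSchedule(cron="0 9 * * *"),
    active=True,
)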
@@ -238,12 +238,17 @@ async def run_deployment(
  return flow_run


+ @deprecated_callable(
+ start_date="Jun 2024",
+ help="Will be moved in Prefect 3 to prefect.flows:load_flow_from_flow_run",
+ )
  @inject_client
  async def load_flow_from_flow_run(
  flow_run: FlowRun,
  client: PrefectClient,
  ignore_storage: bool = False,
  storage_base_path: Optional[str] = None,
+ use_placeholder_flow: bool = True,
  ) -> Flow:
  """
  Load a flow from the location/script provided in a deployment's storage document.
@@ -270,7 +275,7 @@ async def load_flow_from_flow_run(
  f"Importing flow code from module path {deployment.entrypoint}"
  )
  flow = await run_sync_in_worker_thread(
- load_flow_from_entrypoint, deployment.entrypoint
+ load_flow_from_entrypoint, deployment.entrypoint, use_placeholder_flow
  )
  return flow

@@ -314,7 +319,9 @@ async def load_flow_from_flow_run(
  ).absolute()
  run_logger.debug(f"Importing flow code from '{import_path}'")

- flow = await run_sync_in_worker_thread(load_flow_from_entrypoint, str(import_path))
+ flow = await run_sync_in_worker_thread(
+ load_flow_from_entrypoint, str(import_path), use_placeholder_flow
+ )

  return flow
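A small sketch of the new keyword, assuming an existing flow run ID; with use_placeholder_flow=False, load failures raise instead of yielding a placeholder flow:

from prefect.client.orchestration import get_client
from prefect.deployments.deployments import load_flow_from_flow_run

async def load_exact_flow(flow_run_id):
    async with get_client() as client:
        flow_run = await client.read_flow_run(flow_run_id)
        # Raise on import/dependency errors instead of returning a placeholder flow.
        return await load_flow_from_flow_run(
            flow_run, client=client, use_placeholder_flow=False
        )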
prefect/engine.py CHANGED
@@ -107,13 +107,16 @@ from uuid import UUID, uuid4

  import anyio
  import pendulum
- from anyio import start_blocking_portal
+ from anyio.from_thread import start_blocking_portal
  from typing_extensions import Literal

  import prefect
  import prefect.context
  import prefect.plugins
- from prefect._internal.compatibility.deprecated import deprecated_parameter
+ from prefect._internal.compatibility.deprecated import (
+ deprecated_callable,
+ deprecated_parameter,
+ )
  from prefect._internal.compatibility.experimental import experimental_parameter
  from prefect._internal.concurrency.api import create_call, from_async, from_sync
  from prefect._internal.concurrency.calls import get_current_call
@@ -208,6 +211,7 @@ from prefect.utilities.engine import (
  _resolve_custom_task_run_name,
  capture_sigterm,
  check_api_reachable,
+ collapse_excgroups,
  collect_task_run_inputs,
  emit_task_run_state_change_event,
  propose_state,
@@ -276,7 +280,7 @@ def enter_flow_run_engine_from_flow_call(
  # the user. Generally, you should enter contexts _within_ the async `begin_run`
  # instead but if you need to enter a context from the main thread you'll need to do
  # it here.
- contexts = [capture_sigterm()]
+ contexts = [capture_sigterm(), collapse_excgroups()]

  if flow.isasync and (
  not is_subflow_run or (is_subflow_run and parent_flow_run_context.flow.isasync)
@@ -321,7 +325,7 @@ def enter_flow_run_engine_from_subprocess(flow_run_id: UUID) -> State:
  flow_run_id,
  user_thread=threading.current_thread(),
  ),
- contexts=[capture_sigterm()],
+ contexts=[capture_sigterm(), collapse_excgroups()],
  )

  APILogHandler.flush()
@@ -417,9 +421,12 @@ async def retrieve_flow_then_begin_flow_run(

  try:
  flow = (
- load_flow_from_entrypoint(entrypoint)
+ # We do not want to use a placeholder flow at runtime
+ load_flow_from_entrypoint(entrypoint, use_placeholder_flow=False)
  if entrypoint
- else await load_flow_from_flow_run(flow_run, client=client)
+ else await load_flow_from_flow_run(
+ flow_run, client=client, use_placeholder_flow=False
+ )
  )
  except Exception:
  message = (
@@ -975,6 +982,10 @@ async def orchestrate_flow_run(
  return state


+ @deprecated_callable(
+ start_date="Jun 2024",
+ help="Will be moved in Prefect 3 to prefect.flow_runs:pause_flow_run",
+ )
  @overload
  async def pause_flow_run(
  wait_for_input: None = None,
@@ -987,6 +998,10 @@ async def pause_flow_run(
  ...


+ @deprecated_callable(
+ start_date="Jun 2024",
+ help="Will be moved in Prefect 3 to prefect.flow_runs:pause_flow_run",
+ )
  @overload
  async def pause_flow_run(
  wait_for_input: Type[T],
@@ -1095,6 +1110,10 @@ async def pause_flow_run(
  )


+ @deprecated_callable(
+ start_date="Jun 2024",
+ help="Will be moved in Prefect 3 to prefect.flow_runs:_in_process_pause",
+ )
  @inject_client
  async def _in_process_pause(
  timeout: int = 3600,
@@ -1190,6 +1209,10 @@ async def _in_process_pause(
  raise FlowPauseTimeout("Flow run was paused and never resumed.")


+ @deprecated_callable(
+ start_date="Jun 2024",
+ help="Will be moved in Prefect 3 to prefect.flow_runs.pause_flow_run.",
+ )
  @inject_client
  async def _out_of_process_pause(
  flow_run_id: UUID,
@@ -1212,6 +1235,10 @@ async def _out_of_process_pause(
  raise RuntimeError(response.details.reason)


+ @deprecated_callable(
+ start_date="Jun 2024",
+ help="Will be moved in Prefect 3 to prefect.flow_runs:suspend_flow_run",
+ )
  @overload
  async def suspend_flow_run(
  wait_for_input: None = None,
@@ -1343,6 +1370,10 @@ async def suspend_flow_run(
  raise Pause()


+ @deprecated_callable(
+ start_date="Jun 2024",
+ help="Will be moved in Prefect 3 to prefect.flow_runs:resume_flow_run",
+ )
  @sync_compatible
  async def resume_flow_run(flow_run_id, run_input: Optional[Dict] = None):
  """
@@ -2218,9 +2249,9 @@ async def report_flow_run_crashes(flow_run: FlowRun, client: PrefectClient, flow

  This context _must_ reraise the exception to properly exit the run.
  """
-
  try:
- yield
+ with collapse_excgroups():
+ yield
  except (Abort, Pause):
  # Do not capture internal signals as crashes
  raise
@@ -2257,7 +2288,8 @@ async def report_task_run_crashes(task_run: TaskRun, client: PrefectClient):
  This context _must_ reraise the exception to properly exit the run.
  """
  try:
- yield
+ with collapse_excgroups():
+ yield
  except (Abort, Pause):
  # Do not capture internal signals as crashes
  raise
prefect/flows.py CHANGED
@@ -1650,6 +1650,7 @@ def load_flow_from_script(path: str, flow_name: str = None) -> Flow:

  def load_flow_from_entrypoint(
  entrypoint: str,
+ use_placeholder_flow: bool = True,
  ) -> Flow:
  """
  Extract a flow object from a script at an entrypoint by running all of the code in the file.
@@ -1657,6 +1658,9 @@ def load_flow_from_entrypoint(
  Args:
  entrypoint: a string in the format `<path_to_script>:<flow_func_name>` or a module path
  to a flow function
+ use_placeholder_flow: If True, a placeholder flow will be used if the entrypoint
+ cannot be loaded for any reason (e.g. dependencies are missing). If False, an
+ exception will be raised.

  Returns:
  The flow object from the script
@@ -1672,6 +1676,7 @@ def load_flow_from_entrypoint(
  if ":" in entrypoint:
  # split by the last colon once to handle Windows paths with drive letters i.e C:\path\to\file.py:do_stuff
  path, func_name = entrypoint.rsplit(":", maxsplit=1)
+
  else:
  path, func_name = entrypoint.rsplit(".", maxsplit=1)
  try:
@@ -1680,14 +1685,15 @@ def load_flow_from_entrypoint(
  raise MissingFlowError(
  f"Flow function with name {func_name!r} not found in {path!r}. "
  ) from exc
- except ScriptError as exc:
+ except ScriptError:
  # If the flow has dependencies that are not installed in the current
- # environment, fallback to loading the flow via AST parsing. The
- # drawback of this approach is that we're unable to actually load the
- # function, so we create a placeholder flow that will re-raise this
- # exception when called.
-
- flow = load_placeholder_flow(entrypoint=entrypoint, raises=exc)
+ # environment, fallback to loading the flow via AST parsing.
+ if use_placeholder_flow:
+ flow = safe_load_flow_from_entrypoint(entrypoint)
+ if flow is None:
+ raise
+ else:
+ raise

  if not isinstance(flow, Flow):
  raise MissingFlowError(
@@ -1848,6 +1854,147 @@ def load_placeholder_flow(entrypoint: str, raises: Exception):
  return Flow(**arguments)


+ def safe_load_flow_from_entrypoint(entrypoint: str) -> Optional[Flow]:
+ """
+ Load a flow from an entrypoint and return None if an exception is raised.
+
+ Args:
+ entrypoint: a string in the format `<path_to_script>:<flow_func_name>`
+ or a module path to a flow function
+ """
+ func_def, source_code = _entrypoint_definition_and_source(entrypoint)
+ path = None
+ if ":" in entrypoint:
+ path = entrypoint.rsplit(":")[0]
+ namespace = safe_load_namespace(source_code, filepath=path)
+ if func_def.name in namespace:
+ return namespace[func_def.name]
+ else:
+ # If the function is not in the namespace, if may be due to missing dependencies
+ # for the function. We will attempt to compile each annotation and default value
+ # and remove them from the function definition to see if the function can be
+ # compiled without them.
+
+ return _sanitize_and_load_flow(func_def, namespace)
+
+
+ def _sanitize_and_load_flow(
+ func_def: Union[ast.FunctionDef, ast.AsyncFunctionDef], namespace: Dict[str, Any]
+ ) -> Optional[Flow]:
+ """
+ Attempt to load a flow from the function definition after sanitizing the annotations
+ and defaults that can't be compiled.
+
+ Args:
+ func_def: the function definition
+ namespace: the namespace to load the function into
+
+ Returns:
+ The loaded function or None if the function can't be loaded
+ after sanitizing the annotations and defaults.
+ """
+ args = func_def.args.posonlyargs + func_def.args.args + func_def.args.kwonlyargs
+ if func_def.args.vararg:
+ args.append(func_def.args.vararg)
+ if func_def.args.kwarg:
+ args.append(func_def.args.kwarg)
+ # Remove annotations that can't be compiled
+ for arg in args:
+ if arg.annotation is not None:
+ try:
+ code = compile(
+ ast.Expression(arg.annotation),
+ filename="<ast>",
+ mode="eval",
+ )
+ exec(code, namespace)
+ except Exception as e:
+ logger.debug(
+ "Failed to evaluate annotation for argument %s due to the following error. Ignoring annotation.",
+ arg.arg,
+ exc_info=e,
+ )
+ arg.annotation = None
+
+ # Remove defaults that can't be compiled
+ new_defaults = []
+ for default in func_def.args.defaults:
+ try:
+ code = compile(ast.Expression(default), "<ast>", "eval")
+ exec(code, namespace)
+ new_defaults.append(default)
+ except Exception as e:
+ logger.debug(
+ "Failed to evaluate default value %s due to the following error. Ignoring default.",
+ default,
+ exc_info=e,
+ )
+ new_defaults.append(
+ ast.Constant(
+ value=None, lineno=default.lineno, col_offset=default.col_offset
+ )
+ )
+ func_def.args.defaults = new_defaults
+
+ # Remove kw_defaults that can't be compiled
+ new_kw_defaults = []
+ for default in func_def.args.kw_defaults:
+ if default is not None:
+ try:
+ code = compile(ast.Expression(default), "<ast>", "eval")
+ exec(code, namespace)
+ new_kw_defaults.append(default)
+ except Exception as e:
+ logger.debug(
+ "Failed to evaluate default value %s due to the following error. Ignoring default.",
+ default,
+ exc_info=e,
+ )
+ new_kw_defaults.append(
+ ast.Constant(
+ value=None,
+ lineno=default.lineno,
+ col_offset=default.col_offset,
+ )
+ )
+ else:
+ new_kw_defaults.append(
+ ast.Constant(
+ value=None,
+ lineno=func_def.lineno,
+ col_offset=func_def.col_offset,
+ )
+ )
+ func_def.args.kw_defaults = new_kw_defaults
+
+ if func_def.returns is not None:
+ try:
+ code = compile(
+ ast.Expression(func_def.returns), filename="<ast>", mode="eval"
+ )
+ exec(code, namespace)
+ except Exception as e:
+ logger.debug(
+ "Failed to evaluate return annotation due to the following error. Ignoring annotation.",
+ exc_info=e,
+ )
+ func_def.returns = None
+
+ # Attempt to compile the function without annotations and defaults that
+ # can't be compiled
+ try:
+ code = compile(
+ ast.Module(body=[func_def], type_ignores=[]),
+ filename="<ast>",
+ mode="exec",
+ )
+ exec(code, namespace)
+ except Exception as e:
+ logger.debug("Failed to compile: %s", e)
+ else:
+ return namespace.get(func_def.name)
+
+
  def load_flow_arguments_from_entrypoint(
  entrypoint: str, arguments: Optional[Union[List[str], Set[str]]] = None
  ) -> Dict[str, Any]:
@@ -1863,6 +2010,9 @@ def load_flow_arguments_from_entrypoint(
  """

  func_def, source_code = _entrypoint_definition_and_source(entrypoint)
+ path = None
+ if ":" in entrypoint:
+ path = entrypoint.rsplit(":")[0]

  if arguments is None:
  # If no arguments are provided default to known arguments that are of
@@ -1898,7 +2048,7 @@ def load_flow_arguments_from_entrypoint(

  # if the arg value is not a raw str (i.e. a variable or expression),
  # then attempt to evaluate it
- namespace = safe_load_namespace(source_code)
+ namespace = safe_load_namespace(source_code, filepath=path)
  literal_arg_value = ast.get_source_segment(source_code, keyword.value)
  cleaned_value = (
  literal_arg_value.replace("\n", "") if literal_arg_value else ""
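To illustrate the new loading paths, a brief sketch using a hypothetical entrypoint "flows/etl.py:daily_etl":

from prefect.flows import load_flow_from_entrypoint, safe_load_flow_from_entrypoint

# Returns the Flow object, or None if executing the file fails (e.g. an import error).
maybe_flow = safe_load_flow_from_entrypoint("flows/etl.py:daily_etl")

# With the new keyword set to False, loading errors propagate instead of being
# replaced by a placeholder flow that re-raises when called.
flow = load_flow_from_entrypoint("flows/etl.py:daily_etl", use_placeholder_flow=False)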
prefect/task_server.py CHANGED
@@ -9,6 +9,7 @@ from functools import partial
  from typing import List, Optional, Type

  import anyio
+ from exceptiongroup import BaseExceptionGroup # novermin
  from websockets.exceptions import InvalidStatusCode

  from prefect import Task, get_client
@@ -225,16 +226,21 @@ class TaskServer:
  validated_state=state,
  )

- self._runs_task_group.start_soon(
- partial(
- submit_autonomous_task_run_to_engine,
- task=task,
- task_run=task_run,
- parameters=parameters,
- task_runner=self.task_runner,
- client=self._client,
+ try:
+ self._runs_task_group.start_soon(
+ partial(
+ submit_autonomous_task_run_to_engine,
+ task=task,
+ task_run=task_run,
+ parameters=parameters,
+ task_runner=self.task_runner,
+ client=self._client,
+ )
+ )
+ except BaseException as exc:
+ logger.exception(
+ f"Failed to submit task run {task_run.id!r} to engine", exc_info=exc
  )
- )

  async def execute_task_run(self, task_run: TaskRun):
  """Execute a task run in the task server."""
@@ -301,6 +307,14 @@ async def serve(*tasks: Task, task_runner: Optional[Type[BaseTaskRunner]] = None
  try:
  await task_server.start()

+ except BaseExceptionGroup as exc: # novermin
+ exceptions = exc.exceptions
+ n_exceptions = len(exceptions)
+ logger.error(
+ f"Task worker stopped with {n_exceptions} exception{'s' if n_exceptions != 1 else ''}:"
+ f"\n" + "\n".join(str(e) for e in exceptions)
+ )
+
  except StopTaskServer:
  logger.info("Task server stopped.")
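For reference, a self-contained sketch of the pattern used above; the exceptiongroup backport provides BaseExceptionGroup on Python versions before 3.11, where anyio task groups raise grouped errors:

from exceptiongroup import BaseExceptionGroup  # backport of the 3.11 built-in

try:
    raise BaseExceptionGroup(
        "task group failed", [ValueError("boom"), KeyError("missing")]
    )
except BaseExceptionGroup as exc:
    # Mirrors the handler above: report each sub-exception individually.
    for inner in exc.exceptions:
        print(f"sub-exception: {inner!r}")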
prefect/tasks.py CHANGED
@@ -6,7 +6,6 @@ Module containing the base workflow task class and decorator - for most use case

  import datetime
  import inspect
- import os
  from copy import copy
  from functools import partial, update_wrapper
  from typing import (
@@ -76,6 +75,8 @@ P = ParamSpec("P") # The parameters of the task

  logger = get_logger("tasks")

+ NUM_CHARS_DYNAMIC_KEY: int = 8
+

  def task_input_hash(
  context: "TaskRunContext", arguments: Dict[str, Any]
@@ -125,6 +126,28 @@ def exponential_backoff(backoff_factor: float) -> Callable[[int], List[float]]:
  return retry_backoff_callable


+ def _generate_task_key(fn: Callable[..., Any]) -> str:
+ """Generate a task key based on the function name and source code.
+ We may eventually want some sort of top-level namespace here to
+ disambiguate tasks with the same function name in different modules,
+ in a more human-readable way, while avoiding relative import problems (see #12337).
+ As long as the task implementations are unique (even if named the same), we should
+ not have any collisions.
+ Args:
+ fn: The function to generate a task key for.
+ """
+ if not hasattr(fn, "__qualname__"):
+ return to_qualified_name(type(fn))
+
+ qualname = fn.__qualname__.split(".")[-1]
+
+ code_hash = (
+ h[:NUM_CHARS_DYNAMIC_KEY] if (h := hash_objects(fn.__code__)) else "unknown"
+ )
+
+ return f"{qualname}-{code_hash}"
+
+
  @PrefectObjectRegistry.register_instances
  class Task(Generic[P, R]):
  """
@@ -292,17 +315,7 @@ class Task(Generic[P, R]):

  self.tags = set(tags if tags else [])

- if not hasattr(self.fn, "__qualname__"):
- self.task_key = to_qualified_name(type(self.fn))
- else:
- try:
- task_origin_hash = hash_objects(
- self.name, os.path.abspath(inspect.getsourcefile(self.fn))
- )
- except TypeError:
- task_origin_hash = "unknown-source-file"
-
- self.task_key = f"{self.fn.__qualname__}-{task_origin_hash}"
+ self.task_key = _generate_task_key(self.fn)

  self.cache_key_fn = cache_key_fn
  self.cache_expiration = cache_expiration
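A rough illustration of the new task key scheme; the hash shown in the comment is hypothetical:

from prefect import task

@task
def add(x, y):
    return x + y

# Previously the key hashed the task name and source file path. It is now the bare
# function name plus the first 8 characters of a hash of fn.__code__, for example
# something like "add-6a7f3c2b".
print(add.task_key)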
@@ -29,7 +29,9 @@ from uuid import UUID, uuid4

  import anyio
  import anyio.abc
+ import anyio.to_thread
  import sniffio
+ from anyio.from_thread import start_blocking_portal
  from typing_extensions import Literal, ParamSpec, TypeGuard

  from prefect.logging import get_logger
@@ -134,7 +136,7 @@ async def run_sync_in_worker_thread(
  """
  call = partial(__fn, *args, **kwargs)
  return await anyio.to_thread.run_sync(
- call, cancellable=True, limiter=get_thread_limiter()
+ call, abandon_on_cancel=True, limiter=get_thread_limiter()
  )


@@ -202,7 +204,7 @@ async def run_sync_in_interruptible_worker_thread(
  partial(
  anyio.to_thread.run_sync,
  capture_worker_thread_and_result,
- cancellable=True,
+ abandon_on_cancel=True,
  limiter=get_thread_limiter(),
  )
  )
@@ -228,7 +230,7 @@ def run_async_in_new_loop(__fn: Callable[..., Awaitable[T]], *args: Any, **kwarg

  def in_async_worker_thread() -> bool:
  try:
- anyio.from_thread.threadlocals.current_async_module
+ anyio.from_thread.threadlocals.current_async_backend
  except AttributeError:
  return False
  else:
@@ -261,14 +263,14 @@ def sync_compatible(async_fn: T) -> T:
  """

  @wraps(async_fn)
- def coroutine_wrapper(*args, **kwargs):
+ def coroutine_wrapper(*args, _sync: Optional[bool] = None, **kwargs):
  from prefect._internal.concurrency.api import create_call, from_sync
  from prefect._internal.concurrency.calls import get_current_call, logger
  from prefect._internal.concurrency.event_loop import get_running_loop
  from prefect._internal.concurrency.threads import get_global_loop
  from prefect.settings import PREFECT_EXPERIMENTAL_DISABLE_SYNC_COMPAT

- if PREFECT_EXPERIMENTAL_DISABLE_SYNC_COMPAT:
+ if PREFECT_EXPERIMENTAL_DISABLE_SYNC_COMPAT or _sync is False:
  return async_fn(*args, **kwargs)

  global_thread_portal = get_global_loop()
@@ -276,12 +278,17 @@ def sync_compatible(async_fn: T) -> T:
  current_call = get_current_call()
  current_loop = get_running_loop()

- if current_thread.ident == global_thread_portal.thread.ident:
+ if (
+ current_thread.ident == global_thread_portal.thread.ident
+ and _sync is not True
+ ):
  logger.debug(f"{async_fn} --> return coroutine for internal await")
  # In the prefect async context; return the coro for us to await
  return async_fn(*args, **kwargs)
- elif in_async_main_thread() and (
- not current_call or is_async_fn(current_call.fn)
+ elif (
+ in_async_main_thread()
+ and (not current_call or is_async_fn(current_call.fn))
+ and _sync is not True
  ):
  # In the main async context; return the coro for them to await
  logger.debug(f"{async_fn} --> return coroutine for user await")
@@ -333,7 +340,7 @@ def sync(__async_fn: Callable[P, Awaitable[T]], *args: P.args, **kwargs: P.kwarg
  "`sync` called from an asynchronous context; "
  "you should `await` the async function directly instead."
  )
- with anyio.start_blocking_portal() as portal:
+ with start_blocking_portal() as portal:
  return portal.call(partial(__async_fn, *args, **kwargs))
  elif in_async_worker_thread():
  # In a sync context but we can access the event loop thread; send the async
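A minimal sketch of the new _sync escape hatch added to @sync_compatible wrappers; the decorated helper below is hypothetical:

from prefect.utilities.asyncutils import sync_compatible

@sync_compatible
async def double(x: int) -> int:
    return x * 2

result = double(2, _sync=True)   # force the blocking, synchronous path and get 4 back
coro = double(2, _sync=False)    # always receive a coroutine for the caller to await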
@@ -346,17 +346,19 @@ def parameter_schema_from_entrypoint(entrypoint: str) -> ParameterSchema:
  Returns:
  ParameterSchema: The parameter schema for the function.
  """
+ filepath = None
  if ":" in entrypoint:
  # split by the last colon once to handle Windows paths with drive letters i.e C:\path\to\file.py:do_stuff
  path, func_name = entrypoint.rsplit(":", maxsplit=1)
  source_code = Path(path).read_text()
+ filepath = path
  else:
  path, func_name = entrypoint.rsplit(".", maxsplit=1)
  spec = importlib.util.find_spec(path)
  if not spec or not spec.origin:
  raise ValueError(f"Could not find module {path!r}")
  source_code = Path(spec.origin).read_text()
- signature = _generate_signature_from_source(source_code, func_name)
+ signature = _generate_signature_from_source(source_code, func_name, filepath)
  docstring = _get_docstring_from_source(source_code, func_name)
  return generate_parameter_schema(signature, parameter_docstrings(docstring))

@@ -424,7 +426,7 @@ def raise_for_reserved_arguments(fn: Callable, reserved_arguments: Iterable[str]


  def _generate_signature_from_source(
- source_code: str, func_name: str
+ source_code: str, func_name: str, filepath: Optional[str] = None
  ) -> inspect.Signature:
  """
  Extract the signature of a function from its source code.
@@ -440,7 +442,7 @@ def _generate_signature_from_source(
  """
  # Load the namespace from the source code. Missing imports and exceptions while
  # loading local class definitions are ignored.
- namespace = safe_load_namespace(source_code)
+ namespace = safe_load_namespace(source_code, filepath=filepath)
  # Parse the source code into an AST
  parsed_code = ast.parse(source_code)
@@ -4,11 +4,13 @@ import inspect
  import os
  import signal
  import time
+ from contextlib import contextmanager
  from functools import partial
  from typing import (
  Any,
  Callable,
  Dict,
+ Generator,
  Iterable,
  Optional,
  Set,
@@ -18,6 +20,7 @@ from typing import (
  from uuid import UUID, uuid4

  import anyio
+ from exceptiongroup import BaseExceptionGroup # novermin
  from typing_extensions import Literal

  import prefect
@@ -734,3 +737,14 @@ def emit_task_run_state_change_event(
  },
  follows=follows,
  )
+
+
+ @contextmanager
+ def collapse_excgroups() -> Generator[None, None, None]:
+ try:
+ yield
+ except BaseException as exc:
+ while isinstance(exc, BaseExceptionGroup) and len(exc.exceptions) == 1:
+ exc = exc.exceptions[0]
+
+ raise exc
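A quick illustration of what collapse_excgroups does: a single-member exception group raised inside the context surfaces as the underlying exception rather than the group:

from exceptiongroup import BaseExceptionGroup  # backport used on Python < 3.11
from prefect.utilities.engine import collapse_excgroups

try:
    with collapse_excgroups():
        # A single-member group, as anyio task groups often raise.
        raise BaseExceptionGroup("wrapper", [ValueError("original error")])
except ValueError as exc:
    print(exc)  # "original error", no longer wrapped in a group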
@@ -362,79 +362,159 @@ class AliasedModuleLoader(Loader):
  sys.modules[self.alias] = root_module


- def safe_load_namespace(source_code: str):
+ def safe_load_namespace(
+ source_code: str, filepath: Optional[str] = None
+ ) -> Dict[str, Any]:
  """
- Safely load a namespace from source code.
+ Safely load a namespace from source code, optionally handling relative imports.

- This function will attempt to import all modules and classes defined in the source
- code. If an import fails, the error is caught and the import is skipped. This function
- will also attempt to compile and evaluate class and function definitions locally.
+ If a `filepath` is provided, `sys.path` is modified to support relative imports.
+ Changes to `sys.path` are reverted after completion, but this function is not thread safe
+ and use of it in threaded contexts may result in undesirable behavior.

  Args:
  source_code: The source code to load
+ filepath: Optional file path of the source code. If provided, enables relative imports.

  Returns:
- The namespace loaded from the source code. Can be used when evaluating source
- code.
+ The namespace loaded from the source code.
  """
  parsed_code = ast.parse(source_code)

- namespace = {"__name__": "prefect_safe_namespace_loader"}
+ namespace: Dict[str, Any] = {"__name__": "prefect_safe_namespace_loader"}

- # Remove the body of the if __name__ == "__main__": block from the AST to prevent
- # execution of guarded code
- new_body = []
- for node in parsed_code.body:
- if _is_main_block(node):
- continue
- new_body.append(node)
+ # Remove the body of the if __name__ == "__main__": block
+ new_body = [node for node in parsed_code.body if not _is_main_block(node)]
  parsed_code.body = new_body

- # Walk through the AST and find all import statements
- for node in ast.walk(parsed_code):
- if isinstance(node, ast.Import):
- for alias in node.names:
- module_name = alias.name
- as_name = alias.asname if alias.asname else module_name
- try:
- # Attempt to import the module
- namespace[as_name] = importlib.import_module(module_name)
- logger.debug("Successfully imported %s", module_name)
- except ImportError as e:
- logger.debug(f"Failed to import {module_name}: {e}")
- elif isinstance(node, ast.ImportFrom):
- module_name = node.module
- if module_name is None:
- continue
- try:
- module = importlib.import_module(module_name)
+ temp_module = None
+ original_sys_path = None
+
+ if filepath:
+ # Setup for relative imports
+ file_dir = os.path.dirname(os.path.abspath(filepath))
+ package_name = os.path.basename(file_dir)
+ parent_dir = os.path.dirname(file_dir)
+
+ # Save original sys.path and modify it
+ original_sys_path = sys.path.copy()
+ sys.path.insert(0, parent_dir)
+
+ # Create a temporary module for import context
+ temp_module = ModuleType(package_name)
+ temp_module.__file__ = filepath
+ temp_module.__package__ = package_name
+
+ # Create a spec for the module
+ temp_module.__spec__ = ModuleSpec(package_name, None)
+ temp_module.__spec__.loader = None
+ temp_module.__spec__.submodule_search_locations = [file_dir]
+
+ try:
+ for node in parsed_code.body:
+ if isinstance(node, ast.Import):
  for alias in node.names:
- name = alias.name
- asname = alias.asname if alias.asname else name
+ module_name = alias.name
+ as_name = alias.asname or module_name
  try:
- # Get the specific attribute from the module
- attribute = getattr(module, name)
- namespace[asname] = attribute
- except AttributeError as e:
- logger.debug(
- "Failed to retrieve %s from %s: %s", name, module_name, e
- )
- except ImportError as e:
- logger.debug("Failed to import from %s: %s", node.module, e)
-
- # Handle local definitions
- for node in parsed_code.body:
- if isinstance(node, (ast.ClassDef, ast.FunctionDef, ast.Assign)):
- try:
- # Compile and execute each class and function definition and assignment
- code = compile(
- ast.Module(body=[node], type_ignores=[]),
- filename="<ast>",
- mode="exec",
- )
- exec(code, namespace)
- except Exception as e:
- logger.debug("Failed to compile: %s", e)
+ namespace[as_name] = importlib.import_module(module_name)
+ logger.debug("Successfully imported %s", module_name)
+ except ImportError as e:
+ logger.debug(f"Failed to import {module_name}: {e}")
+ elif isinstance(node, ast.ImportFrom):
+ module_name = node.module or ""
+ if filepath:
+ try:
+ if node.level > 0:
+ # For relative imports, use the parent package to inform the import
+ package_parts = temp_module.__package__.split(".")
+ if len(package_parts) < node.level:
+ raise ImportError(
+ "Attempted relative import beyond top-level package"
+ )
+ parent_package = ".".join(
+ package_parts[: (1 - node.level)]
+ if node.level > 1
+ else package_parts
+ )
+ module = importlib.import_module(
+ f".{module_name}" if module_name else "",
+ package=parent_package,
+ )
+ else:
+ # Absolute imports are handled as normal
+ module = importlib.import_module(module_name)
+
+ for alias in node.names:
+ name = alias.name
+ asname = alias.asname or name
+ if name == "*":
+ # Handle 'from module import *'
+ module_dict = {
+ k: v
+ for k, v in module.__dict__.items()
+ if not k.startswith("_")
+ }
+ namespace.update(module_dict)
+ else:
+ try:
+ attribute = getattr(module, name)
+ namespace[asname] = attribute
+ except AttributeError as e:
+ logger.debug(
+ "Failed to retrieve %s from %s: %s",
+ name,
+ module_name,
+ e,
+ )
+ except ImportError as e:
+ logger.debug("Failed to import from %s: %s", module_name, e)
+ else:
+ # Handle as absolute import when no filepath is provided
+ try:
+ module = importlib.import_module(module_name)
+ for alias in node.names:
+ name = alias.name
+ asname = alias.asname or name
+ if name == "*":
+ # Handle 'from module import *'
+ module_dict = {
+ k: v
+ for k, v in module.__dict__.items()
+ if not k.startswith("_")
+ }
+ namespace.update(module_dict)
+ else:
+ try:
+ attribute = getattr(module, name)
+ namespace[asname] = attribute
+ except AttributeError as e:
+ logger.debug(
+ "Failed to retrieve %s from %s: %s",
+ name,
+ module_name,
+ e,
+ )
+ except ImportError as e:
+ logger.debug("Failed to import from %s: %s", module_name, e)
+ # Handle local definitions
+ for node in parsed_code.body:
+ if isinstance(node, (ast.ClassDef, ast.FunctionDef, ast.Assign)):
+ try:
+ code = compile(
+ ast.Module(body=[node], type_ignores=[]),
+ filename="<ast>",
+ mode="exec",
+ )
+ exec(code, namespace)
+ except Exception as e:
+ logger.debug("Failed to compile: %s", e)
+
+ finally:
+ # Restore original sys.path if it was modified
+ if original_sys_path:
+ sys.path[:] = original_sys_path
+
  return namespace
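A brief sketch of the new filepath parameter; the module path and its relative import below are hypothetical:

from prefect.utilities.importtools import safe_load_namespace

source = "from .helpers import transform\n\ndef run():\n    return transform(1)\n"

# Passing the file's path lets the relative import above be attempted against the
# file's package directory; without it the import is simply skipped.
namespace = safe_load_namespace(source, filepath="/repo/my_package/flows.py")
print("run" in namespace)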
@@ -1,4 +1,5 @@
  import sys
+ from asyncio import CancelledError
  from collections import deque
  from traceback import format_exception
  from types import TracebackType
@@ -67,6 +68,11 @@ async def critical_service_loop(
  backoff_count = 0

  track_record.append(True)
+ except CancelledError as exc:
+ # Exit immediately because the task was cancelled, possibly due
+ # to a signal or timeout.
+ logger.debug(f"Run of {workload!r} cancelled", exc_info=exc)
+ return
  except httpx.TransportError as exc:
  # httpx.TransportError is the base class for any kind of communications
  # error, like timeouts, connection failures, etc. This does _not_ cover
@@ -138,7 +144,7 @@ async def critical_service_loop(
  failures.clear()
  printer(
  "Backing off due to consecutive errors, using increased interval of "
- f" {interval * 2**backoff_count}s."
+ f" {interval * 2 ** backoff_count}s."
  )

  if run_once:
prefect/workers/block.py CHANGED
@@ -4,6 +4,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Optional
  import anyio
  import anyio.abc

+ from prefect._internal.compatibility.deprecated import deprecated_class
  from prefect._internal.pydantic import HAS_PYDANTIC_V2
  from prefect._internal.schemas.validators import validate_block_is_infrastructure
  from prefect.blocks.core import Block
@@ -27,6 +28,10 @@ if TYPE_CHECKING:
  from prefect.client.schemas.responses import DeploymentResponse


+ @deprecated_class(
+ start_date="Jun 2024",
+ help="Refer to the upgrade guide for more information: https://docs.prefect.io/latest/guides/upgrade-guide-agents-to-workers/",
+ )
  class BlockWorkerJobConfiguration(BaseModel):
  block: Block = Field(
  default=..., description="The infrastructure block to use for job creation."
@@ -144,6 +149,10 @@ class BlockWorkerResult(BaseWorkerResult):
  """Result of a block worker job"""


+ @deprecated_class(
+ start_date="Jun 2024",
+ help="Refer to the upgrade guide for more information: https://docs.prefect.io/latest/guides/upgrade-guide-agents-to-workers/",
+ )
  class BlockWorker(BaseWorker):
  type = "block"
  job_configuration = BlockWorkerJobConfiguration
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: prefect-client
- Version: 2.19.8
+ Version: 2.20.0
  Summary: Workflow orchestration and management.
  Home-page: https://www.prefect.io
  Author: Prefect Technologies, Inc.
@@ -23,7 +23,7 @@ Classifier: Programming Language :: Python :: 3.11
  Classifier: Topic :: Software Development :: Libraries
  Requires-Python: >=3.8
  Description-Content-Type: text/markdown
- Requires-Dist: anyio <4.0.0,>=3.7.1
+ Requires-Dist: anyio <5.0.0,>=4.4.0
  Requires-Dist: asgi-lifespan <3.0,>=1.0
  Requires-Dist: cachetools <6.0,>=5.3
  Requires-Dist: cloudpickle <4.0,>=2.0
@@ -31,7 +31,7 @@ Requires-Dist: coolname <3.0.0,>=1.0.4
  Requires-Dist: croniter <3.0.0,>=1.0.12
  Requires-Dist: fsspec >=2022.5.0
  Requires-Dist: graphviz >=0.20.1
- Requires-Dist: griffe >=0.20.0
+ Requires-Dist: griffe <0.48.0,>=0.20.0
  Requires-Dist: httpcore <2.0.0,>=1.0.5
  Requires-Dist: httpx[http2] !=0.23.2,>=0.23
  Requires-Dist: importlib-resources <6.2.0,>=6.1.3
@@ -5,11 +5,11 @@ prefect/agent.py,sha256=HaGT0yh3fciluYpO99dVHo_LHq7N2cYLuWNrEV_kPV8,27789
  prefect/artifacts.py,sha256=mreaBE4qMoXkjc9YI-5cAxoye7ixraHB_zr8GTK9xPU,8694
  prefect/automations.py,sha256=rjVtQblBlKhD_q24bG6zbxJeb_XQJnodMlhr565aZJY,4853
  prefect/context.py,sha256=Hgn3rIjCbqfCmGnZzV_eZ2FwxGjEhaZjUw_nppqNQSA,18189
- prefect/engine.py,sha256=L9VCeRU4iO5Utj3kOlBlNSGD_t_AqMV-A5uN_82lk3U,90832
+ prefect/engine.py,sha256=czyTcLPIaj_LOpPuWNheK0FfdOwX42VKLtonm7vZqH0,91938
  prefect/exceptions.py,sha256=ElqC81_w6XbTaxLYANLMIPK8Fz46NmJZCRKL4NZ-JIg,10907
  prefect/filesystems.py,sha256=XniPSdBAqywj43X7GyfuWJQIbz07QJ5Y3cVNLhIF3lQ,35260
  prefect/flow_runs.py,sha256=mFHLavZk1yZ62H3UazuNDBZWAF7AqKttA4rMcHgsVSw,3119
- prefect/flows.py,sha256=tfQJ3UxBZFl8XDuWti-G4ZUupDRc2ta4OtBIhAh6uAM,79409
+ prefect/flows.py,sha256=-Ietkctf2ONzmqcytI9Fc8T1-vtPmEuOo3m6wPNLIrQ,84735
  prefect/futures.py,sha256=RaWfYIXtH7RsWxQ5QWTTlAzwtVV8XWpXaZT_hLq35vQ,12590
  prefect/manifests.py,sha256=sTM7j8Us5d49zaydYKWsKb7zJ96v1ChkLkLeR0GFYD8,683
  prefect/new_flow_engine.py,sha256=A1adTWTBAwPCn6ay003Jsoc2SdYgHV4AcJo1bmpa_7Y,16039
@@ -23,8 +23,8 @@ prefect/settings.py,sha256=gFVXmGLapnkIV7hQvRJMJ6472UZJr6gqZYk1xwcqgnQ,74931
  prefect/states.py,sha256=B38zIXnqc8cmw3GPxmMQ4thX6pXb6UtG4PoTZ5thGQs,21036
  prefect/task_engine.py,sha256=_2I7XLwoT_nNhpzTMa_52aQKjsDoaW6WpzwIHYEWZS0,2598
  prefect/task_runners.py,sha256=HXUg5UqhZRN2QNBqMdGE1lKhwFhT8TaRN75ScgLbnw8,11012
- prefect/task_server.py,sha256=3f6rDIOXmhhF_MDHGk5owaU9lyLHsR-zgCp6pIHEUyo,11075
- prefect/tasks.py,sha256=cQ_jfI4WfPlL80TehTKX5p-rjWfJAEgN4L51Rozj32o,55686
+ prefect/task_server.py,sha256=-wHuAlY8DLEQuJcEvcFfB4da0Xdnmqk6D7GjHShh-Ik,11668
+ prefect/tasks.py,sha256=HWTT9Y6obBId1VcLCQbmBFN4WUKE0j1pw2IvzyTgF70,56134
  prefect/variables.py,sha256=4r5gVGpAZxLWHj5JoNZJTuktX1-u3ENzVp3t4M6FDgs,3815
  prefect/_internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/_internal/_logging.py,sha256=HvNHY-8P469o5u4LYEDBTem69XZEt1QUeUaLToijpak,810
@@ -156,7 +156,7 @@ prefect/blocks/abstract.py,sha256=AiAs0MC5JKCf0Xg0yofC5Qu2TZ52AjDMP1ntMGuP2dY,16
  prefect/blocks/core.py,sha256=66pGFVPxtCCGWELqPXYqN8L0GoUXuUqv6jWw3Kk-tyY,43496
  prefect/blocks/fields.py,sha256=ANOzbNyDCBIvm6ktgbLTMs7JW2Sf6CruyATjAW61ks0,1607
  prefect/blocks/kubernetes.py,sha256=IN-hZkzIRvqjd_dzPZby3q8p7m2oUWqArBq24BU9cDg,4071
- prefect/blocks/notifications.py,sha256=raXBPidAfec7VCyLA1bb46RD06i0zPtVonWcXtkeOUU,27211
+ prefect/blocks/notifications.py,sha256=LJd2mgV29URqItJyxtWUpdo4wswtm7KyIseuAjV3joI,28132
  prefect/blocks/system.py,sha256=aIRiFKlXIQ1sMaqniMXYolFsx2IVN3taBMH3KCThB2I,3089
  prefect/blocks/webhook.py,sha256=VzQ-qcRtW8MMuYEGYwFgt1QXtWedUtVmeTo7iE2UQ78,2008
  prefect/client/__init__.py,sha256=yJ5FRF9RxNUio2V_HmyKCKw5G6CZO0h8cv6xA_Hkpcc,477
@@ -170,7 +170,7 @@ prefect/client/utilities.py,sha256=7V4IkfC8x_OZuPXGvtIMmwZCOW63hSY8iVQkuRYTR6g,3
  prefect/client/schemas/__init__.py,sha256=KlyqFV-hMulMkNstBn_0ijoHoIwJZaBj6B1r07UmgvE,607
  prefect/client/schemas/actions.py,sha256=4mq1OXMsXs6aGlXg1G232RNcn0ivAOIgikL0IKs6S-E,27943
  prefect/client/schemas/filters.py,sha256=gv57m0bHJqL7Ifsc_vAdRODFomaMVcrGXKAahOSBU4w,35598
- prefect/client/schemas/objects.py,sha256=Ie82ck5vXXgs0Q_luVe_NJkg_SAYN68d0nTxxFKt31M,53130
+ prefect/client/schemas/objects.py,sha256=bA0kAN9fbeiE6yUGX6bRlScheYtg8uxu0eeuC3-3zd8,53309
  prefect/client/schemas/responses.py,sha256=XAc95g3PRL9UIkH9_VMuv0ECHKdc19guBLmdt5KefkI,15325
  prefect/client/schemas/schedules.py,sha256=ZF7fFbkcc8rVdx2MxE8DR0av3FUE9qDPjLreEuV8HfM,12193
  prefect/client/schemas/sorting.py,sha256=EIQ6FUjUWMwk6fn6ckVLQLXOP-GI5kce7ftjUkDFWV0,2490
@@ -181,7 +181,7 @@ prefect/concurrency/services.py,sha256=PQb4cs72lTRz_XEgDEfv4OkpHWC7KAmW_Xd9vpOOu
  prefect/concurrency/sync.py,sha256=QtnPRfVX9GqVyuZOt6W9yJuT9G-PlCSVnxlZKFTjuKY,3271
  prefect/deployments/__init__.py,sha256=dM866rOEz3BbAN_xaFMHj3Hw1oOFemBTZ2yxVE6IGoY,394
  prefect/deployments/base.py,sha256=0l2D_laMc3q2Q5nvh-WANv3iDy4Ih5BqcPMNJJbHuP0,16391
- prefect/deployments/deployments.py,sha256=bYNmxU0yn2jzluGIr2tUkgRi73WGQ6gGbjb0GlD4EIk,41656
+ prefect/deployments/deployments.py,sha256=S9ro-RUNrc2v8uWFkLr3-JE7h3RGC-HO_f5T7xe4ABw,41884
  prefect/deployments/runner.py,sha256=a2dxc84zCofZFXV47M2zfntqUaoAhGWvf7o0s3MjPws,44772
  prefect/deployments/schedules.py,sha256=23GDCAKOP-aAEKGappwTrM4HU67ndVH7NR4Dq0neU_U,1884
  prefect/deployments/steps/__init__.py,sha256=3pZWONAZzenDszqNQT3bmTFilnvjB6xMolMz9tr5pLw,229
@@ -254,23 +254,23 @@ prefect/software/python.py,sha256=EssQ16aMvWSzzWagtNPfjQLu9ehieRwN0iWeqpBVtRU,17
  prefect/types/__init__.py,sha256=aZvlQ2uXl949sJ_khmxSVkRH3o6edo-eJ_GBGMBN5Yg,3134
  prefect/utilities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  prefect/utilities/annotations.py,sha256=bXB43j5Zsq5gaBcJe9qnszBlnNwCTwqSTgcu2OkkRLo,2776
- prefect/utilities/asyncutils.py,sha256=1xpjGFs72vQTPXfG0ww1mfNBwp0-UbRICLGVeRuiqOg,17010
- prefect/utilities/callables.py,sha256=-Ccr5JmDoafth46MPPQCRStBMSG_ickCDfBztpd_MAs,21119
+ prefect/utilities/asyncutils.py,sha256=ftu6MaV9qOZ3oCIErrneW07km2BydCezOMzvPUuCMUo,17246
+ prefect/utilities/callables.py,sha256=YWilWp6oyL3D-hsyUuSE-h2KZ0aO_nbjhW75KsrLVmQ,21224
  prefect/utilities/collections.py,sha256=0v-NNXxYYzkUTCCNDMNB44AnDv9yj35UYouNraCqlo8,15449
  prefect/utilities/compat.py,sha256=mNQZDnzyKaOqy-OV-DnmH_dc7CNF5nQgW_EsA4xMr7g,906
  prefect/utilities/context.py,sha256=BThuUW94-IYgFYTeMIM9KMo8ShT3oiI7w5ajZHzU1j0,1377
  prefect/utilities/dispatch.py,sha256=BSAuYf3uchA6giBB90Z9tsmnR94SAqHZMHl01fRuA64,5467
  prefect/utilities/dockerutils.py,sha256=O5lIgCej5KGRYU2TC1NzNuIK595uOIWJilhZXYEVtOA,20180
- prefect/utilities/engine.py,sha256=TKiYqpfgt4zopuI8yvh2e-V9GgLcRrh3TpKRhvLuHdw,25669
+ prefect/utilities/engine.py,sha256=6O7zYZQfpo6FtsI6n9DUNs-MB7_xLs3iXiCnSukR8qI,26046
  prefect/utilities/filesystem.py,sha256=M_TeZ1MftjBf7hDLWk-Iphir369TpJ1binMsBKiO9YE,4449
  prefect/utilities/hashing.py,sha256=EOwZLmoIZImuSTxAvVqInabxJ-4RpEfYeg9e2EDQF8o,1752
- prefect/utilities/importtools.py,sha256=MxGyPxfKjn6WtXVY9t8xRfDqK5MPUa4NBZqvyqIvp-4,15535
+ prefect/utilities/importtools.py,sha256=JteP9zFz-oJyxSVYr63kJ-RpDL2jjTfJMqgYaBst19M,19518
  prefect/utilities/math.py,sha256=wLwcKVidpNeWQi1TUIWWLHGjlz9UgboX9FUGhx_CQzo,2821
  prefect/utilities/names.py,sha256=x-stHcF7_tebJPvB1dz-5FvdXJXNBTg2kFZXSnIBBmk,1657
  prefect/utilities/processutils.py,sha256=yo_GO48pZzgn4A0IK5irTAoqyUCYvWKDSqHXCrtP8c4,14547
  prefect/utilities/pydantic.py,sha256=3IR73F3gkuRG6HQfCEP9ENIC6qbK6oOFawjsYJfoUkg,9984
  prefect/utilities/render_swagger.py,sha256=h2UrORVN3f7gM4zurtMnySjQXZIOWbji3uMinpbkl8U,3717
- prefect/utilities/services.py,sha256=u0Gpdw5pYceaSLCqOihGyFb2AlMBYE2P9Ts9qRb3N9Q,6584
+ prefect/utilities/services.py,sha256=POYQRdvkUs-0dFcgV-BOyII0NFttgW1NjDAJR1bbGqU,6865
  prefect/utilities/slugify.py,sha256=57Vb14t13F3zm1P65KAu8nVeAz0iJCd1Qc5eMG-R5y8,169
  prefect/utilities/templating.py,sha256=t32Gcsvvm8ibzdqXwcWzY7JkwftPn73FiiLYEnQWyKM,13237
  prefect/utilities/text.py,sha256=eXGIsCcZ7h_6hy8T5GDQjL8GiKyktoOqavYub0QjgO4,445
@@ -281,12 +281,12 @@ prefect/utilities/schema_tools/hydration.py,sha256=RNuJK4Vd__V69gdQbaWSVhSkV0AUI
  prefect/utilities/schema_tools/validation.py,sha256=zZHL_UFxAlgaUzi-qsEOrhWtZ7EkFQvPkX_YN1EJNTo,8414
  prefect/workers/__init__.py,sha256=6el2Q856CuRPa5Hdrbm9QyAWB_ovcT2bImSFsoWI46k,66
  prefect/workers/base.py,sha256=LKIMS2DaQSQRV4rjbrMYeQnY-9rzgj_KWBRIq-8c5rg,45125
- prefect/workers/block.py,sha256=5bdCuqT-4I-et_8ZLG2y1AODzYiCQwFiivhdt5NMEog,7635
+ prefect/workers/block.py,sha256=aYY__uq3v1eq1kkbVukxyhQNbkknaKYo6-_3tcrfKKA,8067
  prefect/workers/process.py,sha256=pPtCdA7fKQ4OsvoitT-cayZeh5HgLX4xBUYlb2Zad-Q,9475
  prefect/workers/server.py,sha256=WVZJxR8nTMzK0ov0BD0xw5OyQpT26AxlXbsGQ1OrxeQ,1551
  prefect/workers/utilities.py,sha256=VfPfAlGtTuDj0-Kb8WlMgAuOfgXCdrGAnKMapPSBrwc,2483
- prefect_client-2.19.8.dist-info/LICENSE,sha256=MCxsn8osAkzfxKC4CC_dLcUkU8DZLkyihZ8mGs3Ah3Q,11357
- prefect_client-2.19.8.dist-info/METADATA,sha256=kFZAOZgAQSC1Qz_vFIkAxOyBRHquCOa9F-Rtwds-_-M,7402
- prefect_client-2.19.8.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
- prefect_client-2.19.8.dist-info/top_level.txt,sha256=MJZYJgFdbRc2woQCeB4vM6T33tr01TmkEhRcns6H_H4,8
- prefect_client-2.19.8.dist-info/RECORD,,
+ prefect_client-2.20.0.dist-info/LICENSE,sha256=MCxsn8osAkzfxKC4CC_dLcUkU8DZLkyihZ8mGs3Ah3Q,11357
+ prefect_client-2.20.0.dist-info/METADATA,sha256=GRcdYDHMdwoy3FRSFZvP_sNOvtU3VEgUx1VMD6wUXdU,7410
+ prefect_client-2.20.0.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+ prefect_client-2.20.0.dist-info/top_level.txt,sha256=MJZYJgFdbRc2woQCeB4vM6T33tr01TmkEhRcns6H_H4,8
+ prefect_client-2.20.0.dist-info/RECORD,,