prefect-client 3.0.10__py3-none-any.whl → 3.0.11__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registry.
Files changed (67)
  1. prefect/__init__.py +17 -14
  2. prefect/_internal/schemas/bases.py +1 -0
  3. prefect/_internal/schemas/validators.py +5 -3
  4. prefect/_version.py +3 -3
  5. prefect/client/cloud.py +2 -2
  6. prefect/client/orchestration.py +4 -4
  7. prefect/client/schemas/filters.py +14 -0
  8. prefect/context.py +3 -2
  9. prefect/deployments/runner.py +15 -6
  10. prefect/events/schemas/automations.py +3 -3
  11. prefect/events/schemas/deployment_triggers.py +10 -5
  12. prefect/flow_engine.py +4 -4
  13. prefect/flows.py +24 -9
  14. prefect/futures.py +4 -4
  15. prefect/logging/handlers.py +1 -1
  16. prefect/logging/highlighters.py +2 -0
  17. prefect/logging/logging.yml +82 -83
  18. prefect/runner/runner.py +1 -2
  19. prefect/runner/server.py +12 -1
  20. prefect/settings/__init__.py +59 -0
  21. prefect/settings/base.py +131 -0
  22. prefect/settings/constants.py +8 -0
  23. prefect/settings/context.py +65 -0
  24. prefect/settings/legacy.py +167 -0
  25. prefect/settings/models/__init__.py +0 -0
  26. prefect/settings/models/api.py +41 -0
  27. prefect/settings/models/cli.py +31 -0
  28. prefect/settings/models/client.py +90 -0
  29. prefect/settings/models/cloud.py +58 -0
  30. prefect/settings/models/deployments.py +40 -0
  31. prefect/settings/models/flows.py +37 -0
  32. prefect/settings/models/internal.py +21 -0
  33. prefect/settings/models/logging.py +137 -0
  34. prefect/settings/models/results.py +47 -0
  35. prefect/settings/models/root.py +447 -0
  36. prefect/settings/models/runner.py +65 -0
  37. prefect/settings/models/server/__init__.py +1 -0
  38. prefect/settings/models/server/api.py +133 -0
  39. prefect/settings/models/server/database.py +202 -0
  40. prefect/settings/models/server/deployments.py +24 -0
  41. prefect/settings/models/server/ephemeral.py +34 -0
  42. prefect/settings/models/server/events.py +140 -0
  43. prefect/settings/models/server/flow_run_graph.py +34 -0
  44. prefect/settings/models/server/root.py +143 -0
  45. prefect/settings/models/server/services.py +485 -0
  46. prefect/settings/models/server/tasks.py +86 -0
  47. prefect/settings/models/server/ui.py +52 -0
  48. prefect/settings/models/tasks.py +91 -0
  49. prefect/settings/models/testing.py +52 -0
  50. prefect/settings/models/ui.py +0 -0
  51. prefect/settings/models/worker.py +46 -0
  52. prefect/settings/profiles.py +390 -0
  53. prefect/settings/sources.py +162 -0
  54. prefect/task_engine.py +24 -29
  55. prefect/task_runners.py +6 -1
  56. prefect/tasks.py +63 -28
  57. prefect/utilities/asyncutils.py +1 -1
  58. prefect/utilities/engine.py +11 -3
  59. prefect/utilities/services.py +3 -3
  60. prefect/workers/base.py +8 -2
  61. {prefect_client-3.0.10.dist-info → prefect_client-3.0.11.dist-info}/METADATA +2 -2
  62. {prefect_client-3.0.10.dist-info → prefect_client-3.0.11.dist-info}/RECORD +66 -33
  63. prefect/settings.py +0 -2172
  64. /prefect/{profiles.toml → settings/profiles.toml} +0 -0
  65. {prefect_client-3.0.10.dist-info → prefect_client-3.0.11.dist-info}/LICENSE +0 -0
  66. {prefect_client-3.0.10.dist-info → prefect_client-3.0.11.dist-info}/WHEEL +0 -0
  67. {prefect_client-3.0.10.dist-info → prefect_client-3.0.11.dist-info}/top_level.txt +0 -0
prefect/settings/sources.py ADDED
@@ -0,0 +1,162 @@
+ import os
+ import sys
+ import warnings
+ from pathlib import Path
+ from typing import Any, Dict, List, Optional, Tuple, Type
+
+ import toml
+ from pydantic import AliasChoices
+ from pydantic.fields import FieldInfo
+ from pydantic_settings import (
+     BaseSettings,
+     EnvSettingsSource,
+     PydanticBaseSettingsSource,
+ )
+
+ from prefect.settings.constants import DEFAULT_PREFECT_HOME, DEFAULT_PROFILES_PATH
+
+
+ class EnvFilterSettingsSource(EnvSettingsSource):
+     """
+     Custom pydantic settings source to filter out specific environment variables.
+
+     All validation aliases are loaded from environment variables by default. We use
+     `AliasPath` to maintain the ability set fields via model initialization, but those
+     shouldn't be loaded from environment variables. This loader allows use to say which
+     environment variables should be ignored.
+     """
+
+     def __init__(
+         self,
+         settings_cls: type[BaseSettings],
+         case_sensitive: Optional[bool] = None,
+         env_prefix: Optional[str] = None,
+         env_nested_delimiter: Optional[str] = None,
+         env_ignore_empty: Optional[bool] = None,
+         env_parse_none_str: Optional[str] = None,
+         env_parse_enums: Optional[bool] = None,
+         env_filter: Optional[List[str]] = None,
+     ) -> None:
+         super().__init__(
+             settings_cls,
+             case_sensitive,
+             env_prefix,
+             env_nested_delimiter,
+             env_ignore_empty,
+             env_parse_none_str,
+             env_parse_enums,
+         )
+         if env_filter:
+             if isinstance(self.env_vars, dict):
+                 for key in env_filter:
+                     self.env_vars.pop(key, None)
+             else:
+                 self.env_vars = {
+                     key: value
+                     for key, value in self.env_vars.items()
+                     if key.lower() not in env_filter
+                 }
+
+
+ class ProfileSettingsTomlLoader(PydanticBaseSettingsSource):
+     """
+     Custom pydantic settings source to load profile settings from a toml file.
+
+     See https://docs.pydantic.dev/latest/concepts/pydantic_settings/#customise-settings-sources
+     """
+
+     def __init__(self, settings_cls: Type[BaseSettings]):
+         super().__init__(settings_cls)
+         self.settings_cls = settings_cls
+         self.profiles_path = _get_profiles_path()
+         self.profile_settings = self._load_profile_settings()
+
+     def _load_profile_settings(self) -> Dict[str, Any]:
+         """Helper method to load the profile settings from the profiles.toml file"""
+
+         if not self.profiles_path.exists():
+             return {}
+
+         try:
+             all_profile_data = toml.load(self.profiles_path)
+         except toml.TomlDecodeError:
+             warnings.warn(
+                 f"Failed to load profiles from {self.profiles_path}. Please ensure the file is valid TOML."
+             )
+             return {}
+
+         if (
+             sys.argv[0].endswith("/prefect")
+             and len(sys.argv) >= 3
+             and sys.argv[1] == "--profile"
+         ):
+             active_profile = sys.argv[2]
+
+         else:
+             active_profile = os.environ.get("PREFECT_PROFILE") or all_profile_data.get(
+                 "active"
+             )
+
+         profiles_data = all_profile_data.get("profiles", {})
+
+         if not active_profile or active_profile not in profiles_data:
+             return {}
+         return profiles_data[active_profile]
+
+     def get_field_value(
+         self, field: FieldInfo, field_name: str
+     ) -> Tuple[Any, str, bool]:
+         """Concrete implementation to get the field value from the profile settings"""
+         if field.validation_alias:
+             if isinstance(field.validation_alias, str):
+                 value = self.profile_settings.get(field.validation_alias.upper())
+                 if value is not None:
+                     return value, field_name, self.field_is_complex(field)
+             elif isinstance(field.validation_alias, AliasChoices):
+                 for alias in field.validation_alias.choices:
+                     if not isinstance(alias, str):
+                         continue
+                     value = self.profile_settings.get(alias.upper())
+                     if value is not None:
+                         return value, field_name, self.field_is_complex(field)
+
+         value = self.profile_settings.get(
+             f"{self.config.get('env_prefix','')}{field_name.upper()}"
+         )
+         return value, field_name, self.field_is_complex(field)
+
+     def __call__(self) -> Dict[str, Any]:
+         """Called by pydantic to get the settings from our custom source"""
+         if _is_test_mode():
+             return {}
+         profile_settings: Dict[str, Any] = {}
+         for field_name, field in self.settings_cls.model_fields.items():
+             value, key, is_complex = self.get_field_value(field, field_name)
+             if value is not None:
+                 prepared_value = self.prepare_field_value(
+                     field_name, field, value, is_complex
+                 )
+                 profile_settings[key] = prepared_value
+         return profile_settings
+
+
+ def _is_test_mode() -> bool:
+     """Check if the current process is in test mode."""
+     return bool(
+         os.getenv("PREFECT_TEST_MODE")
+         or os.getenv("PREFECT_UNIT_TEST_MODE")
+         or os.getenv("PREFECT_TESTING_UNIT_TEST_MODE")
+         or os.getenv("PREFECT_TESTING_TEST_MODE")
+     )
+
+
+ def _get_profiles_path() -> Path:
+     """Helper to get the profiles path"""
+
+     if _is_test_mode():
+         return DEFAULT_PROFILES_PATH
+     if env_path := os.getenv("PREFECT_PROFILES_PATH"):
+         return Path(env_path)
+     if not (DEFAULT_PREFECT_HOME / "profiles.toml").exists():
+         return DEFAULT_PROFILES_PATH
+     return DEFAULT_PREFECT_HOME / "profiles.toml"
prefect/task_engine.py CHANGED
@@ -1,3 +1,4 @@
+ import asyncio
  import inspect
  import logging
  import threading
@@ -184,6 +185,22 @@ class BaseTaskRunEngine(Generic[P, R]):

  self.parameters = resolved_parameters

+ def _set_custom_task_run_name(self):
+ from prefect.utilities.engine import _resolve_custom_task_run_name
+
+ # update the task run name if necessary
+ if not self._task_name_set and self.task.task_run_name:
+ task_run_name = _resolve_custom_task_run_name(
+ task=self.task, parameters=self.parameters or {}
+ )
+
+ self.logger.extra["task_run_name"] = task_run_name
+ self.logger.debug(
+ f"Renamed task run {self.task_run.name!r} to {task_run_name!r}"
+ )
+ self.task_run.name = task_run_name
+ self._task_name_set = True
+
  def _wait_for_dependencies(self):
  if not self.wait_for:
  return
@@ -291,7 +308,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
  data=exc,
  message=f"Task run encountered unexpected exception: {repr(exc)}",
  )
- if inspect.iscoroutinefunction(retry_condition):
+ if asyncio.iscoroutinefunction(retry_condition):
  should_retry = run_coro_as_sync(
  retry_condition(self.task, self.task_run, state)
  )
@@ -335,7 +352,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
  f" {state.name!r}"
  )
  result = hook(task, task_run, state)
- if inspect.isawaitable(result):
+ if asyncio.iscoroutine(result):
  run_coro_as_sync(result)
  except Exception:
  self.logger.error(
@@ -348,6 +365,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
  def begin_run(self):
  try:
  self._resolve_parameters()
+ self._set_custom_task_run_name()
  self._wait_for_dependencies()
  except UpstreamTaskError as upstream_exc:
  state = self.set_state(
@@ -419,7 +437,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
  # Avoid fetching the result unless it is cached, otherwise we defeat
  # the purpose of disabling `cache_result_in_memory`
  result = state.result(raise_on_failure=False, fetch=True)
- if inspect.isawaitable(result):
+ if asyncio.iscoroutine(result):
  result = run_coro_as_sync(result)
  elif isinstance(state.data, ResultRecord):
  result = state.data.result
@@ -443,7 +461,7 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
  # if the return value is a BaseResult, we need to fetch it
  if isinstance(self._return_value, BaseResult):
  _result = self._return_value.get()
- if inspect.isawaitable(_result):
+ if asyncio.iscoroutine(_result):
  _result = run_coro_as_sync(_result)
  return _result
  elif isinstance(self._return_value, ResultRecord):
@@ -577,7 +595,6 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):
  @contextmanager
  def setup_run_context(self, client: Optional[SyncPrefectClient] = None):
  from prefect.utilities.engine import (
- _resolve_custom_task_run_name,
  should_log_prints,
  )

@@ -609,18 +626,6 @@ class SyncTaskRunEngine(BaseTaskRunEngine[P, R]):

  self.logger = task_run_logger(task_run=self.task_run, task=self.task) # type: ignore

- # update the task run name if necessary
- if not self._task_name_set and self.task.task_run_name:
- task_run_name = _resolve_custom_task_run_name(
- task=self.task, parameters=self.parameters
- )
-
- self.logger.extra["task_run_name"] = task_run_name
- self.logger.debug(
- f"Renamed task run {self.task_run.name!r} to {task_run_name!r}"
- )
- self.task_run.name = task_run_name
- self._task_name_set = True
  yield

  @contextmanager
@@ -813,7 +818,7 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
  data=exc,
  message=f"Task run encountered unexpected exception: {repr(exc)}",
  )
- if inspect.iscoroutinefunction(retry_condition):
+ if asyncio.iscoroutinefunction(retry_condition):
  should_retry = await retry_condition(self.task, self.task_run, state)
  elif inspect.isfunction(retry_condition):
  should_retry = retry_condition(self.task, self.task_run, state)
@@ -869,6 +874,7 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
  async def begin_run(self):
  try:
  self._resolve_parameters()
+ self._set_custom_task_run_name()
  self._wait_for_dependencies()
  except UpstreamTaskError as upstream_exc:
  state = await self.set_state(
@@ -1091,7 +1097,6 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):
  @asynccontextmanager
  async def setup_run_context(self, client: Optional[PrefectClient] = None):
  from prefect.utilities.engine import (
- _resolve_custom_task_run_name,
  should_log_prints,
  )

@@ -1122,16 +1127,6 @@ class AsyncTaskRunEngine(BaseTaskRunEngine[P, R]):

  self.logger = task_run_logger(task_run=self.task_run, task=self.task) # type: ignore

- if not self._task_name_set and self.task.task_run_name:
- task_run_name = _resolve_custom_task_run_name(
- task=self.task, parameters=self.parameters
- )
- self.logger.extra["task_run_name"] = task_run_name
- self.logger.debug(
- f"Renamed task run {self.task_run.name!r} to {task_run_name!r}"
- )
- self.task_run.name = task_run_name
- self._task_name_set = True
  yield

  @asynccontextmanager
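These hunks also switch coroutine detection from `inspect` to `asyncio` in the retry-condition path. As a usage-level illustration of the hook being touched, here is a minimal sketch, assuming Prefect's existing `retry_condition_fn` task argument; the handler below is a made-up example, not code from the package. Per the change above, an `async def` handler would be detected with `asyncio.iscoroutinefunction` and awaited as well.

```python
from prefect import task


def retry_only_on_value_error(task, task_run, state) -> bool:
    """Return True to retry the task run, False to give up."""
    try:
        state.result()  # re-raises the task's exception
    except ValueError:
        return True
    except Exception:
        return False
    return False


@task(retries=3, retry_condition_fn=retry_only_on_value_error)
def flaky_step(x: int) -> int:
    if x < 0:
        raise ValueError("negative input")  # illustrative failure
    return x * 2
```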
prefect/task_runners.py CHANGED
@@ -29,6 +29,7 @@ from prefect.futures import (
  PrefectFutureList,
  )
  from prefect.logging.loggers import get_logger, get_run_logger
+ from prefect.settings import PREFECT_TASK_RUNNER_THREAD_POOL_MAX_WORKERS
  from prefect.utilities.annotations import allow_failure, quote, unmapped
  from prefect.utilities.callables import (
  collapse_variadic_parameters,
@@ -220,7 +221,11 @@ class ThreadPoolTaskRunner(TaskRunner[PrefectConcurrentFuture]):
  def __init__(self, max_workers: Optional[int] = None):
  super().__init__()
  self._executor: Optional[ThreadPoolExecutor] = None
- self._max_workers = sys.maxsize if max_workers is None else max_workers
+ self._max_workers = (
+ (PREFECT_TASK_RUNNER_THREAD_POOL_MAX_WORKERS.value() or sys.maxsize)
+ if max_workers is None
+ else max_workers
+ )
  self._cancel_events: Dict[uuid.UUID, threading.Event] = {}

  def duplicate(self) -> "ThreadPoolTaskRunner":
prefect/tasks.py CHANGED
@@ -4,6 +4,7 @@ Module containing the base workflow task class and decorator - for most use case
  # This file requires type-checking with pyright because mypy does not yet support PEP612
  # See https://github.com/python/mypy/issues/8645

+ import asyncio
  import datetime
  import inspect
  from copy import copy
@@ -309,7 +310,9 @@ class Task(Generic[P, R]):
  Callable[["TaskRunContext", Dict[str, Any]], Optional[str]]
  ] = None,
  cache_expiration: Optional[datetime.timedelta] = None,
- task_run_name: Optional[Union[Callable[[], str], str]] = None,
+ task_run_name: Optional[
+ Union[Callable[[], str], Callable[[Dict[str, Any]], str], str]
+ ] = None,
  retries: Optional[int] = None,
  retry_delay_seconds: Optional[
  Union[
@@ -370,7 +373,7 @@ class Task(Generic[P, R]):

  # the task is considered async if its function is async or an async
  # generator
- self.isasync = inspect.iscoroutinefunction(
+ self.isasync = asyncio.iscoroutinefunction(
  self.fn
  ) or inspect.isasyncgenfunction(self.fn)

@@ -530,7 +533,9 @@ class Task(Generic[P, R]):
  cache_key_fn: Optional[
  Callable[["TaskRunContext", Dict[str, Any]], Optional[str]]
  ] = None,
- task_run_name: Optional[Union[Callable[[], str], str, Type[NotSet]]] = NotSet,
+ task_run_name: Optional[
+ Union[Callable[[], str], Callable[[Dict[str, Any]], str], str, Type[NotSet]]
+ ] = NotSet,
  cache_expiration: Optional[datetime.timedelta] = None,
  retries: Union[int, Type[NotSet]] = NotSet,
  retry_delay_seconds: Union[
@@ -1059,6 +1064,8 @@ class Task(Generic[P, R]):
  task runner. This call only blocks execution while the task is being submitted,
  once it is submitted, the flow function will continue executing.

+ This method is always synchronous, even if the underlying user function is asynchronous.
+
  Args:
  *args: Arguments to run the task with
  return_state: Return the result of the flow run wrapped in a
@@ -1111,7 +1118,7 @@ class Task(Generic[P, R]):
  >>>
  >>> @flow
  >>> async def my_flow():
- >>> await my_async_task.submit()
+ >>> my_async_task.submit()

  Run a sync task in an async flow

@@ -1169,51 +1176,73 @@ class Task(Generic[P, R]):

  @overload
  def map(
- self: "Task[P, NoReturn]",
- *args: P.args,
- **kwargs: P.kwargs,
- ) -> PrefectFutureList[PrefectFuture[NoReturn]]:
+ self: "Task[P, R]",
+ *args: Any,
+ return_state: Literal[True],
+ wait_for: Optional[Iterable[Union[PrefectFuture[T], T]]] = ...,
+ deferred: bool = ...,
+ **kwargs: Any,
+ ) -> List[State[R]]:
  ...

  @overload
  def map(
- self: "Task[P, Coroutine[Any, Any, T]]",
- *args: P.args,
- **kwargs: P.kwargs,
- ) -> PrefectFutureList[PrefectFuture[T]]:
+ self: "Task[P, R]",
+ *args: Any,
+ wait_for: Optional[Iterable[Union[PrefectFuture[T], T]]] = ...,
+ deferred: bool = ...,
+ **kwargs: Any,
+ ) -> PrefectFutureList[R]:
  ...

  @overload
  def map(
- self: "Task[P, T]",
- *args: P.args,
- **kwargs: P.kwargs,
- ) -> PrefectFutureList[PrefectFuture[T]]:
+ self: "Task[P, R]",
+ *args: Any,
+ return_state: Literal[True],
+ wait_for: Optional[Iterable[Union[PrefectFuture[T], T]]] = ...,
+ deferred: bool = ...,
+ **kwargs: Any,
+ ) -> List[State[R]]:
  ...

  @overload
  def map(
- self: "Task[P, Coroutine[Any, Any, T]]",
- *args: P.args,
- return_state: Literal[True],
- **kwargs: P.kwargs,
- ) -> PrefectFutureList[State[T]]:
+ self: "Task[P, R]",
+ *args: Any,
+ wait_for: Optional[Iterable[Union[PrefectFuture[T], T]]] = ...,
+ deferred: bool = ...,
+ **kwargs: Any,
+ ) -> PrefectFutureList[R]:
  ...

  @overload
  def map(
- self: "Task[P, T]",
- *args: P.args,
+ self: "Task[P, Coroutine[Any, Any, R]]",
+ *args: Any,
  return_state: Literal[True],
- **kwargs: P.kwargs,
- ) -> PrefectFutureList[State[T]]:
+ wait_for: Optional[Iterable[Union[PrefectFuture[T], T]]] = ...,
+ deferred: bool = ...,
+ **kwargs: Any,
+ ) -> List[State[R]]:
+ ...
+
+ @overload
+ def map(
+ self: "Task[P, Coroutine[Any, Any, R]]",
+ *args: Any,
+ return_state: Literal[False],
+ wait_for: Optional[Iterable[Union[PrefectFuture[T], T]]] = ...,
+ deferred: bool = ...,
+ **kwargs: Any,
+ ) -> PrefectFutureList[R]:
  ...

  def map(
  self,
  *args: Any,
  return_state: bool = False,
- wait_for: Optional[Iterable[PrefectFuture]] = None,
+ wait_for: Optional[Iterable[Union[PrefectFuture[T], T]]] = None,
  deferred: bool = False,
  **kwargs: Any,
  ):
@@ -1234,6 +1263,8 @@ class Task(Generic[P, R]):
  also blocks while the tasks are being submitted, once they are
  submitted, the flow function will continue executing.

+ This method is always synchronous, even if the underlying user function is asynchronous.
+
  Args:
  *args: Iterable and static arguments to run the tasks with
  return_state: Return a list of Prefect States that wrap the results
@@ -1556,7 +1587,9 @@ def task(
  Callable[["TaskRunContext", Dict[str, Any]], Optional[str]]
  ] = None,
  cache_expiration: Optional[datetime.timedelta] = None,
- task_run_name: Optional[Union[Callable[[], str], str]] = None,
+ task_run_name: Optional[
+ Union[Callable[[], str], Callable[[Dict[str, Any]], str], str]
+ ] = None,
  retries: int = 0,
  retry_delay_seconds: Union[
  float,
@@ -1593,7 +1626,9 @@ def task(
  Callable[["TaskRunContext", Dict[str, Any]], Optional[str]], None
  ] = None,
  cache_expiration: Optional[datetime.timedelta] = None,
- task_run_name: Optional[Union[Callable[[], str], str]] = None,
+ task_run_name: Optional[
+ Union[Callable[[], str], Callable[[Dict[str, Any]], str], str]
+ ] = None,
  retries: Optional[int] = None,
  retry_delay_seconds: Union[
  float, int, List[float], Callable[[int], List[float]], None
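The widened `task_run_name` annotation above admits a callable that receives the bound parameters. A hedged sketch of what that enables (the task and naming function are illustrative; the resolution logic is shown in the `prefect/utilities/engine.py` hunk further below):

```python
from typing import Any, Dict

from prefect import task


def name_from_parameters(parameters: Dict[str, Any]) -> str:
    # Receives the task's bound parameters because its signature has a
    # `parameters` argument.
    return f"process-order-{parameters['order_id']}"


@task(task_run_name=name_from_parameters)
def process_order(order_id: int) -> str:
    return f"processed {order_id}"
```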
prefect/utilities/asyncutils.py CHANGED
@@ -83,7 +83,7 @@ def is_async_fn(
  while hasattr(func, "__wrapped__"):
  func = func.__wrapped__

- return inspect.iscoroutinefunction(func)
+ return asyncio.iscoroutinefunction(func)


  def is_async_gen_fn(func):
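Why `is_async_fn` unwraps `__wrapped__` before the check: a sync wrapper created with `functools.wraps` hides the underlying coroutine function from `asyncio.iscoroutinefunction`. A standalone illustration (plain stdlib, not Prefect code):

```python
import asyncio
import functools


def passthrough(fn):
    @functools.wraps(fn)  # sets wrapper.__wrapped__ = fn
    def wrapper(*args, **kwargs):
        return fn(*args, **kwargs)
    return wrapper


@passthrough
async def fetch() -> str:
    return "data"


print(asyncio.iscoroutinefunction(fetch))              # False: fetch is the sync wrapper
print(asyncio.iscoroutinefunction(fetch.__wrapped__))  # True: the original coroutine function
```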
prefect/utilities/engine.py CHANGED
@@ -503,7 +503,7 @@ def propose_state_sync(
  # Avoid fetching the result unless it is cached, otherwise we defeat
  # the purpose of disabling `cache_result_in_memory`
  result = state.result(raise_on_failure=False, fetch=True)
- if inspect.isawaitable(result):
+ if asyncio.iscoroutine(result):
  result = run_coro_as_sync(result)
  elif isinstance(state.data, ResultRecord):
  result = state.data.result
@@ -685,7 +685,15 @@ def _resolve_custom_flow_run_name(flow: Flow, parameters: Dict[str, Any]) -> str

  def _resolve_custom_task_run_name(task: Task, parameters: Dict[str, Any]) -> str:
  if callable(task.task_run_name):
- task_run_name = task.task_run_name()
+ sig = inspect.signature(task.task_run_name)
+
+ # If the callable accepts a 'parameters' kwarg, pass the entire parameters dict
+ if "parameters" in sig.parameters:
+ task_run_name = task.task_run_name(parameters=parameters)
+ else:
+ # If it doesn't expect parameters, call it without arguments
+ task_run_name = task.task_run_name()
+
  if not isinstance(task_run_name, str):
  raise TypeError(
  f"Callable {task.task_run_name} for 'task_run_name' returned type"
@@ -870,7 +878,7 @@ def resolve_to_final_result(expr, context):
  )

  _result = state.result(raise_on_failure=False, fetch=True)
- if inspect.isawaitable(_result):
+ if asyncio.iscoroutine(_result):
  _result = run_coro_as_sync(_result)
  return _result

prefect/utilities/services.py CHANGED
@@ -10,7 +10,7 @@ import anyio
  import httpx

  from prefect.logging.loggers import get_logger
- from prefect.settings import PREFECT_CLIENT_ENABLE_METRICS, PREFECT_CLIENT_METRICS_PORT
+ from prefect.settings import PREFECT_CLIENT_METRICS_ENABLED, PREFECT_CLIENT_METRICS_PORT
  from prefect.utilities.collections import distinct
  from prefect.utilities.math import clamped_poisson_interval

@@ -160,8 +160,8 @@ _metrics_server: Optional[Tuple[WSGIServer, threading.Thread]] = None

  def start_client_metrics_server():
  """Start the process-wide Prometheus metrics server for client metrics (if enabled
- with `PREFECT_CLIENT_ENABLE_METRICS`) on the port `PREFECT_CLIENT_METRICS_PORT`."""
- if not PREFECT_CLIENT_ENABLE_METRICS:
+ with `PREFECT_CLIENT_METRICS_ENABLED`) on the port `PREFECT_CLIENT_METRICS_PORT`."""
+ if not PREFECT_CLIENT_METRICS_ENABLED:
  return

  global _metrics_server
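A hypothetical opt-in sketch for the renamed setting. The setting and function names come from the hunk above; enabling them via environment variables set before Prefect is imported is an assumption about typical usage, not documented behavior.

```python
import os

os.environ["PREFECT_CLIENT_METRICS_ENABLED"] = "true"  # formerly PREFECT_CLIENT_ENABLE_METRICS
os.environ["PREFECT_CLIENT_METRICS_PORT"] = "4201"

from prefect.utilities.services import start_client_metrics_server

start_client_metrics_server()  # no-op when the setting is falsy
```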
prefect/workers/base.py CHANGED
@@ -1,5 +1,5 @@
  import abc
- import inspect
+ import asyncio
  import threading
  from contextlib import AsyncExitStack
  from functools import partial
@@ -137,6 +137,12 @@ class BaseJobConfiguration(BaseModel):
  variables = cls._get_base_config_defaults(
  variables_schema.get("properties", {})
  )
+
+ # copy variable defaults for `env` to job config before they're replaced by
+ # deployment overrides
+ if variables.get("env"):
+ job_config["env"] = variables.get("env")
+
  variables.update(values)

  # deep merge `env`
@@ -1078,7 +1084,7 @@ class BaseWorker(abc.ABC):
  task_status.started()

  result = fn(*args, **kwargs)
- if inspect.iscoroutine(result):
+ if asyncio.iscoroutine(result):
  await result

  await self._runs_task_group.start(wrapper)
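An illustrative sketch (not the worker's exact code path) of what the `env` fix preserves: defaults from the base job template should survive a deployment override that only sets some keys, because the merge is per-key. The URLs and keys below are made up.

```python
template_defaults = {
    "env": {"PREFECT_API_URL": "http://prefect.example/api", "LOG_LEVEL": "INFO"}
}
deployment_overrides = {"env": {"LOG_LEVEL": "DEBUG"}}

# Deep merge of `env`: override keys win, untouched defaults are kept.
merged_env = {**template_defaults["env"], **deployment_overrides["env"]}
print(merged_env)  # {'PREFECT_API_URL': 'http://prefect.example/api', 'LOG_LEVEL': 'DEBUG'}
```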
{prefect_client-3.0.10.dist-info → prefect_client-3.0.11.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: prefect-client
- Version: 3.0.10
+ Version: 3.0.11
  Summary: Workflow orchestration and management.
  Home-page: https://www.prefect.io
  Author: Prefect Technologies, Inc.
@@ -47,7 +47,7 @@ Requires-Dist: prometheus-client>=0.20.0
  Requires-Dist: pydantic<3.0.0,>=2.7
  Requires-Dist: pydantic-core<3.0.0,>=2.12.0
  Requires-Dist: pydantic-extra-types<3.0.0,>=2.8.2
- Requires-Dist: pydantic-settings
+ Requires-Dist: pydantic-settings>2.2.1
  Requires-Dist: python-dateutil<3.0.0,>=2.8.2
  Requires-Dist: python-slugify<9.0,>=5.0
  Requires-Dist: pyyaml<7.0.0,>=5.4.1