prefect-client 3.1.1__py3-none-any.whl → 3.1.3__py3-none-any.whl

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
@@ -1,73 +1,63 @@
  import asyncio
  import inspect
  from functools import wraps
- from typing import Any, Callable, Coroutine, Protocol, TypeVar, Union
+ from typing import Any, Callable, Coroutine, Optional, TypeVar, Union

  from typing_extensions import ParamSpec

+ from prefect.tasks import Task
+
  R = TypeVar("R")
  P = ParamSpec("P")


- class AsyncDispatchable(Protocol[P, R]):
-     """Protocol for functions decorated with async_dispatch."""
-
-     def __call__(
-         self, *args: P.args, **kwargs: P.kwargs
-     ) -> Union[R, Coroutine[Any, Any, R]]:
-         ...
-
-     aio: Callable[P, Coroutine[Any, Any, R]]
-     sync: Callable[P, R]
-
-
  def is_in_async_context() -> bool:
-     """Check if we're in an async context."""
+     """
+     Returns True if called from within an async context (coroutine or running event loop)
+     """
      try:
-         # First check if we're in a coroutine
-         if asyncio.current_task() is not None:
-             return True
-
-         # Check if we have a loop and it's running
-         loop = asyncio.get_event_loop()
-         return loop.is_running()
+         asyncio.get_running_loop()
+         return True
      except RuntimeError:
          return False


+ def _is_acceptable_callable(obj: Union[Callable, Task]) -> bool:
+     if inspect.iscoroutinefunction(obj):
+         return True
+     if isinstance(obj, Task) and inspect.iscoroutinefunction(obj.fn):
+         return True
+     return False
+
+
  def async_dispatch(
      async_impl: Callable[P, Coroutine[Any, Any, R]],
- ) -> Callable[[Callable[P, R]], AsyncDispatchable[P, R]]:
+ ) -> Callable[[Callable[P, R]], Callable[P, Union[R, Coroutine[Any, Any, R]]]]:
      """
-     Decorator that adds async compatibility to a sync function.
-
-     The decorated function will:
-     - Return a coroutine when in an async context (detected via running event loop)
-     - Run synchronously when in a sync context
-     - Provide .aio for explicit async access
-     - Provide .sync for explicit sync access
+     Decorator that dispatches to either sync or async implementation based on context.

      Args:
-         async_impl: The async implementation to dispatch to when async execution
-             is needed
+         async_impl: The async implementation to dispatch to when in async context
      """
-     if not inspect.iscoroutinefunction(async_impl):
-         raise TypeError(
-             "async_impl must be an async function to dispatch in async contexts"
-         )

-     def decorator(sync_fn: Callable[P, R]) -> AsyncDispatchable[P, R]:
+     def decorator(
+         sync_fn: Callable[P, R],
+     ) -> Callable[P, Union[R, Coroutine[Any, Any, R]]]:
+         if not _is_acceptable_callable(async_impl):
+             raise TypeError("async_impl must be an async function")
+
          @wraps(sync_fn)
          def wrapper(
-             *args: P.args, **kwargs: P.kwargs
+             *args: P.args,
+             _sync: Optional[bool] = None,  # type: ignore
+             **kwargs: P.kwargs,
          ) -> Union[R, Coroutine[Any, Any, R]]:
-             if is_in_async_context():
-                 return async_impl(*args, **kwargs)
-             return sync_fn(*args, **kwargs)
+             should_run_sync = _sync if _sync is not None else not is_in_async_context()
+
+             if should_run_sync:
+                 return sync_fn(*args, **kwargs)
+             return async_impl(*args, **kwargs)

-         # Attach both async and sync implementations directly
-         wrapper.aio = async_impl
-         wrapper.sync = sync_fn
          return wrapper  # type: ignore

      return decorator
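
For reference, a rough sketch of how the reworked decorator behaves once this module is in scope (the `fetch_data` functions below are illustrative names, not part of the package):

```python
import asyncio


async def fetch_data_aio(url: str) -> str:
    await asyncio.sleep(0)  # stand-in for real async work
    return f"async:{url}"


@async_dispatch(fetch_data_aio)
def fetch_data(url: str) -> str:
    return f"sync:{url}"


# Without a running event loop, the sync implementation runs directly.
assert fetch_data("https://example.com") == "sync:https://example.com"

# `_sync=False` forces the async implementation; the call returns a coroutine.
coro = fetch_data("https://example.com", _sync=False)
assert asyncio.run(coro) == "async:https://example.com"
```

Note that the explicit `.aio` / `.sync` attributes from 3.1.1 are gone; callers that relied on them would switch to the new `_sync` keyword instead.
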
prefect/_version.py CHANGED
@@ -8,11 +8,11 @@ import json

  version_json = '''
  {
-  "date": "2024-11-08T12:38:16-0800",
+  "date": "2024-11-19T15:25:34-0600",
   "dirty": true,
   "error": null,
-  "full-revisionid": "6b50a2b9f9d4ebf59703c55e1156c6f79151f1c3",
-  "version": "3.1.1"
+  "full-revisionid": "39b6028cea9f2b0437546ba13cc08bb3bf6d94a4",
+  "version": "3.1.3"
  }
  '''  # END VERSION_JSON

@@ -494,6 +494,7 @@ class OpsgenieWebhook(AbstractAppriseNotificationBlock):
                  entity=self.entity,
                  batch=self.batch,
                  tags=self.tags,
+                 action="new",
              ).url()
          )
          self._start_apprise_client(url)
prefect/cache_policies.py CHANGED
@@ -7,6 +7,7 @@ from typing import TYPE_CHECKING, Any, Callable, Dict, List, Literal, Optional,
  from typing_extensions import Self

  from prefect.context import TaskRunContext
+ from prefect.exceptions import HashError
  from prefect.utilities.hashing import hash_objects

  if TYPE_CHECKING:
@@ -223,7 +224,6 @@ class TaskSource(CachePolicy):
                  lines = task_ctx.task.fn.__code__.co_code
              else:
                  raise
-
          return hash_objects(lines, raise_on_failure=True)


@@ -293,7 +293,18 @@ class Inputs(CachePolicy):
              if key not in exclude:
                  hashed_inputs[key] = val

-         return hash_objects(hashed_inputs, raise_on_failure=True)
+         try:
+             return hash_objects(hashed_inputs, raise_on_failure=True)
+         except HashError as exc:
+             msg = (
+                 f"{exc}\n\n"
+                 "This often occurs when task inputs contain objects that cannot be cached "
+                 "like locks, file handles, or other system resources.\n\n"
+                 "To resolve this, you can:\n"
+                 "  1. Exclude these arguments by defining a custom `cache_key_fn`\n"
+                 "  2. Disable caching by passing `cache_policy=NONE`\n"
+             )
+             raise ValueError(msg) from exc

      def __sub__(self, other: str) -> "CachePolicy":
          if not isinstance(other, str):
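
In practice this turns an opaque hashing failure into an actionable error. A rough illustration of the two suggested workarounds (the task names are made up; `cache_key_fn` and the `NONE` policy are the existing knobs exported by `prefect` and `prefect.cache_policies`):

```python
import threading

from prefect import task
from prefect.cache_policies import NONE


# With the default Inputs-based cache policy, an unhashable argument such as a
# lock would now surface as a ValueError explaining how to fix it.
@task
def use_resource(lock: threading.Lock, value: int) -> int:
    return value


# Workaround 1: compute the cache key only from the hashable arguments.
@task(cache_key_fn=lambda ctx, params: str(params["value"]))
def use_resource_keyed(lock: threading.Lock, value: int) -> int:
    return value


# Workaround 2: opt out of caching for this task entirely.
@task(cache_policy=NONE)
def use_resource_uncached(lock: threading.Lock, value: int) -> int:
    return value
```
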
prefect/client/cloud.py CHANGED
@@ -1,5 +1,6 @@
  import re
  from typing import Any, Dict, List, Optional, cast
+ from uuid import UUID

  import anyio
  import httpx
@@ -12,6 +13,7 @@ from prefect.client.base import PrefectHttpxAsyncClient
  from prefect.client.schemas.objects import (
      IPAllowlist,
      IPAllowlistMyAccessResponse,
+     KeyValueLabels,
      Workspace,
  )
  from prefect.exceptions import ObjectNotFound, PrefectException
@@ -151,6 +153,25 @@ class CloudClient:
          response = await self.get(f"{self.account_base_url}/ip_allowlist/my_access")
          return IPAllowlistMyAccessResponse.model_validate(response)

+     async def update_flow_run_labels(
+         self, flow_run_id: UUID, labels: KeyValueLabels
+     ) -> httpx.Response:
+         """
+         Update the labels for a flow run.
+
+         Args:
+             flow_run_id: The identifier for the flow run to update.
+             labels: A dictionary of labels to update for the flow run.
+
+         Returns:
+             an `httpx.Response` object from the PATCH request
+         """
+
+         return await self._client.patch(
+             f"{self.workspace_base_url}/flow_runs/{flow_run_id}/labels",
+             json=labels,
+         )
+
      async def __aenter__(self):
          await self._client.__aenter__()
          return self
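
A sketch of calling the new endpoint (assumes the process is already configured for Prefect Cloud; `get_cloud_client` is the existing helper in `prefect.client.cloud`, and the UUID below is a placeholder):

```python
import asyncio
from uuid import UUID

from prefect.client.cloud import get_cloud_client


async def label_run(flow_run_id: UUID) -> None:
    async with get_cloud_client() as client:
        # Values may be strings, numbers, or booleans, per KeyValueLabels.
        await client.update_flow_run_labels(
            flow_run_id,
            {"team": "data-platform", "priority": 1, "ad-hoc": True},
        )


# asyncio.run(label_run(UUID("00000000-0000-0000-0000-000000000000")))
```
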
@@ -2613,7 +2613,7 @@ class PrefectClient:
              "heartbeat_interval_seconds": heartbeat_interval_seconds,
          }
          if worker_metadata:
-             params["worker_metadata"] = worker_metadata.model_dump(mode="json")
+             params["metadata"] = worker_metadata.model_dump(mode="json")
          if get_worker_id:
              params["return_id"] = get_worker_id

@@ -23,6 +23,9 @@ from pydantic import (
      HttpUrl,
      IPvAnyNetwork,
      SerializationInfo,
+     StrictBool,
+     StrictFloat,
+     StrictInt,
      Tag,
      field_validator,
      model_serializer,
@@ -68,6 +71,8 @@ if TYPE_CHECKING:

  R = TypeVar("R", default=Any)

+ KeyValueLabels = dict[str, Union[StrictBool, StrictInt, StrictFloat, str]]
+

  DEFAULT_BLOCK_SCHEMA_VERSION = "non-versioned"
  DEFAULT_AGENT_WORK_POOL_NAME = "default-agent-pool"
@@ -492,6 +497,9 @@ class FlowRunPolicy(PrefectBaseModel):
      resuming: Optional[bool] = Field(
          default=False, description="Indicates if this run is resuming from a pause."
      )
+     retry_type: Optional[Literal["in_process", "reschedule"]] = Field(
+         default=None, description="The type of retry this run is undergoing."
+     )

      @model_validator(mode="before")
      @classmethod
@@ -555,6 +563,11 @@ class FlowRun(ObjectBaseModel):
          description="A list of tags on the flow run",
          examples=[["tag-1", "tag-2"]],
      )
+     labels: KeyValueLabels = Field(
+         default_factory=dict,
+         description="Prefect Cloud: A dictionary of key-value labels. Values can be strings, numbers, or booleans.",
+         examples=[{"key": "value1", "key2": 42}],
+     )
      parent_task_run_id: Optional[UUID] = Field(
          default=None,
          description=(
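
A quick sketch of the new schema pieces (it assumes a `FlowRun` can be constructed from just a `flow_id`, since the other fields shown here have defaults):

```python
from uuid import uuid4

from pydantic import TypeAdapter

from prefect.client.schemas.objects import FlowRun, FlowRunPolicy, KeyValueLabels

# KeyValueLabels is a plain dict alias whose values use strict scalar types,
# so a string like "3" is not silently coerced into an int label.
labels = TypeAdapter(KeyValueLabels).validate_python(
    {"env": "prod", "replicas": 3, "canary": False}
)

# Flow runs now carry labels (empty by default) alongside tags, and the run
# policy records whether a retry happens in process or is rescheduled.
flow_run = FlowRun(flow_id=uuid4())
assert flow_run.labels == {}

policy = FlowRunPolicy(retry_type="reschedule")
```
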
prefect/events/filters.py CHANGED
@@ -83,17 +83,18 @@ class EventOccurredFilter(EventDataFilter):

  class EventNameFilter(EventDataFilter):
      prefix: Optional[List[str]] = Field(
-         None, description="Only include events matching one of these prefixes"
+         default=None, description="Only include events matching one of these prefixes"
      )
      exclude_prefix: Optional[List[str]] = Field(
-         None, description="Exclude events matching one of these prefixes"
+         default=None, description="Exclude events matching one of these prefixes"
      )

      name: Optional[List[str]] = Field(
-         None, description="Only include events matching one of these names exactly"
+         default=None,
+         description="Only include events matching one of these names exactly",
      )
      exclude_name: Optional[List[str]] = Field(
-         None, description="Exclude events matching one of these names exactly"
+         default=None, description="Exclude events matching one of these names exactly"
      )

      def includes(self, event: Event) -> bool:
@@ -230,17 +231,20 @@ class EventFilter(EventDataFilter):
          description="Filter criteria for when the events occurred",
      )
      event: Optional[EventNameFilter] = Field(
-         None,
+         default=None,
          description="Filter criteria for the event name",
      )
      any_resource: Optional[EventAnyResourceFilter] = Field(
-         None, description="Filter criteria for any resource involved in the event"
+         default=None,
+         description="Filter criteria for any resource involved in the event",
      )
      resource: Optional[EventResourceFilter] = Field(
-         None, description="Filter criteria for the resource of the event"
+         default=None,
+         description="Filter criteria for the resource of the event",
      )
      related: Optional[EventRelatedFilter] = Field(
-         None, description="Filter criteria for the related resources of the event"
+         default=None,
+         description="Filter criteria for the related resources of the event",
      )
      id: EventIDFilter = Field(
          default_factory=lambda: EventIDFilter(id=[]),
@@ -248,6 +252,6 @@ class EventFilter(EventDataFilter):
      )

      order: EventOrder = Field(
-         EventOrder.DESC,
+         default=EventOrder.DESC,
          description="The order to return filtered events",
      )
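
These changes only make the field defaults explicit; filter construction is unchanged. For example (field names taken from the classes above, values illustrative):

```python
from datetime import datetime, timedelta, timezone

from prefect.events.filters import EventFilter, EventNameFilter, EventOccurredFilter

now = datetime.now(timezone.utc)

# Same behavior as before: unset filters default to None and ordering to DESC.
event_filter = EventFilter(
    occurred=EventOccurredFilter(since=now - timedelta(hours=1), until=now),
    event=EventNameFilter(prefix=["prefect.flow-run."]),
)
```
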
prefect/exceptions.py CHANGED
@@ -443,3 +443,7 @@ class ProfileSettingsValidationError(PrefectException):

      def __init__(self, errors: List[Tuple[Any, ValidationError]]) -> None:
          self.errors = errors
+
+
+ class HashError(PrefectException):
+     """Raised when hashing objects fails"""
prefect/flow_engine.py CHANGED
@@ -22,8 +22,11 @@ from typing import (
  )
  from uuid import UUID

+ from opentelemetry import trace
+ from opentelemetry.trace import Tracer, get_tracer
  from typing_extensions import ParamSpec

+ import prefect
  from prefect import Task
  from prefect.client.orchestration import SyncPrefectClient, get_client
  from prefect.client.schemas import FlowRun, TaskRun
@@ -124,6 +127,10 @@ class FlowRunEngine(Generic[P, R]):
      _client: Optional[SyncPrefectClient] = None
      short_circuit: bool = False
      _flow_run_name_set: bool = False
+     _tracer: Tracer = field(
+         default_factory=lambda: get_tracer("prefect", prefect.__version__)
+     )
+     _span: Optional[trace.Span] = None

      def __post_init__(self):
          if self.flow is None and self.flow_run_id is None:
@@ -233,6 +240,17 @@ class FlowRunEngine(Generic[P, R]):
          self.flow_run.state = state  # type: ignore
          self.flow_run.state_name = state.name  # type: ignore
          self.flow_run.state_type = state.type  # type: ignore
+
+         if self._span:
+             self._span.add_event(
+                 state.name,
+                 {
+                     "prefect.state.message": state.message or "",
+                     "prefect.state.type": state.type,
+                     "prefect.state.name": state.name or state.type,
+                     "prefect.state.id": str(state.id),
+                 },
+             )
          return state

      def result(self, raise_on_failure: bool = True) -> "Union[R, State, None]":
@@ -281,6 +299,9 @@ class FlowRunEngine(Generic[P, R]):
          )
          self.set_state(terminal_state)
          self._return_value = resolved_result
+
+         self._end_span_on_success()
+
          return result

      def handle_exception(
@@ -311,6 +332,9 @@ class FlowRunEngine(Generic[P, R]):
              )
              state = self.set_state(Running())
          self._raised = exc
+
+         self._end_span_on_error(exc, state.message)
+
          return state

      def handle_timeout(self, exc: TimeoutError) -> None:
@@ -329,6 +353,8 @@ class FlowRunEngine(Generic[P, R]):
          self.set_state(state)
          self._raised = exc

+         self._end_span_on_error(exc, message)
+
      def handle_crash(self, exc: BaseException) -> None:
          state = run_coro_as_sync(exception_to_crashed_state(exc))
          self.logger.error(f"Crash detected! {state.message}")
@@ -336,6 +362,23 @@ class FlowRunEngine(Generic[P, R]):
          self.set_state(state, force=True)
          self._raised = exc

+         self._end_span_on_error(exc, state.message)
+
+     def _end_span_on_success(self):
+         if not self._span:
+             return
+         self._span.set_status(trace.Status(trace.StatusCode.OK))
+         self._span.end(time.time_ns())
+         self._span = None
+
+     def _end_span_on_error(self, exc: BaseException, description: Optional[str]):
+         if not self._span:
+             return
+         self._span.record_exception(exc)
+         self._span.set_status(trace.Status(trace.StatusCode.ERROR, description))
+         self._span.end(time.time_ns())
+         self._span = None
+
      def load_subflow_run(
          self,
          parent_task_run: TaskRun,
@@ -578,6 +621,18 @@ class FlowRunEngine(Generic[P, R]):
                  flow_version=self.flow.version,
                  empirical_policy=self.flow_run.empirical_policy,
              )
+
+         self._span = self._tracer.start_span(
+             name=self.flow_run.name,
+             attributes={
+                 **self.flow_run.labels,
+                 "prefect.run.type": "flow",
+                 "prefect.run.id": str(self.flow_run.id),
+                 "prefect.tags": self.flow_run.tags,
+                 "prefect.flow.name": self.flow.name,
+             },
+         )
+
          try:
              yield self

@@ -632,7 +687,7 @@ class FlowRunEngine(Generic[P, R]):

      @contextmanager
      def start(self) -> Generator[None, None, None]:
-         with self.initialize_run():
+         with self.initialize_run(), trace.use_span(self._span):
              self.begin_run()

              if self.state.is_running():
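
The engine now opens an OpenTelemetry span per flow run and records each state transition as a span event. A rough way to observe those spans locally (assumes the `opentelemetry-sdk` package is installed and that the engine's `get_tracer("prefect", ...)` call picks up the globally configured tracer provider; `my_flow` is illustrative):

```python
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import ConsoleSpanExporter, SimpleSpanProcessor

from prefect import flow

# Register a provider before any flow runs so the engine's tracer uses it.
provider = TracerProvider()
provider.add_span_processor(SimpleSpanProcessor(ConsoleSpanExporter()))
trace.set_tracer_provider(provider)


@flow
def my_flow() -> str:
    return "done"


if __name__ == "__main__":
    # Expect one span per flow run carrying prefect.run.type, prefect.run.id,
    # prefect.tags, and prefect.flow.name attributes, plus one event per state change.
    my_flow()
```
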
@@ -32,7 +32,6 @@ from prefect.settings import (
      PREFECT_LOGGING_TO_API_BATCH_SIZE,
      PREFECT_LOGGING_TO_API_MAX_LOG_SIZE,
      PREFECT_LOGGING_TO_API_WHEN_MISSING_FLOW,
-     get_current_settings,
  )


@@ -241,10 +240,12 @@ class APILogHandler(logging.Handler):

  class WorkerAPILogHandler(APILogHandler):
      def emit(self, record: logging.LogRecord):
-         if get_current_settings().experiments.worker_logging_to_api_enabled:
-             super().emit(record)
-         else:
+         # Open-source API servers do not currently support worker logs, and
+         # worker logs only have an associated worker ID when connected to Cloud,
+         # so we won't send worker logs to the API unless they have a worker ID.
+         if not getattr(record, "worker_id", None):
              return
+         super().emit(record)

      def prepare(self, record: logging.LogRecord) -> Dict[str, Any]:
          """
prefect/settings/base.py CHANGED
@@ -17,6 +17,7 @@ from pydantic_settings import (

  from prefect.settings.sources import (
      EnvFilterSettingsSource,
+     FilteredDotEnvSettingsSource,
      PrefectTomlConfigSettingsSource,
      ProfileSettingsTomlLoader,
      PyprojectTomlConfigSettingsSource,
@@ -43,13 +44,14 @@ class PrefectBaseSettings(BaseSettings):
          See https://docs.pydantic.dev/latest/concepts/pydantic_settings/#customise-settings-sources
          """
          env_filter = set()
-         for field in settings_cls.model_fields.values():
+         for field_name, field in settings_cls.model_fields.items():
              if field.validation_alias is not None and isinstance(
                  field.validation_alias, AliasChoices
              ):
                  for alias in field.validation_alias.choices:
                      if isinstance(alias, AliasPath) and len(alias.path) > 0:
                          env_filter.add(alias.path[0])
+             env_filter.add(field_name)
          return (
              init_settings,
              EnvFilterSettingsSource(
@@ -62,7 +64,18 @@ class PrefectBaseSettings(BaseSettings):
                  env_parse_enums=cls.model_config.get("env_parse_enums"),
                  env_filter=list(env_filter),
              ),
-             dotenv_settings,
+             FilteredDotEnvSettingsSource(
+                 settings_cls,
+                 env_file=cls.model_config.get("env_file"),
+                 env_file_encoding=cls.model_config.get("env_file_encoding"),
+                 case_sensitive=cls.model_config.get("case_sensitive"),
+                 env_prefix=cls.model_config.get("env_prefix"),
+                 env_nested_delimiter=cls.model_config.get("env_nested_delimiter"),
+                 env_ignore_empty=cls.model_config.get("env_ignore_empty"),
+                 env_parse_none_str=cls.model_config.get("env_parse_none_str"),
+                 env_parse_enums=cls.model_config.get("env_parse_enums"),
+                 env_blacklist=list(env_filter),
+             ),
              file_secret_settings,
              PrefectTomlConfigSettingsSource(settings_cls),
              PyprojectTomlConfigSettingsSource(settings_cls),
@@ -92,7 +105,7 @@ class PrefectBaseSettings(BaseSettings):
              elif (value := env.get(key)) is not None:
                  env_variables[
                      f"{self.model_config.get('env_prefix')}{key.upper()}"
-                 ] = str(value)
+                 ] = _to_environment_variable_value(value)
          return env_variables

      @model_serializer(
@@ -191,3 +204,9 @@ def _build_settings_config(
          pyproject_toml_table_header=("tool", "prefect", *path),
          json_schema_extra=_add_environment_variables,
      )
+
+
+ def _to_environment_variable_value(value: Any) -> str:
+     if isinstance(value, (list, set, tuple)):
+         return ",".join(str(v) for v in value)
+     return str(value)
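
The new helper changes how collection-valued settings are rendered when settings are exported as environment variables. A sketch of the effect (assumes the usual `to_environment_variables(exclude_unset=...)` signature on the settings object; the retry-codes value is illustrative):

```python
import os

# Set before importing prefect so the value is picked up by the settings sources.
os.environ["PREFECT_CLIENT_RETRY_EXTRA_CODES"] = "429,502,503"

from prefect.settings import get_current_settings

env = get_current_settings().to_environment_variables(exclude_unset=False)

# With _to_environment_variable_value, collection values round-trip as
# comma-separated strings (e.g. "429,502,503") rather than a Python repr
# such as "{429, 502, 503}".
print(env["PREFECT_CLIENT_RETRY_EXTRA_CODES"])
```
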
@@ -18,11 +18,6 @@ class ExperimentsSettings(PrefectBaseSettings):
          ),
      )

-     worker_logging_to_api_enabled: bool = Field(
-         default=False,
-         description="Enables the logging of worker logs to Prefect Cloud.",
-     )
-
      telemetry_enabled: bool = Field(
          default=False,
          description="Enables sending telemetry to Prefect Cloud.",
@@ -10,10 +10,15 @@ from pydantic import AliasChoices
  from pydantic.fields import FieldInfo
  from pydantic_settings import (
      BaseSettings,
+     DotEnvSettingsSource,
      EnvSettingsSource,
      PydanticBaseSettingsSource,
  )
- from pydantic_settings.sources import ConfigFileSourceMixin
+ from pydantic_settings.sources import (
+     ENV_FILE_SENTINEL,
+     ConfigFileSourceMixin,
+     DotenvType,
+ )

  from prefect.settings.constants import DEFAULT_PREFECT_HOME, DEFAULT_PROFILES_PATH
  from prefect.utilities.collections import get_from_dict
@@ -61,6 +66,44 @@ class EnvFilterSettingsSource(EnvSettingsSource):
          }


+ class FilteredDotEnvSettingsSource(DotEnvSettingsSource):
+     def __init__(
+         self,
+         settings_cls: type[BaseSettings],
+         env_file: Optional[DotenvType] = ENV_FILE_SENTINEL,
+         env_file_encoding: Optional[str] = None,
+         case_sensitive: Optional[bool] = None,
+         env_prefix: Optional[str] = None,
+         env_nested_delimiter: Optional[str] = None,
+         env_ignore_empty: Optional[bool] = None,
+         env_parse_none_str: Optional[str] = None,
+         env_parse_enums: Optional[bool] = None,
+         env_blacklist: Optional[List[str]] = None,
+     ) -> None:
+         super().__init__(
+             settings_cls,
+             env_file,
+             env_file_encoding,
+             case_sensitive,
+             env_prefix,
+             env_nested_delimiter,
+             env_ignore_empty,
+             env_parse_none_str,
+             env_parse_enums,
+         )
+         self.env_blacklist = env_blacklist
+         if self.env_blacklist:
+             if isinstance(self.env_vars, dict):
+                 for key in self.env_blacklist:
+                     self.env_vars.pop(key, None)
+             else:
+                 self.env_vars = {
+                     key: value
+                     for key, value in self.env_vars.items()  # type: ignore
+                     if key.lower() not in env_blacklist
+                 }
+
+
  class ProfileSettingsTomlLoader(PydanticBaseSettingsSource):
      """
      Custom pydantic settings source to load profile settings from a toml file.
@@ -111,21 +154,34 @@ class ProfileSettingsTomlLoader(PydanticBaseSettingsSource):
      ) -> Tuple[Any, str, bool]:
          """Concrete implementation to get the field value from the profile settings"""
          if field.validation_alias:
+             # Use validation alias as the key to ensure profile value does not
+             # override higher priority sources. Lower priority sources that use the
+             # field name can override higher priority sources that use the
+             # validation alias as seen in https://github.com/PrefectHQ/prefect/issues/15981
              if isinstance(field.validation_alias, str):
                  value = self.profile_settings.get(field.validation_alias.upper())
                  if value is not None:
-                     return value, field_name, self.field_is_complex(field)
+                     return value, field.validation_alias, self.field_is_complex(field)
              elif isinstance(field.validation_alias, AliasChoices):
+                 value = None
+                 lowest_priority_alias = next(
+                     choice
+                     for choice in reversed(field.validation_alias.choices)
+                     if isinstance(choice, str)
+                 )
                  for alias in field.validation_alias.choices:
                      if not isinstance(alias, str):
                          continue
                      value = self.profile_settings.get(alias.upper())
                      if value is not None:
-                         return value, field_name, self.field_is_complex(field)
-
-         value = self.profile_settings.get(
-             f"{self.config.get('env_prefix','')}{field_name.upper()}"
-         )
+                         return (
+                             value,
+                             lowest_priority_alias,
+                             self.field_is_complex(field),
+                         )
+
+         name = f"{self.config.get('env_prefix','')}{field_name.upper()}"
+         value = self.profile_settings.get(name)
          return value, field_name, self.field_is_complex(field)

      def __call__(self) -> Dict[str, Any]:
@@ -164,7 +220,22 @@ class TomlConfigSettingsSourceBase(PydanticBaseSettingsSource, ConfigFileSourceM
              # if the value is a dict, it is likely a nested settings object and a nested
              # source will handle it
              value = None
-         return value, field_name, self.field_is_complex(field)
+         name = field_name
+         # Use validation alias as the key to ensure profile value does not
+         # override higher priority sources. Lower priority sources that use the
+         # field name can override higher priority sources that use the
+         # validation alias as seen in https://github.com/PrefectHQ/prefect/issues/15981
+         if value is not None:
+             if field.validation_alias and isinstance(field.validation_alias, str):
+                 name = field.validation_alias
+             elif field.validation_alias and isinstance(
+                 field.validation_alias, AliasChoices
+             ):
+                 for alias in reversed(field.validation_alias.choices):
+                     if isinstance(alias, str):
+                         name = alias
+                         break
+         return value, name, self.field_is_complex(field)

      def __call__(self) -> Dict[str, Any]:
          """Called by pydantic to get the settings from our custom source"""