prefect-client 3.4.5.dev4__py3-none-any.whl → 3.4.6.dev1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
prefect/_build_info.py CHANGED
@@ -1,5 +1,5 @@
  # Generated by versioningit
- __version__ = "3.4.5.dev4"
- __build_date__ = "2025-06-05 08:09:20.805818+00:00"
- __git_commit__ = "92cd0a62e663cace1fdbb654f3533c368622d8c6"
+ __version__ = "3.4.6.dev1"
+ __build_date__ = "2025-06-10 08:09:30.415238+00:00"
+ __git_commit__ = "cd3c98b5dbb76efc127c1ee052df55873c1723dc"
  __dirty__ = False
prefect/assets/__init__.py ADDED
@@ -0,0 +1,4 @@
+ from prefect.assets.core import Asset, AssetProperties, add_asset_metadata
+ from prefect.assets.materialize import materialize
+
+ __all__ = ["Asset", "AssetProperties", "materialize", "add_asset_metadata"]
prefect/assets/core.py ADDED
@@ -0,0 +1,75 @@
+ from __future__ import annotations
+
+ from typing import Any, ClassVar, Optional
+
+ from pydantic import ConfigDict, Field
+
+ from prefect._internal.schemas.bases import PrefectBaseModel
+ from prefect.types import URILike
+
+
+ class AssetProperties(PrefectBaseModel):
+     """
+     Metadata properties to configure on an Asset
+     """
+
+     model_config: ClassVar[ConfigDict] = ConfigDict(frozen=True)
+
+     name: Optional[str] = Field(
+         default=None, description="Human readable name of the Asset."
+     )
+     url: Optional[str] = Field(
+         default=None, description="Visitable url to view the Asset."
+     )
+     description: Optional[str] = Field(
+         default=None, description="Description of the Asset."
+     )
+     owners: Optional[list[str]] = Field(
+         default=None, description="Owners of the Asset."
+     )
+
+
+ class Asset(PrefectBaseModel):
+     """
+     Assets are objects that represent materialized data,
+     providing a way to track lineage and dependencies.
+     """
+
+     model_config: ClassVar[ConfigDict] = ConfigDict(frozen=True)
+
+     key: URILike
+     properties: Optional[AssetProperties] = Field(
+         default=None,
+         description="Properties of the asset. "
+         "Setting this will overwrite properties of a known asset.",
+     )
+
+     def __repr__(self) -> str:
+         return f"Asset(key={self.key!r})"
+
+     def __hash__(self) -> int:
+         return hash(self.key)
+
+     def add_metadata(self, metadata: dict[str, Any]) -> None:
+         from prefect.context import AssetContext
+
+         asset_ctx = AssetContext.get()
+         if not asset_ctx:
+             raise RuntimeError(
+                 "Unable add Asset metadata when not inside of an AssetContext"
+             )
+
+         asset_ctx.add_asset_metadata(self.key, metadata)
+
+
+ def add_asset_metadata(asset: str | Asset, metadata: dict[str, Any]) -> None:
+     from prefect.context import AssetContext
+
+     asset_ctx = AssetContext.get()
+     if not asset_ctx:
+         raise RuntimeError(
+             "Unable to call `add_asset_metadata` when not inside of an AssetContext"
+         )
+
+     asset_key = asset if isinstance(asset, str) else asset.key
+     asset_ctx.add_asset_metadata(asset_key, metadata)
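
A minimal usage sketch of the Asset model added above; the key and property values here are illustrative, not taken from the package:

from prefect.assets import Asset, AssetProperties

# Illustrative asset: the key must be a URI-like string (URILike).
orders = Asset(
    key="s3://warehouse/orders.parquet",
    properties=AssetProperties(
        name="Orders table",
        description="Daily orders extract",
        owners=["data-eng"],
        url="https://warehouse.example.com/orders",
    ),
)

# Asset is frozen and hashes by key, so assets can be deduplicated in sets.
assert hash(orders) == hash(Asset(key="s3://warehouse/orders.parquet"))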
prefect/assets/materialize.py ADDED
@@ -0,0 +1,42 @@
+ from __future__ import annotations
+
+ from typing import TYPE_CHECKING, Callable, TypeVar, Union
+
+ from typing_extensions import ParamSpec, Unpack
+
+ from .core import Asset
+
+ T = TypeVar("T")
+ P = ParamSpec("P")
+ R = TypeVar("R")
+
+ if TYPE_CHECKING:
+     from prefect.tasks import MaterializingTask, TaskOptions
+
+
+ def materialize(
+     *assets: Union[str, Asset],
+     by: str | None = None,
+     **task_kwargs: Unpack[TaskOptions],
+ ) -> Callable[[Callable[P, R]], MaterializingTask[P, R]]:
+     """
+     Decorator for materializing assets.
+
+     Args:
+         *assets: Assets to materialize
+         by: An optional tool that is ultimately responsible for materializing the asset e.g. "dbt" or "spark"
+         **task_kwargs: Additional task configuration
+     """
+     if not assets:
+         raise TypeError(
+             "materialize requires at least one asset argument, e.g. `@materialize(asset)`"
+         )
+
+     from prefect.tasks import MaterializingTask
+
+     def decorator(fn: Callable[P, R]) -> MaterializingTask[P, R]:
+         return MaterializingTask(
+             fn=fn, assets=assets, materialized_by=by, **task_kwargs
+         )
+
+     return decorator
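
Combined with prefect/assets/core.py above, the decorator can be used roughly as follows; the flow, asset key, and metadata are hypothetical:

from prefect import flow
from prefect.assets import add_asset_metadata, materialize

@materialize("s3://warehouse/events.parquet", by="spark")
def build_events() -> None:
    # ... write the data, then attach metadata to the materialized asset;
    # add_asset_metadata only works inside the materializing task's AssetContext.
    add_asset_metadata("s3://warehouse/events.parquet", {"row_count": 10_000})

@flow
def pipeline() -> None:
    build_events()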
prefect/context.py CHANGED
@@ -7,13 +7,23 @@ For more user-accessible information about the current run, see [`prefect.runtim
  """
 
  import asyncio
+ import json
  import os
  import sys
  import warnings
  from collections.abc import AsyncGenerator, Generator, Mapping
  from contextlib import ExitStack, asynccontextmanager, contextmanager
  from contextvars import ContextVar, Token
- from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, TypeVar, Union
+ from typing import (
+     TYPE_CHECKING,
+     Any,
+     Callable,
+     ClassVar,
+     Optional,
+     TypeVar,
+     Union,
+ )
+ from uuid import UUID
 
  from pydantic import BaseModel, ConfigDict, Field, PrivateAttr
  from typing_extensions import Self
@@ -21,6 +31,7 @@ from typing_extensions import Self
  import prefect.settings
  import prefect.types._datetime
  from prefect._internal.compatibility.migration import getattr_migration
+ from prefect.assets import Asset
  from prefect.client.orchestration import PrefectClient, SyncPrefectClient, get_client
  from prefect.client.schemas import FlowRun, TaskRun
  from prefect.events.worker import EventsWorker
@@ -48,9 +59,15 @@ if TYPE_CHECKING:
      from prefect.tasks import Task
 
 
- def serialize_context() -> dict[str, Any]:
+ def serialize_context(
+     asset_ctx_kwargs: Union[dict[str, Any], None] = None,
+ ) -> dict[str, Any]:
      """
      Serialize the current context for use in a remote execution environment.
+
+     Optionally provide asset_ctx_kwargs to create new AssetContext, that will be used
+     in the remote execution environment. This is useful for TaskRunners, who rely on creating the
+     task run in the remote environment.
      """
      flow_run_context = EngineContext.get()
      task_run_context = TaskRunContext.get()
@@ -62,6 +79,11 @@ def serialize_context() -> dict[str, Any]:
          "task_run_context": task_run_context.serialize() if task_run_context else {},
          "tags_context": tags_context.serialize() if tags_context else {},
          "settings_context": settings_context.serialize() if settings_context else {},
+         "asset_context": AssetContext.from_task_and_inputs(
+             **asset_ctx_kwargs
+         ).serialize()
+         if asset_ctx_kwargs
+         else {},
      }
 
 
@@ -112,6 +134,9 @@ def hydrated_context(
              # Set up tags context
              if tags_context := serialized_context.get("tags_context"):
                  stack.enter_context(tags(*tags_context["current_tags"]))
+             # Set up asset context
+             if asset_context := serialized_context.get("asset_context"):
+                 stack.enter_context(AssetContext(**asset_context))
          yield
 
 
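The serialize_context and hydrated_context changes above carry the new AssetContext across process boundaries. A rough sketch of the intended round trip as a task runner might perform it; the task and the run id below are placeholders rather than a real orchestrated run:

from uuid import uuid4

from prefect import task
from prefect.context import hydrated_context, serialize_context

@task
def my_task() -> int:
    return 1

# The keys mirror AssetContext.from_task_and_inputs(task, task_run_id, task_inputs).
payload = serialize_context(
    asset_ctx_kwargs={"task": my_task, "task_run_id": uuid4(), "task_inputs": {}}
)

# In the remote environment, the serialized asset context is re-entered
# alongside the flow run, task run, tags, and settings contexts.
with hydrated_context(payload):
    ...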
@@ -373,6 +398,10 @@ class EngineContext(RunContext):
      # Holds the ID of the object returned by the task run and task run state
      task_run_results: dict[int, State] = Field(default_factory=dict)
 
+     # Tracking information needed to track asset linage between
+     # tasks and materialization
+     task_run_assets: dict[UUID, set[Asset]] = Field(default_factory=dict)
+
      # Events worker to emit events
      events: Optional[EventsWorker] = None
 
@@ -443,6 +472,214 @@ class TaskRunContext(RunContext):
          )
 
 
+ class AssetContext(ContextModel):
+     """
+     The asset context for a materializing task run. Contains all asset-related information needed
+     for asset event emission and downstream asset dependency propagation.
+
+     Attributes:
+         direct_asset_dependencies: Assets that this task directly depends on (from task.asset_deps)
+         downstream_assets: Assets that this task will create/materialize (from MaterializingTask.assets)
+         upstream_assets: Assets from upstream task dependencies
+         materialized_by: Tool that materialized the assets (from MaterializingTask.materialized_by)
+         task_run_id: ID of the associated task run
+         materialization_metadata: Metadata for materialized assets
+     """
+
+     direct_asset_dependencies: set[Asset] = Field(default_factory=set)
+     downstream_assets: set[Asset] = Field(default_factory=set)
+     upstream_assets: set[Asset] = Field(default_factory=set)
+     materialized_by: Optional[str] = None
+     task_run_id: Optional[UUID] = None
+     materialization_metadata: dict[str, dict[str, Any]] = Field(default_factory=dict)
+
+     __var__: ClassVar[ContextVar[Self]] = ContextVar("asset_context")
+
+     @classmethod
+     def from_task_and_inputs(
+         cls,
+         task: "Task[Any, Any]",
+         task_run_id: UUID,
+         task_inputs: Optional[dict[str, set[Any]]] = None,
+     ) -> "AssetContext":
+         """
+         Create an AssetContext from a task and its resolved inputs.
+
+         Args:
+             task: The task instance
+             task_run_id: The task run ID
+             task_inputs: The resolved task inputs (TaskRunResult objects)
+
+         Returns:
+             Configured AssetContext
+         """
+         from prefect.client.schemas import TaskRunResult
+         from prefect.tasks import MaterializingTask
+
+         upstream_assets: set[Asset] = set()
+
+         # Get upstream assets from engine context instead of TaskRunResult.assets
+         flow_ctx = FlowRunContext.get()
+         if task_inputs and flow_ctx:
+             for inputs in task_inputs.values():
+                 for task_input in inputs:
+                     if isinstance(task_input, TaskRunResult):
+                         # Look up assets in the engine context
+                         task_assets = flow_ctx.task_run_assets.get(task_input.id)
+                         if task_assets:
+                             upstream_assets.update(task_assets)
+
+         ctx = cls(
+             direct_asset_dependencies=set(task.asset_deps)
+             if task.asset_deps
+             else set(),
+             downstream_assets=set(task.assets)
+             if isinstance(task, MaterializingTask) and task.assets
+             else set(),
+             upstream_assets=upstream_assets,
+             materialized_by=task.materialized_by
+             if isinstance(task, MaterializingTask)
+             else None,
+             task_run_id=task_run_id,
+         )
+         ctx.update_tracked_assets()
+
+         return ctx
+
+     def add_asset_metadata(self, asset_key: str, metadata: dict[str, Any]) -> None:
+         """
+         Add metadata for a materialized asset.
+
+         Args:
+             asset_key: The asset key
+             metadata: Metadata dictionary to add
+
+         Raises:
+             ValueError: If asset_key is not in downstream_assets
+         """
+         downstream_keys = {asset.key for asset in self.downstream_assets}
+         if asset_key not in downstream_keys:
+             raise ValueError(
+                 "Can only add metadata to assets that are arguments to @materialize"
+             )
+
+         existing = self.materialization_metadata.get(asset_key, {})
+         self.materialization_metadata[asset_key] = existing | metadata
+
+     @staticmethod
+     def asset_as_resource(asset: Asset) -> dict[str, str]:
+         """Convert Asset to event resource format."""
+         resource = {"prefect.resource.id": asset.key}
+
+         if asset.properties:
+             properties_dict = asset.properties.model_dump(exclude_unset=True)
+
+             if "name" in properties_dict:
+                 resource["prefect.resource.name"] = properties_dict["name"]
+
+             if "description" in properties_dict:
+                 resource["prefect.asset.description"] = properties_dict["description"]
+
+             if "url" in properties_dict:
+                 resource["prefect.asset.url"] = properties_dict["url"]
+
+             if "owners" in properties_dict:
+                 resource["prefect.asset.owners"] = json.dumps(properties_dict["owners"])
+
+         return resource
+
+     @staticmethod
+     def asset_as_related(asset: Asset) -> dict[str, str]:
+         """Convert Asset to event related format."""
+         return {
+             "prefect.resource.id": asset.key,
+             "prefect.resource.role": "asset",
+         }
+
+     @staticmethod
+     def related_materialized_by(by: str) -> dict[str, str]:
+         """Create a related resource for the tool that performed the materialization"""
+         return {
+             "prefect.resource.id": by,
+             "prefect.resource.role": "asset-materialized-by",
+         }
+
+     def emit_events(self, state: State) -> None:
+         """
+         Emit asset events
+         """
+
+         from prefect.events import emit_event
+
+         if state.name == "Cached":
+             return
+         elif state.is_failed():
+             event_status = "failed"
+         elif state.is_completed():
+             event_status = "succeeded"
+         else:
+             return
+
+         # If we have no downstream assets, this not a materialization
+         if not self.downstream_assets:
+             return
+
+         # Emit reference events for all upstream assets (direct + inherited)
+         all_upstream_assets = self.upstream_assets | self.direct_asset_dependencies
+         for asset in all_upstream_assets:
+             emit_event(
+                 event="prefect.asset.referenced",
+                 resource=self.asset_as_resource(asset),
+                 related=[],
+             )
+
+         # Emit materialization events for downstream assets
+         upstream_related = [self.asset_as_related(a) for a in all_upstream_assets]
+
+         if self.materialized_by:
+             upstream_related.append(self.related_materialized_by(self.materialized_by))
+
+         for asset in self.downstream_assets:
+             emit_event(
+                 event=f"prefect.asset.materialization.{event_status}",
+                 resource=self.asset_as_resource(asset),
+                 related=upstream_related,
+                 payload=self.materialization_metadata.get(asset.key),
+             )
+
+     def update_tracked_assets(self) -> None:
+         """
+         Update the flow run context with assets that should be propagated downstream.
+         """
+         if not (flow_run_context := FlowRunContext.get()):
+             return
+
+         if not self.task_run_id:
+             return
+
+         if self.downstream_assets:
+             # MaterializingTask: propagate the downstream assets (what we create)
+             assets_for_downstream = set(self.downstream_assets)
+         else:
+             # Regular task: propagate upstream assets + direct dependencies
+             assets_for_downstream = set(
+                 self.upstream_assets | self.direct_asset_dependencies
+             )
+
+         flow_run_context.task_run_assets[self.task_run_id] = assets_for_downstream
+
+     def serialize(self: Self, include_secrets: bool = True) -> dict[str, Any]:
+         """Serialize the AssetContext for distributed execution."""
+         return self.model_dump(
+             # use json serialization so fields that are
+             # sets of pydantic models are serialized
+             mode="json",
+             exclude_unset=True,
+             serialize_as_any=True,
+             context={"include_secrets": include_secrets},
+         )
+
+
  class TagsContext(ContextModel):
      """
      The context for `prefect.tags` management.
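
For reference, a quick sketch of the resource shapes the static helpers above produce, using an illustrative asset:

from prefect.assets import Asset, AssetProperties
from prefect.context import AssetContext

asset = Asset(
    key="s3://warehouse/orders.parquet",
    properties=AssetProperties(name="Orders table", owners=["data-eng"]),
)

AssetContext.asset_as_resource(asset)
# {'prefect.resource.id': 's3://warehouse/orders.parquet',
#  'prefect.resource.name': 'Orders table',
#  'prefect.asset.owners': '["data-eng"]'}

AssetContext.asset_as_related(asset)
# {'prefect.resource.id': 's3://warehouse/orders.parquet',
#  'prefect.resource.role': 'asset'}

Successful runs then emit prefect.asset.materialization.succeeded for each downstream asset and prefect.asset.referenced for each upstream asset, per emit_events above.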
@@ -1222,14 +1222,14 @@ async def deploy(
              " or specify a remote storage location for the flow with `.from_source`."
              " If you are attempting to deploy a flow to a local process work pool,"
              " consider using `flow.serve` instead. See the documentation for more"
-             " information: https://docs.prefect.io/latest/deploy/run-flows-in-local-processes"
+             " information: https://docs.prefect.io/latest/how-to-guides/deployments/run-flows-in-local-processes"
          )
      elif work_pool.type == "process" and not ignore_warnings:
          console.print(
              "Looks like you're deploying to a process work pool. If you're creating a"
              " deployment for local development, calling `.serve` on your flow is a great"
              " way to get started. See the documentation for more information:"
-             " https://docs.prefect.io/latest/deploy/run-flows-in-local-processes "
+             " https://docs.prefect.io/latest/how-to-guides/deployments/run-flows-in-local-processes "
              " Set `ignore_warnings=True` to suppress this message.",
              style="yellow",
          )
prefect/events/clients.py CHANGED
@@ -628,7 +628,7 @@ class PrefectEventSubscriber:
          try:
              await self._reconnect()
          finally:
-             EVENT_WEBSOCKET_CONNECTIONS.labels(self.client_name, "out", "initial")
+             EVENT_WEBSOCKET_CONNECTIONS.labels(self.client_name, "out", "initial").inc()
          return self
 
      async def _reconnect(self) -> None:
@@ -709,7 +709,7 @@ class PrefectEventSubscriber:
                      finally:
                          EVENT_WEBSOCKET_CONNECTIONS.labels(
                              self.client_name, "out", "reconnect"
-                         )
+                         ).inc()
                      assert self._websocket
 
                  while True:
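
Both changes above fix the same bug: Counter.labels(...) only returns the labelled child metric, so without the chained .inc() the connection counter was never incremented. A standalone sketch of the pattern, assuming a prometheus_client-style counter (the counter name and label values are illustrative):

from prometheus_client import Counter

WS_CONNECTIONS = Counter(
    "demo_websocket_connections",
    "Count of websocket connections",
    labelnames=["client", "direction", "state"],
)

WS_CONNECTIONS.labels("my-client", "out", "initial")        # returns the child; no increment
WS_CONNECTIONS.labels("my-client", "out", "initial").inc()  # increments the labelled counter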
prefect/runner/server.py CHANGED
@@ -257,7 +257,7 @@ def _build_generic_endpoint_for_flows(
  @deprecated_callable(
      start_date=datetime(2025, 4, 1),
      end_date=datetime(2025, 10, 1),
-     help="Use background tasks (https://docs.prefect.io/v3/develop/deferred-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
+     help="Use background tasks (https://docs.prefect.io/v3/concepts/tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
  )
  async def build_server(runner: "Runner") -> FastAPI:
      """
@@ -306,7 +306,7 @@ async def build_server(runner: "Runner") -> FastAPI:
  @deprecated_callable(
      start_date=datetime(2025, 4, 1),
      end_date=datetime(2025, 10, 1),
-     help="Use background tasks (https://docs.prefect.io/v3/develop/deferred-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
+     help="Use background tasks (https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
  )
  def start_webserver(runner: "Runner", log_level: str | None = None) -> None:
      """
prefect/runner/submit.py CHANGED
@@ -124,7 +124,7 @@ def submit_to_runner(
  @deprecated_callable(
      start_date=datetime(2025, 4, 1),
      end_date=datetime(2025, 10, 1),
-     help="Use background tasks (https://docs.prefect.io/v3/develop/deferred-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
+     help="Use background tasks (https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
  )
  @sync_compatible
  async def submit_to_runner(
@@ -196,7 +196,7 @@ async def submit_to_runner(
  @deprecated_callable(
      start_date=datetime(2025, 4, 1),
      end_date=datetime(2025, 10, 1),
-     help="Use background tasks (https://docs.prefect.io/v3/develop/deferred-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
+     help="Use background tasks (https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks) or `run_deployment` and `.serve` instead of submitting runs to the Runner webserver.",
  )
  @sync_compatible
  async def wait_for_submitted_runs(
@@ -51,7 +51,7 @@ async def create_events(
      """
      Record a batch of Events.
 
-     For more information, see https://docs.prefect.io/v3/automate/events/events.
+     For more information, see https://docs.prefect.io/v3/concepts/events.
      """
      if ephemeral_request:
          await EventsPipeline().process_events(events)
@@ -23,7 +23,7 @@ async def read_task_workers(
      """
      Read active task workers. Optionally filter by task keys.
 
-     For more information, see https://docs.prefect.io/v3/develop/deferred-tasks.
+     For more information, see https://docs.prefect.io/v3/concepts/flows-and-tasks#background-tasks.
      """
 
      if task_worker_filter and task_worker_filter.task_keys:
@@ -1,17 +1,14 @@
  from __future__ import annotations
 
- import inspect
  import warnings
  from pathlib import Path
  from typing import (
      Annotated,
      Any,
      ClassVar,
-     Dict,
      Iterable,
      Iterator,
      Optional,
-     Union,
  )
 
  import toml
@@ -20,16 +17,15 @@ from pydantic import (
      BeforeValidator,
      ConfigDict,
      Field,
-     TypeAdapter,
      ValidationError,
  )
- from pydantic_settings import BaseSettings
 
  from prefect.exceptions import ProfileSettingsValidationError
  from prefect.settings.constants import DEFAULT_PROFILES_PATH
  from prefect.settings.context import get_current_settings
  from prefect.settings.legacy import Setting, _get_settings_fields
  from prefect.settings.models.root import Settings
+ from prefect.utilities.collections import set_in_dict
 
 
  def _cast_settings(
@@ -69,7 +65,7 @@ class Profile(BaseModel):
      )
      source: Optional[Path] = None
 
-     def to_environment_variables(self) -> Dict[str, str]:
+     def to_environment_variables(self) -> dict[str, str]:
          """Convert the profile settings to a dictionary of environment variables."""
          return {
              setting.name: str(value)
@@ -78,23 +74,40 @@
          }
 
      def validate_settings(self) -> None:
-         errors: list[tuple[Setting, ValidationError]] = []
+         """
+         Validate all settings in this profile by creating a partial Settings object
+         with the nested structure properly constructed using accessor paths.
+         """
+         if not self.settings:
+             return
+
+         nested_settings: dict[str, Any] = {}
+
          for setting, value in self.settings.items():
-             try:
-                 model_fields = Settings.model_fields
-                 annotation = None
-                 for section in setting.accessor.split("."):
-                     annotation = model_fields[section].annotation
-                     if inspect.isclass(annotation) and issubclass(
-                         annotation, BaseSettings
-                     ):
-                         model_fields = annotation.model_fields
-
-                 TypeAdapter(annotation).validate_python(value)
-             except ValidationError as e:
-                 errors.append((setting, e))
-         if errors:
-             raise ProfileSettingsValidationError(errors)
+             set_in_dict(nested_settings, setting.accessor, value)
+
+         try:
+             Settings.model_validate(nested_settings)
+         except ValidationError as e:
+             errors: list[tuple[Setting, ValidationError]] = []
+
+             for error in e.errors():
+                 error_path = ".".join(str(loc) for loc in error["loc"])
+
+                 for setting in self.settings.keys():
+                     if setting.accessor == error_path:
+                         errors.append(
+                             (
+                                 setting,
+                                 ValidationError.from_exception_data(
+                                     "ValidationError", [error]
+                                 ),
+                             )
+                         )
+                         break
+
+             if errors:
+                 raise ProfileSettingsValidationError(errors)
 
 
  class ProfilesCollection:
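
The rewritten validate_settings above folds every dotted accessor into one nested dict and validates it with a single Settings.model_validate call, then maps any errors back to the originating Setting. A small sketch of the accessor-to-nested-dict step, using a stand-in for prefect.utilities.collections.set_in_dict and illustrative accessor paths:

from typing import Any

def set_in_dict_sketch(d: dict[str, Any], path: str, value: Any) -> None:
    # Stand-in helper: walk the dotted path, creating intermediate dicts,
    # and assign the value at the leaf key.
    keys = path.split(".")
    for key in keys[:-1]:
        d = d.setdefault(key, {})
    d[keys[-1]] = value

nested: dict[str, Any] = {}
set_in_dict_sketch(nested, "server.api.port", 4200)
set_in_dict_sketch(nested, "logging.level", "DEBUG")
assert nested == {"server": {"api": {"port": 4200}}, "logging": {"level": "DEBUG"}}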
@@ -106,9 +119,7 @@ class ProfilesCollection:
      The collection may store the name of the active profile.
      """
 
-     def __init__(
-         self, profiles: Iterable[Profile], active: Optional[str] = None
-     ) -> None:
+     def __init__(self, profiles: Iterable[Profile], active: str | None = None) -> None:
          self.profiles_by_name: dict[str, Profile] = {
              profile.name: profile for profile in profiles
          }
@@ -122,7 +133,7 @@
          return set(self.profiles_by_name.keys())
 
      @property
-     def active_profile(self) -> Optional[Profile]:
+     def active_profile(self) -> Profile | None:
          """
          Retrieve the active profile in this collection.
          """
@@ -130,7 +141,7 @@
              return None
          return self[self.active_name]
 
-     def set_active(self, name: Optional[str], check: bool = True) -> None:
+     def set_active(self, name: str | None, check: bool = True) -> None:
          """
          Set the active profile name in the collection.
 
@@ -145,7 +156,7 @@
          self,
          name: str,
          settings: dict[Setting, Any],
-         source: Optional[Path] = None,
+         source: Path | None = None,
      ) -> Profile:
          """
          Add a profile to the collection or update the existing on if the name is already
@@ -201,7 +212,7 @@
          """
          self.profiles_by_name.pop(name)
 
-     def without_profile_source(self, path: Optional[Path]) -> "ProfilesCollection":
+     def without_profile_source(self, path: Path | None) -> "ProfilesCollection":
          """
          Remove profiles that were loaded from a given path.
 
@@ -367,7 +378,7 @@ def load_profile(name: str) -> Profile:
 
 
  def update_current_profile(
-     settings: Dict[Union[str, Setting], Any],
+     settings: dict[str | Setting, Any],
  ) -> Profile:
      """
      Update the persisted data for the profile currently in-use.