lionagi 0.14.11__py3-none-any.whl → 0.15.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. lionagi/libs/concurrency.py +1 -0
  2. lionagi/libs/token_transform/perplexity.py +2 -1
  3. lionagi/libs/token_transform/symbolic_compress_context.py +8 -7
  4. lionagi/ln/__init__.py +49 -0
  5. lionagi/ln/_async_call.py +294 -0
  6. lionagi/ln/_list_call.py +130 -0
  7. lionagi/ln/_models.py +126 -0
  8. lionagi/ln/_to_list.py +176 -0
  9. lionagi/ln/_types.py +146 -0
  10. lionagi/{libs → ln}/concurrency/__init__.py +4 -2
  11. lionagi/ln/concurrency/utils.py +15 -0
  12. lionagi/models/hashable_model.py +1 -2
  13. lionagi/operations/brainstorm/brainstorm.py +2 -1
  14. lionagi/operations/flow.py +3 -3
  15. lionagi/operations/manager.py +10 -8
  16. lionagi/operations/node.py +2 -3
  17. lionagi/operations/plan/plan.py +3 -3
  18. lionagi/protocols/generic/event.py +47 -6
  19. lionagi/protocols/generic/pile.py +1 -1
  20. lionagi/service/hooks/_types.py +2 -2
  21. lionagi/session/branch.py +14 -3
  22. lionagi/session/session.py +55 -25
  23. lionagi/utils.py +90 -510
  24. lionagi/version.py +1 -1
  25. {lionagi-0.14.11.dist-info → lionagi-0.15.1.dist-info}/METADATA +4 -4
  26. {lionagi-0.14.11.dist-info → lionagi-0.15.1.dist-info}/RECORD +36 -30
  27. lionagi/libs/hash/__init__.py +0 -3
  28. lionagi/libs/hash/manager.py +0 -26
  29. /lionagi/{libs/hash/hash_dict.py → ln/_hash.py} +0 -0
  30. /lionagi/{libs → ln}/concurrency/cancel.py +0 -0
  31. /lionagi/{libs → ln}/concurrency/errors.py +0 -0
  32. /lionagi/{libs → ln}/concurrency/patterns.py +0 -0
  33. /lionagi/{libs → ln}/concurrency/primitives.py +0 -0
  34. /lionagi/{libs → ln}/concurrency/resource_tracker.py +0 -0
  35. /lionagi/{libs → ln}/concurrency/task.py +0 -0
  36. /lionagi/{libs → ln}/concurrency/throttle.py +0 -0
  37. {lionagi-0.14.11.dist-info → lionagi-0.15.1.dist-info}/WHEEL +0 -0
  38. {lionagi-0.14.11.dist-info → lionagi-0.15.1.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1 @@
1
+ from ..ln.concurrency import * # backward compatibility
@@ -5,11 +5,12 @@ from timeit import default_timer as timer
5
5
  import numpy as np
6
6
  from pydantic import BaseModel
7
7
 
8
+ from lionagi.ln import alcall, lcall
8
9
  from lionagi.protocols.generic.event import EventStatus
9
10
  from lionagi.protocols.generic.log import Log
10
11
  from lionagi.service.connections.api_calling import APICalling
11
12
  from lionagi.service.imodel import iModel
12
- from lionagi.utils import alcall, lcall, to_dict, to_list
13
+ from lionagi.utils import to_dict, to_list
13
14
 
14
15
 
15
16
  @dataclass
@@ -2,9 +2,10 @@ from collections.abc import Callable
2
2
  from pathlib import Path
3
3
  from typing import Literal
4
4
 
5
+ from lionagi.ln import alcall
5
6
  from lionagi.service.imodel import iModel
6
7
  from lionagi.session.branch import Branch
7
- from lionagi.utils import alcall, get_bins
8
+ from lionagi.utils import get_bins
8
9
 
9
10
  from .base import TokenMapping, TokenMappingTemplate
10
11
  from .synthlang_.base import SynthlangFramework, SynthlangTemplate
@@ -130,13 +131,13 @@ async def symbolic_compress_context(
130
131
  _inner,
131
132
  max_concurrent=max_concurrent,
132
133
  retry_default=None,
133
- num_retries=3,
134
- throttle_period=throttle_period,
134
+ retry_attempts=3,
135
+ retry_backoff=2,
135
136
  retry_delay=1,
136
- backoff_factor=2,
137
- flatten=True,
138
- dropna=True,
139
- unique_output=True,
137
+ throttle_period=throttle_period,
138
+ output_flatten=True,
139
+ output_dropna=True,
140
+ output_unique=True,
140
141
  )
141
142
  text = "\n".join(results)
142
143
  text = DEFAULT_INVOKATION_PROMPT + text
lionagi/ln/__init__.py ADDED
from ._async_call import AlcallParams, BcallParams, alcall, bcall
from ._hash import hash_dict
from ._list_call import LcallParams, lcall
from ._models import DataClass, Params
from ._to_list import ToListParams, to_list
from ._types import (
    Enum,
    KeysDict,
    MaybeSentinel,
    MaybeUndefined,
    MaybeUnset,
    SingletonType,
    T,
    Undefined,
    UndefinedType,
    Unset,
    UnsetType,
    is_sentinel,
    not_sentinel,
)
from .concurrency import *  # noqa: F403 — re-export concurrency primitives

__all__ = (
    "Undefined",
    "Unset",
    "MaybeUndefined",
    "MaybeUnset",
    "MaybeSentinel",
    "SingletonType",
    "UndefinedType",
    "UnsetType",
    "KeysDict",
    "T",
    "Enum",  # fix: was listed twice; duplicate entry removed
    "is_sentinel",
    "not_sentinel",
    "Params",
    "DataClass",
    "hash_dict",
    "to_list",
    "ToListParams",
    "lcall",
    "LcallParams",
    "alcall",
    "bcall",
    "AlcallParams",
    "BcallParams",
)
@@ -0,0 +1,294 @@
1
+ import asyncio
2
+ from collections.abc import AsyncGenerator, Callable
3
+ from dataclasses import dataclass
4
+ from typing import Any, ClassVar
5
+
6
+ import anyio
7
+ from pydantic import BaseModel
8
+
9
+ from ._models import Params
10
+ from ._to_list import to_list
11
+ from ._types import T, Unset, not_sentinel
12
+ from .concurrency import Lock as ConcurrencyLock
13
+ from .concurrency import Semaphore, create_task_group, is_coro_func
14
+
15
+ __all__ = (
16
+ "alcall",
17
+ "bcall",
18
+ "AlcallParams",
19
+ "BcallParams",
20
+ )
21
+
22
+
23
async def alcall(
    input_: list[Any],
    func: Callable[..., T],
    /,
    *,
    input_flatten: bool = False,
    input_dropna: bool = False,
    input_unique: bool = False,
    input_flatten_tuple_set: bool = False,
    output_flatten: bool = False,
    output_dropna: bool = False,
    output_unique: bool = False,
    output_flatten_tuple_set: bool = False,
    delay_before_start: float = 0,
    retry_initial_deplay: float = 0,  # NOTE(review): "deplay" typo kept for backward compatibility
    retry_backoff: float = 1,
    retry_default: Any = Unset,
    retry_timeout: float = 0,
    retry_attempts: int = 0,
    max_concurrent: int | None = None,
    throttle_period: float | None = None,
    **kwargs: Any,
) -> list[T]:
    """Asynchronously apply ``func`` to each element of ``input_``.

    Supports optional input sanitization (via :func:`to_list`), a per-call
    timeout, retries with exponential backoff, bounded concurrency, and
    output post-processing. Results are returned in input order.

    Args:
        input_: Items to process; non-list inputs are coerced to a list.
        func: Sync or async callable applied to each item. An iterable
            containing exactly one callable is also accepted.
        input_flatten / input_dropna / input_unique / input_flatten_tuple_set:
            Input sanitization flags; sanitization runs only when
            ``input_flatten`` or ``input_dropna`` is set.
        output_flatten / output_dropna / output_unique / output_flatten_tuple_set:
            Output post-processing flags forwarded to :func:`to_list`.
        delay_before_start: Seconds to sleep before any processing.
        retry_initial_deplay: Delay before the first retry; multiplied by
            ``retry_backoff`` after each retried failure.
        retry_default: Value used for an item whose retries are exhausted;
            when left as ``Unset`` the last exception propagates.
        retry_timeout: Per-call timeout in seconds; 0 or None disables it.
        retry_attempts: Number of retries after the initial attempt.
        max_concurrent: Cap on concurrently running calls.
        throttle_period: Seconds to wait between starting successive tasks.
        **kwargs: Extra keyword arguments forwarded to ``func``.

    Returns:
        List of results, ordered to match the corresponding inputs.

    Raises:
        ValueError: If ``func`` is neither callable nor an iterable
            containing exactly one callable.
        asyncio.TimeoutError: If a call exceeds ``retry_timeout`` and no
            ``retry_default`` is provided after retries are exhausted.
    """
    # Validate func is a single callable (or an iterable containing one).
    if not callable(func):
        try:
            func_list = list(func)
        except TypeError:
            raise ValueError(
                "func must be callable or an iterable containing one callable."
            )
        if len(func_list) != 1 or not callable(func_list[0]):
            raise ValueError("Only one callable function is allowed.")
        func = func_list[0]

    # Sanitize input when requested; otherwise coerce to a list.
    if any((input_flatten, input_dropna)):
        input_ = to_list(
            input_,
            flatten=input_flatten,
            dropna=input_dropna,
            unique=input_unique,
            flatten_tuple_set=input_flatten_tuple_set,
        )
    elif not isinstance(input_, list):
        if isinstance(input_, BaseModel):
            # A Pydantic model iterates over its fields; treat it as one item.
            input_ = [input_]
        else:
            try:
                iter(input_)
                input_ = list(input_)
            except TypeError:
                # Not iterable; wrap as a single item.
                input_ = [input_]

    # Optional initial delay before processing.
    if delay_before_start:
        await anyio.sleep(delay_before_start)

    semaphore = Semaphore(max_concurrent) if max_concurrent else None
    throttle_delay = throttle_period or 0
    coro_func = is_coro_func(func)

    # FIX: treat a falsy timeout (0 or None) as "no timeout". The previous
    # `if retry_timeout is not None` check combined with the default of 0
    # made anyio.move_on_after(0) cancel every awaiting call immediately.
    use_timeout = bool(retry_timeout)

    async def call_func(item: Any) -> T:
        if coro_func:
            # Async function.
            if use_timeout:
                with anyio.move_on_after(retry_timeout) as cancel_scope:
                    result = await func(item, **kwargs)
                if cancel_scope.cancelled_caught:
                    raise asyncio.TimeoutError(
                        f"Function call timed out after {retry_timeout}s"
                    )
                return result
            return await func(item, **kwargs)

        # Sync function: run in a worker thread. FIX: run_sync does not
        # forward keyword arguments to `func` (its kwargs are its own
        # options), so bind item and kwargs in a closure.
        def _sync_call() -> T:
            return func(item, **kwargs)

        if use_timeout:
            with anyio.move_on_after(retry_timeout) as cancel_scope:
                result = await anyio.to_thread.run_sync(_sync_call)
            if cancel_scope.cancelled_caught:
                raise asyncio.TimeoutError(
                    f"Function call timed out after {retry_timeout}s"
                )
            return result
        return await anyio.to_thread.run_sync(_sync_call)

    async def execute_task(i: Any, index: int) -> Any:
        attempts = 0
        current_delay = retry_initial_deplay
        while True:
            try:
                result = await call_func(i)
                return index, result

            # If cancelled, always re-raise — never swallow cancellation.
            except anyio.get_cancelled_exc_class():
                raise

            except Exception:
                attempts += 1
                if attempts <= retry_attempts:
                    if current_delay:
                        await anyio.sleep(current_delay)
                        current_delay *= retry_backoff
                    # Retry loop continues.
                else:
                    # Retries exhausted: fall back to the default if set.
                    if not_sentinel(retry_default):
                        return index, retry_default
                    raise

    async def task_wrapper(item: Any, idx: int) -> Any:
        if semaphore:
            async with semaphore:
                return await execute_task(item, idx)
        return await execute_task(item, idx)

    # Structured concurrency: every task completes (or fails) before we move on.
    results: list[tuple[int, Any]] = []
    results_lock = ConcurrencyLock()  # protects `results` across tasks

    async def run_and_store(item: Any, idx: int):
        result = await task_wrapper(item, idx)
        async with results_lock:
            results.append(result)

    # NOTE(review): `start_soon` is awaited here because the project's task
    # group wrapper exposes it as a coroutine; anyio's native one is sync.
    async with create_task_group() as tg:
        for idx, item in enumerate(input_):
            await tg.start_soon(run_and_store, item, idx)
            # Apply throttle delay between starting tasks.
            if throttle_delay and idx < len(input_) - 1:
                await anyio.sleep(throttle_delay)

    # Restore input order, strip the (index, result) tags, post-process output.
    results.sort(key=lambda x: x[0])
    output_list = [r[1] for r in results]
    return to_list(
        output_list,
        flatten=output_flatten,
        dropna=output_dropna,
        unique=output_unique,
        flatten_tuple_set=output_flatten_tuple_set,
    )
+ )
192
+
193
+
194
async def bcall(
    input_: list[Any],
    func: Callable[..., T],
    /,
    batch_size: int,
    *,
    input_flatten: bool = False,
    input_dropna: bool = False,
    input_unique: bool = False,
    input_flatten_tuple_set: bool = False,
    output_flatten: bool = False,
    output_dropna: bool = False,
    output_unique: bool = False,
    output_flatten_tuple_set: bool = False,
    delay_before_start: float = 0,
    retry_initial_deplay: float = 0,
    retry_backoff: float = 1,
    retry_default: Any = Unset,
    retry_timeout: float = 0,
    retry_attempts: int = 0,
    max_concurrent: int | None = None,
    throttle_period: float | None = None,
    **kwargs: Any,
) -> AsyncGenerator[list[T | tuple[T, float]], None]:
    """Process ``input_`` in batches of ``batch_size`` via :func:`alcall`.

    The input is first flattened and stripped of None values, then split
    into consecutive slices of ``batch_size`` items. Each slice is handed
    to :func:`alcall` with all remaining options forwarded unchanged, and
    the per-batch result list is yielded as soon as the batch completes.
    """
    items = to_list(input_, flatten=True, dropna=True)

    total = len(items)
    start = 0
    while start < total:
        chunk = items[start : start + batch_size]  # noqa: E203
        start += batch_size
        yield await alcall(
            chunk,
            func,
            input_flatten=input_flatten,
            input_dropna=input_dropna,
            input_unique=input_unique,
            input_flatten_tuple_set=input_flatten_tuple_set,
            output_flatten=output_flatten,
            output_dropna=output_dropna,
            output_unique=output_unique,
            output_flatten_tuple_set=output_flatten_tuple_set,
            delay_before_start=delay_before_start,
            retry_initial_deplay=retry_initial_deplay,
            retry_backoff=retry_backoff,
            retry_default=retry_default,
            retry_timeout=retry_timeout,
            retry_attempts=retry_attempts,
            max_concurrent=max_concurrent,
            throttle_period=throttle_period,
            **kwargs,
        )
+ )
243
+
244
+
245
@dataclass(slots=True, init=False, frozen=True)
class AlcallParams(Params):
    """Reusable, frozen parameter bundle for :func:`alcall`.

    Fields left as sentinels are omitted when the bundle is bound to
    ``alcall`` via ``as_partial()``; with ``_none_as_sentinel`` enabled,
    None values are omitted as well.
    """

    # ClassVar attributes
    _none_as_sentinel: ClassVar[bool] = True
    _func: ClassVar[Any] = alcall

    # input processing (see alcall's input_* flags)
    input_flatten: bool
    input_dropna: bool
    input_unique: bool
    input_flatten_tuple_set: bool

    # output processing (forwarded to to_list on alcall's results)
    output_flatten: bool
    output_dropna: bool
    output_unique: bool
    output_flatten_tuple_set: bool

    # retry and timeout
    delay_before_start: float
    # NOTE(review): "deplay" spelling mirrors the alcall parameter name;
    # renaming either side alone would break the keyword binding.
    retry_initial_deplay: float
    retry_backoff: float
    retry_default: Any
    retry_timeout: float
    retry_attempts: int

    # concurrency and throttling
    max_concurrent: int
    throttle_period: float

    # Extra keyword arguments merged into the bound call by as_partial().
    kw: dict[str, Any] = Unset

    async def __call__(
        self, input_: list[Any], func: Callable[..., T], **kw
    ) -> list[T]:
        """Run alcall on ``input_`` with the stored parameters applied."""
        f = self.as_partial()
        return await f(input_, func, **kw)
282
+
283
+
284
@dataclass(slots=True, init=False, frozen=True)
class BcallParams(AlcallParams):
    """Reusable parameter bundle for :func:`bcall` (batched alcall)."""

    _func: ClassVar[Any] = bcall

    # Items per batch; a regular field, so as_partial() binds it by keyword.
    batch_size: int

    async def __call__(
        self, input_: list[Any], func: Callable[..., T], **kw
    ) -> list[T]:
        """Run bcall with the stored parameters, collecting every batch.

        FIX: ``bcall`` is an async *generator*, so it must be iterated —
        the previous ``await f(...)`` raised TypeError. ``batch_size`` is
        already bound by ``as_partial()`` (it is a dataclass field), so
        passing it positionally again caused a duplicate-argument error.
        Results from all batches are flattened into a single list.
        """
        f = self.as_partial()
        results: list[T] = []
        async for batch in f(input_, func, **kw):
            results.extend(batch)
        return results
@@ -0,0 +1,130 @@
1
+ from collections.abc import Callable, Iterable
2
+ from dataclasses import dataclass
3
+ from typing import Any, ClassVar, TypeVar
4
+
5
+ from ._models import Params
6
+ from ._to_list import to_list
7
+
8
+ R = TypeVar("R")
9
+ T = TypeVar("T")
10
+
11
+ __all__ = ("lcall", "LcallParams")
12
+
13
+
14
def lcall(
    input_: Iterable[T] | T,
    func: Callable[[T], R] | Iterable[Callable[[T], R]],
    /,
    *args: Any,
    input_flatten: bool = False,
    input_dropna: bool = False,
    input_unique: bool = False,
    input_use_values: bool = False,
    input_flatten_tuple_set: bool = False,
    output_flatten: bool = False,
    output_dropna: bool = False,
    output_unique: bool = False,
    output_flatten_tuple_set: bool = False,
    **kwargs: Any,
) -> list[R]:
    """Apply function to each element in input list with optional processing.

    Maps a function over input elements and processes results. Can sanitize
    input and output using various filtering options.

    Args:
        input_: Items to process; non-list inputs are coerced to a list.
        func: Callable applied to each item, or an iterable containing
            exactly one callable.
        *args: Extra positional arguments forwarded to ``func``.
        input_*: Input sanitization flags; sanitization runs only when
            ``input_flatten`` or ``input_dropna`` is set.
        output_*: Output post-processing flags forwarded to ``to_list``.
        **kwargs: Extra keyword arguments forwarded to ``func``.

    Returns:
        List of results, one per (sanitized) input element.

    Raises:
        ValueError: If ``func`` is not callable (or an iterable with one
            callable), or if ``output_unique`` is requested without
            ``output_flatten``/``output_dropna``.
    """
    # Validate and extract the single callable.
    if not callable(func):
        try:
            func_list = list(func)
            if len(func_list) != 1 or not callable(func_list[0]):
                raise ValueError(
                    "func must contain exactly one callable function."
                )
            func = func_list[0]
        except TypeError as e:
            raise ValueError(
                "func must be callable or iterable with one callable."
            ) from e

    # output_unique is implemented by to_list, which only runs when flatten
    # or dropna post-processing is requested.
    # FIX: the message previously referred to a nonexistent "unique_output"
    # parameter; the actual flag is output_unique.
    if output_unique and not (output_flatten or output_dropna):
        raise ValueError(
            "output_unique requires output_flatten or output_dropna for post-processing."
        )

    # Sanitize input when requested; otherwise coerce to a list.
    if input_flatten or input_dropna:
        input_ = to_list(
            input_,
            flatten=input_flatten,
            dropna=input_dropna,
            unique=input_unique,
            flatten_tuple_set=input_flatten_tuple_set,
            use_values=input_use_values,
        )
    else:
        if not isinstance(input_, list):
            try:
                input_ = list(input_)
            except TypeError:
                input_ = [input_]

    # Map func over the input. (The redundant `except Exception: raise`
    # from the original was removed — other exceptions propagate anyway.)
    out: list[R] = []
    for item in input_:
        try:
            out.append(func(item, *args, **kwargs))
        except InterruptedError:
            # Best-effort: return whatever was processed before interruption.
            return out

    # Apply output post-processing if requested.
    if output_flatten or output_dropna:
        out = to_list(
            out,
            flatten=output_flatten,
            dropna=output_dropna,
            unique=output_unique,
            flatten_tuple_set=output_flatten_tuple_set,
        )

    return out
100
+
101
+
102
@dataclass(slots=True, frozen=True, init=False)
class LcallParams(Params):
    """Reusable, frozen parameter bundle for :func:`lcall`."""

    _func: ClassVar[Any] = lcall

    # input processing
    input_flatten: bool
    """If True, recursively flatten input to a flat list"""
    input_dropna: bool
    """If True, remove None and undefined values from input."""
    input_unique: bool
    # If True, deduplicate the sanitized input.
    input_use_values: bool
    # If True, extract values from enums/mappings in the input.
    input_flatten_tuple_set: bool
    # If True, tuples and sets in the input are flattened as well.

    # output processing
    output_flatten: bool
    """If True, recursively flatten output to a flat list."""
    output_dropna: bool
    """If True, remove None and undefined values."""
    output_unique: bool
    """If True, remove duplicates (requires output_flatten=True)."""
    output_use_values: bool
    """If True, extract values from enums/mappings."""
    # NOTE(review): lcall() has no `output_use_values` parameter, so a
    # non-sentinel value here would be rejected when the bound partial is
    # called — confirm against lcall's signature.
    output_flatten_tuple_set: bool
    """If True, include tuples and sets in flattening."""

    def __call__(self, input_: Any, *args, **kw) -> list:
        """Invoke lcall on ``input_`` with the stored parameters applied."""
        f = self.as_partial()
        return f(input_, *args, **kw)
lionagi/ln/_models.py ADDED
@@ -0,0 +1,126 @@
1
+ from dataclasses import dataclass, field
2
+ from functools import partial
3
+ from typing import Any, ClassVar
4
+
5
+ from typing_extensions import override
6
+
7
+ from ._types import Undefined, Unset, is_sentinel
8
+
9
+ __all__ = ("Params", "DataClass")
10
+
11
+
12
class _SentinelAware:
    """Mixin providing sentinel-aware field handling.

    Subclasses are expected to be dataclasses: ``allowed()`` reads
    ``__dataclass_fields__`` to enumerate the public fields.
    """

    # If True, None is treated as a sentinel value.
    _none_as_sentinel: ClassVar[bool] = False

    # No sentinels allowed if strict is True.
    _strict: ClassVar[bool] = False

    # If True, unset fields are prefilled with Unset.
    _prefill_unset: ClassVar[bool] = True

    # Per-class cache of the allowed parameter keys.
    # FIX: this was `field(default=set(), init=False, repr=False)`, but this
    # class is not itself a dataclass, so that left a dataclasses.Field
    # object (truthy, non-iterable) as the class attribute — allowed()
    # returned the Field instead of ever computing the key set.
    _allowed_keys: ClassVar[set[str]] = set()

    @classmethod
    def allowed(cls) -> set[str]:
        """Return the non-private dataclass field names of this class."""
        # FIX: consult cls.__dict__ so a subclass never reuses the cache
        # computed for its parent (an inherited cache would hide fields the
        # subclass adds, e.g. BcallParams.batch_size).
        cached = cls.__dict__.get("_allowed_keys")
        if cached:
            return cached
        cls._allowed_keys = {
            i for i in cls.__dataclass_fields__.keys() if not i.startswith("_")
        }
        return cls._allowed_keys

    @classmethod
    def _is_sentinel(cls, value: Any) -> bool:
        """Check if a value is a sentinel (Undefined or Unset)."""
        if value is None and cls._none_as_sentinel:
            return True
        return is_sentinel(value)

    def __post_init__(self):
        """Post-initialization to ensure all fields are set."""
        self._validate()

    def _validate(self) -> None:
        # Subclasses override with their own validation strategy.
        pass

    def to_dict(self) -> dict[str, Any]:
        """Return the non-sentinel field values as a plain dict."""
        data = {}
        for k in self.allowed():
            if not self._is_sentinel(v := getattr(self, k)):
                data[k] = v
        return data
59
+
60
+
61
@dataclass(slots=True, frozen=True, init=False)
class Params(_SentinelAware):
    """Base class for parameters used in various functions.

    Subclasses declare the target function in ``_func``; ``as_partial()``
    binds the instance's non-sentinel field values to it.
    """

    _func: ClassVar[Any] = Unset
    # Optional pre-bound callable a subclass may supply to bypass binding.
    # NOTE(review): the "particial" spelling is kept for backward compatibility.
    _particial_func: ClassVar[Any] = Unset

    @override
    def _validate(self) -> None:
        """Enforce strictness and prefill Undefined fields with Unset."""

        def _validate_strict(k):
            if self._strict and self._is_sentinel(getattr(self, k, Unset)):
                raise ValueError(f"Missing required parameter: {k}")
            if (
                self._prefill_unset
                and getattr(self, k, Undefined) is Undefined
            ):
                # object.__setattr__ bypasses the frozen-dataclass guard.
                object.__setattr__(self, k, Unset)

        for k in self.allowed():
            _validate_strict(k)

    def as_partial(self) -> Any:
        """Return ``_func`` with this instance's non-sentinel fields bound.

        The partial is rebuilt on every call. FIX: the original cached it
        via ``self._particial_func = ...``, which raises
        FrozenInstanceError on a frozen dataclass (and a class-level cache
        would leak one instance's parameters into every other instance of
        the same class).
        """
        cls = type(self)

        # FIX: access _func through the class, not the instance. A plain
        # function stored in a ClassVar is a descriptor: instance access
        # returns it as a bound method with `self` prepended, which would
        # corrupt the partial's positional arguments.
        if cls._particial_func is not Unset:
            return cls._particial_func

        # Validate there is a function to apply.
        if cls._func is Unset:
            raise ValueError("No function defined for partial application.")
        if not callable(cls._func):
            raise TypeError(
                f"Expected a callable, got {type(cls._func).__name__}."
            )

        # Create a partial function with the current parameters.
        dict_ = self.to_dict()
        if not dict_:
            return cls._func

        # Handle kwargs if present; both 'kwargs' and 'kw' are merged in.
        kw_ = {}
        kw_.update(dict_.pop("kwargs", {}))
        kw_.update(dict_.pop("kw", {}))
        dict_.update(kw_)
        return partial(cls._func, **dict_)
108
+
109
+
110
@dataclass(slots=True)
class DataClass(_SentinelAware):
    """A base class for data classes with strict parameter handling."""

    @override
    def _validate(self) -> None:
        """Check strictness and prefill Undefined fields with Unset."""
        for key in self.allowed():
            # Strict mode rejects any sentinel-valued (or missing) field.
            if self._strict and self._is_sentinel(getattr(self, key, Unset)):
                raise ValueError(f"Missing required parameter: {key}")
            # Fields still Undefined are normalized to Unset when enabled.
            if (
                self._prefill_unset
                and getattr(self, key, Undefined) is Undefined
            ):
                setattr(self, key, Unset)