lionagi 0.15.4__py3-none-any.whl → 0.15.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,26 @@
1
def check_async_postgres_available():
    """Probe whether the async-postgres optional dependencies are importable.

    Returns:
        ``True`` when every required import resolves; otherwise an
        ``ImportError`` *instance* (returned, not raised) describing how to
        install the ``postgres`` extra. Callers decide whether to raise it.
    """
    try:
        # The imports themselves are the availability check; the names are
        # intentionally unused here.
        import sqlalchemy as sa  # noqa: F401
        from pydapter.extras.async_postgres_ import (  # noqa: F401
            AsyncPostgresAdapter,
        )
        from sqlalchemy.ext.asyncio import create_async_engine  # noqa: F401
    except Exception:
        return ImportError(
            "This adapter requires postgres option to be installed. "
            'Please install them using `uv pip install "lionagi[postgres]"`.'
        )
    return True
13
+
14
+
15
def check_postgres_available():
    """Probe whether the sync-postgres optional dependencies are importable.

    Returns:
        ``True`` when every required import resolves; otherwise an
        ``ImportError`` *instance* (returned, not raised) describing how to
        install the ``postgres`` extra. Callers decide whether to raise it.
    """
    try:
        # Import side effects are the whole point; the names go unused.
        from pydapter.model_adapters.postgres_model import (  # noqa: F401
            PostgresModelAdapter,
        )
        from sqlalchemy import String  # noqa: F401
        from sqlalchemy.orm import DeclarativeBase  # noqa: F401
    except Exception:
        return ImportError(
            "This adapter requires postgres option to be installed. "
            'Please install them using `uv pip install "lionagi[postgres]"`.'
        )
    return True
@@ -12,16 +12,16 @@ from typing import Any, ClassVar, TypeVar
12
12
 
13
13
  from pydapter.exceptions import QueryError
14
14
 
15
- try:
16
- import sqlalchemy as sa
17
- from pydapter.extras.async_postgres_ import AsyncPostgresAdapter
18
- from sqlalchemy.ext.asyncio import create_async_engine
19
- except ImportError:
20
- raise ImportError(
21
- "This adapter requires postgres option to be installed. "
22
- 'Please install them using `uv pip install "lionagi[postgres]"`.'
23
- )
15
+ from ._utils import check_async_postgres_available
24
16
 
17
+ _ASYNC_POSTGRES_AVAILABLE = check_async_postgres_available()
18
+
19
+ if isinstance(_ASYNC_POSTGRES_AVAILABLE, ImportError):
20
+ raise _ASYNC_POSTGRES_AVAILABLE
21
+
22
+ import sqlalchemy as sa
23
+ from pydapter.extras.async_postgres_ import AsyncPostgresAdapter
24
+ from sqlalchemy.ext.asyncio import create_async_engine
25
25
 
26
26
  T = TypeVar("T")
27
27
 
@@ -7,19 +7,19 @@ PostgreSQL persistence for lionagi Nodes.
7
7
 
8
8
  from __future__ import annotations
9
9
 
10
- from typing import Any, Union, get_args, get_origin
10
+ from typing import Union, get_args, get_origin
11
11
 
12
12
  from pydantic import BaseModel
13
13
 
14
- try:
15
- from pydapter.model_adapters.postgres_model import PostgresModelAdapter
16
- from sqlalchemy import String
17
- from sqlalchemy.orm import DeclarativeBase
18
- except ImportError:
19
- raise ImportError(
20
- "This adapter requires postgres option to be installed. "
21
- 'Please install them using `uv pip install "lionagi[postgres]"`.'
22
- )
14
+ from ._utils import check_postgres_available
15
+
16
+ _POSTGRES_AVAILABLE = check_postgres_available()
17
+ if isinstance(_POSTGRES_AVAILABLE, ImportError):
18
+ raise _POSTGRES_AVAILABLE
19
+
20
+ from pydapter.model_adapters.postgres_model import PostgresModelAdapter
21
+ from sqlalchemy import String
22
+ from sqlalchemy.orm import DeclarativeBase
23
23
 
24
24
 
25
25
  class LionAGIPostgresAdapter(PostgresModelAdapter):
lionagi/ln/_async_call.py CHANGED
@@ -37,7 +37,7 @@ async def alcall(
37
37
  retry_initial_deplay: float = 0,
38
38
  retry_backoff: float = 1,
39
39
  retry_default: Any = Unset,
40
- retry_timeout: float = 0,
40
+ retry_timeout: float = None,
41
41
  retry_attempts: int = 0,
42
42
  max_concurrent: int | None = None,
43
43
  throttle_period: float | None = None,
@@ -277,8 +277,8 @@ class AlcallParams(Params):
277
277
  async def __call__(
278
278
  self, input_: list[Any], func: Callable[..., T], **kw
279
279
  ) -> list[T]:
280
- f = self.as_partial()
281
- return await f(input_, func, **kw)
280
+ kwargs = {**self.default_kw(), **kw}
281
+ return await alcall(input_, func, **kwargs)
282
282
 
283
283
 
284
284
  @dataclass(slots=True, init=False, frozen=True)
@@ -290,5 +290,7 @@ class BcallParams(AlcallParams):
290
290
  async def __call__(
291
291
  self, input_: list[Any], func: Callable[..., T], **kw
292
292
  ) -> list[T]:
293
- f = self.as_partial()
294
- return await f(input_, func, self.batch_size, **kw)
293
+ kwargs = {**self.default_kw(), **kw}
294
+ func = self._func
295
+
296
+ return await func(input_, func, self.batch_size, **kwargs)
lionagi/ln/_models.py CHANGED
@@ -1,5 +1,4 @@
1
1
  from dataclasses import dataclass, field
2
- from functools import partial
3
2
  from typing import Any, ClassVar
4
3
 
5
4
  from typing_extensions import override
@@ -9,8 +8,9 @@ from ._types import Undefined, Unset, is_sentinel
9
8
  __all__ = ("Params", "DataClass")
10
9
 
11
10
 
12
- class _SentinelAware:
13
- """Metaclass to ensure sentinels are handled correctly in subclasses."""
11
+ @dataclass(slots=True, frozen=True, init=False)
12
+ class Params:
13
+ """Base class for parameters used in various functions."""
14
14
 
15
15
  _none_as_sentinel: ClassVar[bool] = False
16
16
  """If True, None is treated as a sentinel value."""
@@ -26,16 +26,6 @@ class _SentinelAware:
26
26
  )
27
27
  """Class variable cache to store allowed keys for parameters."""
28
28
 
29
- @classmethod
30
- def allowed(cls) -> set[str]:
31
- """Return the keys of the parameters."""
32
- if cls._allowed_keys:
33
- return cls._allowed_keys
34
- cls._allowed_keys = {
35
- i for i in cls.__dataclass_fields__.keys() if not i.startswith("_")
36
- }
37
- return cls._allowed_keys
38
-
39
29
  @classmethod
40
30
  def _is_sentinel(cls, value: Any) -> bool:
41
31
  """Check if a value is a sentinel (Undefined or Unset)."""
@@ -47,23 +37,15 @@ class _SentinelAware:
47
37
  """Post-initialization to ensure all fields are set."""
48
38
  self._validate()
49
39
 
50
- def _validate(self) -> None:
51
- pass
52
-
53
- def to_dict(self) -> dict[str, str]:
54
- data = {}
55
- for k in self.allowed():
56
- if not self._is_sentinel(v := getattr(self, k)):
57
- data[k] = v
58
- return data
59
-
60
-
61
- @dataclass(slots=True, frozen=True, init=False)
62
- class Params(_SentinelAware):
63
- """Base class for parameters used in various functions."""
64
-
65
- _func: ClassVar[Any] = Unset
66
- _particial_func: ClassVar[Any] = Unset
40
+ @classmethod
41
+ def allowed(cls) -> set[str]:
42
+ """Return the keys of the parameters."""
43
+ if cls._allowed_keys:
44
+ return cls._allowed_keys
45
+ cls._allowed_keys = {
46
+ i for i in cls.__dataclass_fields__.keys() if not i.startswith("_")
47
+ }
48
+ return cls._allowed_keys
67
49
 
68
50
  @override
69
51
  def _validate(self) -> None:
@@ -79,38 +61,58 @@ class Params(_SentinelAware):
79
61
  for k in self.allowed():
80
62
  _validate_strict(k)
81
63
 
82
- def as_partial(self) -> Any:
83
- # if partial function is already cached, return it
84
- if self._particial_func is not Unset:
85
- return self._particial_func
86
-
87
- # validate is there is a function to apply
88
- if self._func is Unset:
89
- raise ValueError("No function defined for partial application.")
90
- if not callable(self._func):
91
- raise TypeError(
92
- f"Expected a callable, got {type(self._func).__name__}."
93
- )
64
+ def default_kw(self) -> Any:
94
65
 
95
66
  # create a partial function with the current parameters
96
67
  dict_ = self.to_dict()
97
- if not dict_:
98
- self._particial_func = self._func
99
- return self._func
100
68
 
101
69
  # handle kwargs if present, handle both 'kwargs' and 'kw'
102
70
  kw_ = {}
103
71
  kw_.update(dict_.pop("kwargs", {}))
104
72
  kw_.update(dict_.pop("kw", {}))
105
73
  dict_.update(kw_)
106
- self._particial_func = partial(self._func, **dict_)
107
- return self._particial_func
74
+ return dict_
75
+
76
+ def to_dict(self) -> dict[str, str]:
77
+ data = {}
78
+ for k in self.allowed():
79
+ if not self._is_sentinel(v := getattr(self, k, Undefined)):
80
+ data[k] = v
81
+ return data
108
82
 
109
83
 
110
84
  @dataclass(slots=True)
111
- class DataClass(_SentinelAware):
85
+ class DataClass:
112
86
  """A base class for data classes with strict parameter handling."""
113
87
 
88
+ _none_as_sentinel: ClassVar[bool] = False
89
+ """If True, None is treated as a sentinel value."""
90
+
91
+ _strict: ClassVar[bool] = False
92
+ """No sentinels allowed if strict is True."""
93
+
94
+ _prefill_unset: ClassVar[bool] = True
95
+ """If True, unset fields are prefilled with Unset."""
96
+
97
+ _allowed_keys: ClassVar[set[str]] = field(
98
+ default=set(), init=False, repr=False
99
+ )
100
+ """Class variable cache to store allowed keys for parameters."""
101
+
102
+ def __post_init__(self):
103
+ """Post-initialization to ensure all fields are set."""
104
+ self._validate()
105
+
106
+ @classmethod
107
+ def allowed(cls) -> set[str]:
108
+ """Return the keys of the parameters."""
109
+ if cls._allowed_keys:
110
+ return cls._allowed_keys
111
+ cls._allowed_keys = {
112
+ i for i in cls.__dataclass_fields__.keys() if not i.startswith("_")
113
+ }
114
+ return cls._allowed_keys
115
+
114
116
  @override
115
117
  def _validate(self) -> None:
116
118
  def _validate_strict(k):
@@ -124,3 +126,18 @@ class DataClass(_SentinelAware):
124
126
 
125
127
  for k in self.allowed():
126
128
  _validate_strict(k)
129
+
130
+ def to_dict(self) -> dict[str, str]:
131
+ data = {}
132
+ print(self.allowed())
133
+ for k in type(self).allowed():
134
+ if not self._is_sentinel(v := getattr(self, k)):
135
+ data[k] = v
136
+ return data
137
+
138
+ @classmethod
139
+ def _is_sentinel(cls, value: Any) -> bool:
140
+ """Check if a value is a sentinel (Undefined or Unset)."""
141
+ if value is None and cls._none_as_sentinel:
142
+ return True
143
+ return is_sentinel(value)
@@ -24,6 +24,7 @@ from .resource_tracker import (
24
24
  from .task import TaskGroup, create_task_group
25
25
  from .utils import is_coro_func
26
26
 
27
+ ConcurrencyEvent = Event
27
28
  __all__ = (
28
29
  "TaskGroup",
29
30
  "create_task_group",
@@ -48,4 +49,5 @@ __all__ = (
48
49
  "cleanup_check",
49
50
  "get_global_tracker",
50
51
  "is_coro_func",
52
+ "ConcurrencyEvent",
51
53
  )
@@ -12,9 +12,7 @@ using Events for synchronization and CapacityLimiter for concurrency control.
12
12
  import os
13
13
  from typing import Any
14
14
 
15
- from lionagi.ln.concurrency.primitives import CapacityLimiter
16
- from lionagi.ln.concurrency.primitives import Event as ConcurrencyEvent
17
- from lionagi.ln.concurrency.task import create_task_group
15
+ from lionagi.ln import AlcallParams, CapacityLimiter, ConcurrencyEvent
18
16
  from lionagi.operations.node import Operation
19
17
  from lionagi.protocols.types import EventStatus, Graph
20
18
  from lionagi.session.branch import Branch
@@ -36,6 +34,7 @@ class DependencyAwareExecutor:
36
34
  max_concurrent: int = 5,
37
35
  verbose: bool = False,
38
36
  default_branch: Branch | None = None,
37
+ alcall_params: AlcallParams | None = None,
39
38
  ):
40
39
  """Initialize the executor.
41
40
 
@@ -52,6 +51,7 @@ class DependencyAwareExecutor:
52
51
  self.context = context or {}
53
52
  self.max_concurrent = max_concurrent
54
53
  self.verbose = verbose
54
+ self._alcall = alcall_params or AlcallParams()
55
55
  self._default_branch = default_branch
56
56
 
57
57
  # Track results and completion
@@ -92,11 +92,12 @@ class DependencyAwareExecutor:
92
92
  )
93
93
  limiter = CapacityLimiter(capacity)
94
94
 
95
- # Execute all operations using structured concurrency
96
- async with create_task_group() as tg:
97
- for node in self.graph.internal_nodes.values():
98
- if isinstance(node, Operation):
99
- await tg.start_soon(self._execute_operation, node, limiter)
95
+ nodes = [
96
+ n
97
+ for n in self.graph.internal_nodes.values()
98
+ if isinstance(n, Operation)
99
+ ]
100
+ await self._alcall(nodes, self._execute_operation, limiter=limiter)
100
101
 
101
102
  # Return results - only include actually completed operations
102
103
  completed_ops = [
@@ -507,6 +508,7 @@ async def flow(
507
508
  parallel: bool = True,
508
509
  max_concurrent: int = None,
509
510
  verbose: bool = False,
511
+ alcall_params: AlcallParams | None = None,
510
512
  ) -> dict[str, Any]:
511
513
  """Execute a graph using structured concurrency primitives.
512
514
 
@@ -521,6 +523,7 @@ async def flow(
521
523
  parallel: Whether to execute independent operations in parallel
522
524
  max_concurrent: Max concurrent operations (1 if not parallel)
523
525
  verbose: Enable verbose logging
526
+ alcall_params: Parameters for async parallel call execution
524
527
 
525
528
  Returns:
526
529
  Execution results with completed operations and final context
@@ -528,8 +531,7 @@ async def flow(
528
531
 
529
532
  # Handle concurrency limits
530
533
  if not parallel:
531
- max_concurrent = 1 # Force sequential execution
532
- # If max_concurrent is None, it means no limit
534
+ max_concurrent = 1
533
535
 
534
536
  # Execute using the dependency-aware executor
535
537
  executor = DependencyAwareExecutor(
@@ -539,6 +541,7 @@ async def flow(
539
541
  max_concurrent=max_concurrent,
540
542
  verbose=verbose,
541
543
  default_branch=branch,
544
+ alcall_params=alcall_params,
542
545
  )
543
546
 
544
547
  return await executor.execute()