lionagi 0.16.3__py3-none-any.whl → 0.17.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lionagi/__init__.py CHANGED
@@ -6,9 +6,8 @@ import logging
 
 from pydantic import BaseModel, Field
 
-from . import _types as types
+# Eager imports for commonly used components
 from . import ln as ln
-from .operations.builder import OperationGraphBuilder as Builder
 from .operations.node import Operation
 from .service.imodel import iModel
 from .session.session import Branch, Session
@@ -17,6 +16,29 @@ from .version import __version__
 logger = logging.getLogger(__name__)
 logger.setLevel(logging.INFO)
 
+# Module-level lazy loading cache
+_lazy_imports = {}
+
+
+def __getattr__(name: str):
+    """Lazy loading for expensive imports."""
+    if name in _lazy_imports:
+        return _lazy_imports[name]
+
+    if name == "types":
+        from . import _types as types
+
+        _lazy_imports["types"] = types
+        return types
+    elif name == "Builder":
+        from .operations.builder import OperationGraphBuilder as Builder
+
+        _lazy_imports["Builder"] = Builder
+        return Builder
+
+    raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
+
+
 __all__ = (
     "Session",
     "Branch",
lionagi/_types.py CHANGED
@@ -1,3 +1,47 @@
-from .fields import *
-from .models import *
-from .protocols.types import *
+# Lazy loading for heavy type imports to improve startup performance
+_lazy_type_imports = {}
+
+
+def __getattr__(name: str):
+    """Lazy loading for type definitions."""
+    if name in _lazy_type_imports:
+        return _lazy_type_imports[name]
+
+    # Import from fields
+    try:
+        from .fields import __all__ as fields_all
+
+        if name in fields_all:
+            from . import fields
+
+            attr = getattr(fields, name)
+            _lazy_type_imports[name] = attr
+            return attr
+    except (ImportError, AttributeError):
+        pass
+
+    # Import from models
+    try:
+        from .models import __all__ as models_all
+
+        if name in models_all:
+            from . import models
+
+            attr = getattr(models, name)
+            _lazy_type_imports[name] = attr
+            return attr
+    except (ImportError, AttributeError):
+        pass
+
+    # Import from protocols.types
+    try:
+        from .protocols import types as protocol_types
+
+        if hasattr(protocol_types, name):
+            attr = getattr(protocol_types, name)
+            _lazy_type_imports[name] = attr
+            return attr
+    except (ImportError, AttributeError):
+        pass
+
+    raise AttributeError(f"module '{__name__}' has no attribute '{name}'")
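
With this hunk, `lionagi._types` no longer star-imports `fields`, `models`, and `protocols.types` at import time; each attribute is looked up in those modules on first access, cached in `_lazy_type_imports`, and unknown names still raise AttributeError. A hedged sketch of what a caller sees (the attribute name below is a placeholder, not a real lionagi export):

    from lionagi import _types

    # Resolved on demand via __getattr__, falling through fields -> models -> protocols.types
    obj = getattr(_types, "SomePublicName", None)  # placeholder name; None if absent
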
@@ -1,12 +1,13 @@
 def check_async_postgres_available():
-    try:
-        import sqlalchemy as sa
-        from pydapter.extras.async_postgres_ import AsyncPostgresAdapter
-        from sqlalchemy.ext.asyncio import create_async_engine
+    from lionagi.utils import is_import_installed
 
+    all_import_present = 0
+    for pkg in ("sqlalchemy", "asyncpg"):
+        if is_import_installed(pkg):
+            all_import_present += 1
+    if all_import_present == 2:
         return True
-    except Exception:
-        return ImportError(
-            "This adapter requires postgres option to be installed. "
-            'Please install them using `uv pip install "lionagi[postgres]"`.'
-        )
+    return ImportError(
+        "This adapter requires postgres option to be installed. "
+        'Please install them using `uv pip install "lionagi[postgres]"`.'
+    )
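
The rewritten check no longer imports the adapter stack up front; it only asks whether `sqlalchemy` and `asyncpg` are installed. `is_import_installed` is lionagi's own helper; a rough stand-in, assuming it behaves like an `importlib.util.find_spec` probe (an assumption, not the actual implementation):

    import importlib.util

    def is_import_installed_sketch(package_name: str) -> bool:
        # Hypothetical equivalent of lionagi.utils.is_import_installed
        return importlib.util.find_spec(package_name) is not None
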
@@ -13,86 +13,90 @@ from __future__ import annotations
 
 from typing import ClassVar, TypeVar
 
-import sqlalchemy as sa
-from pydapter.extras.async_postgres_ import AsyncPostgresAdapter
-from sqlalchemy.ext.asyncio import create_async_engine
-
-from ._utils import check_async_postgres_available
-
-_ASYNC_POSTGRES_AVAILABLE = check_async_postgres_available()
-
-if isinstance(_ASYNC_POSTGRES_AVAILABLE, ImportError):
-    raise _ASYNC_POSTGRES_AVAILABLE
+from pydapter import AsyncAdapter
 
 T = TypeVar("T")
 
 
-class LionAGIAsyncPostgresAdapter(AsyncPostgresAdapter[T]):
-    """
-    Streamlined async adapter for lionagi Nodes.
-
-    Features:
-    - Auto-creates tables with lionagi schema
-    - Inherits all pydapter v1.0.4+ improvements
-    - No workarounds needed for SQLite or raw SQL
-    """
-
-    obj_key: ClassVar[str] = "lionagi_async_pg"
-
-    @classmethod
-    async def to_obj(
-        cls,
-        subj,
-        /,
-        *,
-        many: bool = True,
-        adapt_meth: str = None,
-        **kw,
-    ):
-        """Write lionagi Node(s) to database with auto-table creation."""
-        # Auto-create table if needed
-        if table := kw.get("table"):
-            if engine_url := (kw.get("dsn") or kw.get("engine_url")):
-                await cls._ensure_table(engine_url, table)
-            elif engine := kw.get("engine"):
-                await cls._ensure_table(engine, table)
-
-        return await super().to_obj(
-            subj, many=many, adapt_meth=adapt_meth, **kw
-        )
-
-    @classmethod
-    async def _ensure_table(cls, engine_or_url, table_name: str):
-        """Create table with lionagi schema if it doesn't exist."""
-        should_dispose = False
-        if isinstance(engine_or_url, str):
-            engine = create_async_engine(engine_or_url, future=True)
-            should_dispose = True
-        else:
-            engine = engine_or_url
-
-        try:
-            async with engine.begin() as conn:
-                # Determine JSON type based on database
-                engine_url = str(engine.url)
-                json_type = (
-                    sa.dialects.postgresql.JSONB
-                    if "postgresql" in engine_url
-                    else sa.JSON
-                )
-
-                # Create table with lionagi schema
-                await conn.run_sync(
-                    lambda sync_conn: sa.Table(
-                        table_name,
-                        sa.MetaData(),
-                        sa.Column("id", sa.String, primary_key=True),
-                        sa.Column("content", json_type),
-                        sa.Column("node_metadata", json_type),
-                        sa.Column("created_at", sa.Float),
-                        sa.Column("embedding", json_type, nullable=True),
-                    ).create(sync_conn, checkfirst=True)
-                )
-        finally:
-            if should_dispose:
-                await engine.dispose()
+def create_lionagi_async_postgres_adapter() -> type[AsyncAdapter]:
+    from pydapter.extras.async_postgres_ import AsyncPostgresAdapter
+
+    class LionAGIAsyncPostgresAdapter(AsyncPostgresAdapter[T]):
+        """
+        Streamlined async adapter for lionagi Nodes.
+
+        Features:
+        - Auto-creates tables with lionagi schema
+        - Inherits all pydapter v1.0.4+ improvements
+        - No workarounds needed for SQLite or raw SQL
+        """
+
+        obj_key: ClassVar[str] = "lionagi_async_pg"
+
+        @classmethod
+        async def to_obj(
+            cls,
+            subj,
+            /,
+            *,
+            many: bool = True,
+            adapt_meth: str = None,
+            **kw,
+        ):
+            """Write lionagi Node(s) to database with auto-table creation."""
+            # Auto-create table if needed
+            if table := kw.get("table"):
+                if engine_url := (kw.get("dsn") or kw.get("engine_url")):
+                    await cls._ensure_table(engine_url, table)
+                elif engine := kw.get("engine"):
+                    await cls._ensure_table(engine, table)
+
+            return await super().to_obj(
+                subj, many=many, adapt_meth=adapt_meth, **kw
+            )
+
+        @classmethod
+        async def _ensure_table(cls, engine_or_url, table_name: str):
+            """Create table with lionagi schema if it doesn't exist."""
+            import sqlalchemy as sa
+            from sqlalchemy.ext.asyncio import create_async_engine
+
+            should_dispose = False
+            if isinstance(engine_or_url, str):
+                engine = create_async_engine(engine_or_url, future=True)
+                should_dispose = True
+            else:
+                engine = engine_or_url
+
+            try:
+                async with engine.begin() as conn:
+                    # Determine JSON type based on database
+                    engine_url = str(engine.url)
+                    json_type = (
+                        sa.dialects.postgresql.JSONB
+                        if "postgresql" in engine_url
+                        else sa.JSON
+                    )
+
+                    # Create table with lionagi schema
+                    await conn.run_sync(
+                        lambda sync_conn: sa.Table(
+                            table_name,
+                            sa.MetaData(),
+                            sa.Column("id", sa.String, primary_key=True),
+                            sa.Column("content", json_type),
+                            sa.Column("node_metadata", json_type),
+                            sa.Column("created_at", sa.Float),
+                            sa.Column("embedding", json_type, nullable=True),
+                        ).create(sync_conn, checkfirst=True)
+                    )
+            finally:
+                if should_dispose:
+                    await engine.dispose()
+
+    return LionAGIAsyncPostgresAdapter
+
+
+LionAGIAsyncPostgresAdapter = create_lionagi_async_postgres_adapter()
+
+__all__ = ("LionAGIAsyncPostgresAdapter",)
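
In the new version the class body lives inside `create_lionagi_async_postgres_adapter()`, and the SQLAlchemy imports move into `_ensure_table`, so `sqlalchemy` is only loaded when a table actually needs to be created; the module still binds the same public `LionAGIAsyncPostgresAdapter` name by calling the factory at import time. A minimal, generic sketch of that deferral pattern (names here are illustrative, not lionagi APIs):

    def make_adapter():
        class _Adapter:
            def ensure_table(self, table_name: str) -> None:
                # Heavy optional dependency imported only when the method runs
                import sqlalchemy as sa

                _ = sa.Table(table_name, sa.MetaData())

        return _Adapter

    Adapter = make_adapter()  # the public name is still bound at module level
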
lionagi/ln/__init__.py CHANGED
@@ -1,8 +1,6 @@
 from ._async_call import alcall, bcall
 from ._hash import hash_dict
 from ._json_dump import (
-    DEFAULT_SERIALIZER,
-    DEFAULT_SERIALIZER_OPTION,
     get_orjson_default,
     json_dumpb,
     json_dumps,
@@ -48,8 +46,6 @@ __all__ = (
     "alcall",
     "bcall",
     "hash_dict",
-    "DEFAULT_SERIALIZER",
-    "DEFAULT_SERIALIZER_OPTION",
     "get_orjson_default",
     "json_dumps",
     "make_options",
lionagi/ln/_json_dump.py CHANGED
@@ -15,8 +15,6 @@ import orjson
 
 __all__ = [
     "get_orjson_default",
-    "DEFAULT_SERIALIZER",
-    "DEFAULT_SERIALIZER_OPTION",
    "make_options",
    "json_dumpb",
    "json_dumps",
@@ -193,10 +191,6 @@ def _cached_default(
 
 # --------- defaults & options -------------------------------------------------
 
-# Compact, no newline, no sorting: neutral default for most use-cases.
-DEFAULT_SERIALIZER_OPTION = 0
-DEFAULT_SERIALIZER = get_orjson_default()
-
 
 def make_options(
     *,
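
With `DEFAULT_SERIALIZER` and `DEFAULT_SERIALIZER_OPTION` removed from both `lionagi.ln` and `_json_dump`, callers that passed those constants to orjson can reproduce the old behavior directly, since the constants were just `get_orjson_default()` and option `0` (compact output). A hedged migration sketch:

    import orjson

    from lionagi.ln import get_orjson_default

    # Formerly: orjson.dumps(obj, default=DEFAULT_SERIALIZER, option=DEFAULT_SERIALIZER_OPTION)
    payload = orjson.dumps({"k": "v"}, default=get_orjson_default(), option=0)
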
@@ -2,11 +2,9 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
-from .brainstorm.brainstorm import BrainstormOperation, brainstorm
 from .builder import ExpansionStrategy, OperationGraphBuilder
 from .flow import flow
 from .node import BranchOperations, Operation
-from .plan.plan import PlanOperation, plan
 
 Builder = OperationGraphBuilder
 
@@ -17,9 +15,5 @@ __all__ = (
     "flow",
     "BranchOperations",
     "Operation",
-    "plan",
-    "PlanOperation",
-    "brainstorm",
-    "BrainstormOperation",
     "Builder",
 )
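
`plan`, `PlanOperation`, `brainstorm`, and `BrainstormOperation` are no longer re-exported from `lionagi.operations`. Based on the removed import lines, callers would now import them from the submodules directly (assuming those submodules are otherwise unchanged in this release):

    # Previously available as: from lionagi.operations import plan, brainstorm
    from lionagi.operations.brainstorm.brainstorm import BrainstormOperation, brainstorm
    from lionagi.operations.plan.plan import PlanOperation, plan
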
@@ -0,0 +1,285 @@
+def visualize_graph(
+    builder,
+    title: str = "Operation Graph",
+    figsize=(14, 10),
+):
+    """Visualization with improved layout for complex graphs."""
+    from lionagi.utils import is_import_installed
+
+    if not is_import_installed("matplotlib"):
+        raise ImportError(
+            "matplotlib is required for visualization. "
+            "Please install it using `pip install matplotlib`."
+        )
+    if not is_import_installed("networkx"):
+        raise ImportError(
+            "networkx is required for visualization. "
+            "Please install it using `pip install networkx`."
+        )
+
+    import matplotlib.pyplot as plt
+    import networkx as nx
+    import numpy as np
+
+    graph = builder.get_graph()
+
+    # Convert to networkx
+    G = nx.DiGraph()
+
+    # Track node positions for hierarchical layout
+    node_levels = {}
+    node_labels = {}
+    node_colors = []
+    node_sizes = []
+
+    # First pass: add nodes and determine levels
+    for node in graph.internal_nodes.values():
+        node_id = str(node.id)[:8]
+        G.add_node(node_id)
+
+        # Determine level based on dependencies
+        in_edges = [
+            e
+            for e in graph.internal_edges.values()
+            if str(e.tail)[:8] == node_id
+        ]
+        if not in_edges:
+            level = 0  # Root nodes
+        else:
+            # Get max level of predecessors + 1
+            pred_levels = []
+            for edge in in_edges:
+                pred_id = str(edge.head)[:8]
+                if pred_id in node_levels:
+                    pred_levels.append(node_levels[pred_id])
+            level = max(pred_levels, default=0) + 1
+
+        node_levels[node_id] = level
+
+        # Create label
+        ref_id = node.metadata.get("reference_id", "")
+        if ref_id:
+            label = f"{node.operation}\n[{ref_id}]"
+        else:
+            label = f"{node.operation}\n{node_id}"
+        node_labels[node_id] = label
+
+        # Color and size based on status and type
+        if node.id in builder._executed:
+            node_colors.append("#90EE90")  # Light green
+            node_sizes.append(4000)
+        elif node.metadata.get("expansion_source"):
+            node_colors.append("#87CEEB")  # Sky blue
+            node_sizes.append(3500)
+        elif node.metadata.get("aggregation"):
+            node_colors.append("#FFD700")  # Gold
+            node_sizes.append(4500)
+        elif node.metadata.get("is_condition_check"):
+            node_colors.append("#DDA0DD")  # Plum
+            node_sizes.append(3500)
+        else:
+            node_colors.append("#E0E0E0")  # Light gray
+            node_sizes.append(3000)
+
+    # Add edges
+    edge_colors = []
+    edge_styles = []
+    edge_widths = []
+    edge_labels = {}
+
+    for edge in graph.internal_edges.values():
+        head_id = str(edge.head)[:8]
+        tail_id = str(edge.tail)[:8]
+        G.add_edge(head_id, tail_id)
+
+        # Style edges based on type
+        edge_label = edge.label[0] if edge.label else ""
+        edge_labels[(head_id, tail_id)] = edge_label
+
+        if "expansion" in edge_label:
+            edge_colors.append("#4169E1")  # Royal blue
+            edge_styles.append("dashed")
+            edge_widths.append(2)
+        elif "aggregate" in edge_label:
+            edge_colors.append("#FF6347")  # Tomato
+            edge_styles.append("dotted")
+            edge_widths.append(2.5)
+        else:
+            edge_colors.append("#808080")  # Gray
+            edge_styles.append("solid")
+            edge_widths.append(1.5)
+
+    # Create improved hierarchical layout
+    pos = {}
+    nodes_by_level = {}
+
+    for node_id, level in node_levels.items():
+        if level not in nodes_by_level:
+            nodes_by_level[level] = []
+        nodes_by_level[level].append(node_id)
+
+    # Position nodes with better spacing algorithm
+    y_spacing = 2.5
+    max_width = 16  # Maximum horizontal spread
+
+    for level, nodes in nodes_by_level.items():
+        num_nodes = len(nodes)
+
+        if num_nodes <= 6:
+            # Normal spacing for small levels
+            x_spacing = 2.5
+            x_offset = -(num_nodes - 1) * x_spacing / 2
+            for i, node_id in enumerate(nodes):
+                pos[node_id] = (x_offset + i * x_spacing, -level * y_spacing)
+        else:
+            # Multi-row layout for large levels
+            nodes_per_row = min(6, int(np.ceil(np.sqrt(num_nodes * 1.5))))
+            rows = int(np.ceil(num_nodes / nodes_per_row))
+
+            for i, node_id in enumerate(nodes):
+                row = i // nodes_per_row
+                col = i % nodes_per_row
+
+                # Calculate row width
+                nodes_in_row = min(
+                    nodes_per_row, num_nodes - row * nodes_per_row
+                )
+                x_spacing = 2.5
+                x_offset = -(nodes_in_row - 1) * x_spacing / 2
+
+                # Add slight y offset for different rows
+                y_offset = row * 0.8
+
+                pos[node_id] = (
+                    x_offset + col * x_spacing,
+                    -level * y_spacing - y_offset,
+                )
+
+    # Create figure
+    plt.figure(figsize=figsize)
+
+    # Draw nodes
+    nx.draw_networkx_nodes(
+        G,
+        pos,
+        node_color=node_colors,
+        node_size=node_sizes,
+        alpha=0.9,
+        linewidths=2,
+        edgecolors="black",
+    )
+
+    # Draw edges with different styles - use curved edges for better visibility
+    for i, (u, v) in enumerate(G.edges()):
+        # Calculate curve based on node positions
+        u_pos = pos[u]
+        v_pos = pos[v]
+
+        # Determine connection style based on relative positions
+        if abs(u_pos[0] - v_pos[0]) > 5:  # Far apart horizontally
+            connectionstyle = "arc3,rad=0.2"
+        else:
+            connectionstyle = "arc3,rad=0.1"
+
+        nx.draw_networkx_edges(
+            G,
+            pos,
+            [(u, v)],
+            edge_color=[edge_colors[i]],
+            style=edge_styles[i],
+            width=edge_widths[i],
+            alpha=0.7,
+            arrows=True,
+            arrowsize=20,
+            arrowstyle="-|>",
+            connectionstyle=connectionstyle,
+        )
+
+    # Draw labels
+    nx.draw_networkx_labels(
+        G,
+        pos,
+        node_labels,
+        font_size=9,
+        font_weight="bold",
+        font_family="monospace",
+    )
+
+    # Draw edge labels (only for smaller graphs)
+    if len(G.edges()) < 20:
+        nx.draw_networkx_edge_labels(
+            G,
+            pos,
+            edge_labels,
+            font_size=7,
+            font_color="darkblue",
+            bbox=dict(
+                boxstyle="round,pad=0.3",
+                facecolor="white",
+                edgecolor="none",
+                alpha=0.7,
+            ),
+        )
+
+    plt.title(title, fontsize=18, fontweight="bold", pad=20)
+    plt.axis("off")
+
+    # Enhanced legend
+    from matplotlib.lines import Line2D
+    from matplotlib.patches import Patch, Rectangle
+
+    legend_elements = [
+        Patch(facecolor="#90EE90", edgecolor="black", label="Executed"),
+        Patch(facecolor="#87CEEB", edgecolor="black", label="Expanded"),
+        Patch(facecolor="#FFD700", edgecolor="black", label="Aggregation"),
+        Patch(facecolor="#DDA0DD", edgecolor="black", label="Condition"),
+        Patch(facecolor="#E0E0E0", edgecolor="black", label="Pending"),
+        Line2D([0], [0], color="#808080", linewidth=2, label="Sequential"),
+        Line2D(
+            [0],
+            [0],
+            color="#4169E1",
+            linewidth=2,
+            linestyle="dashed",
+            label="Expansion",
+        ),
+        Line2D(
+            [0],
+            [0],
+            color="#FF6347",
+            linewidth=2,
+            linestyle="dotted",
+            label="Aggregate",
+        ),
+    ]
+
+    plt.legend(
+        handles=legend_elements,
+        loc="upper left",
+        bbox_to_anchor=(0, 1),
+        frameon=True,
+        fancybox=True,
+        shadow=True,
+        ncol=2,
+    )
+
+    # Add statistics box
+    stats_text = f"Nodes: {len(G.nodes())}\nEdges: {len(G.edges())}\nExecuted: {len(builder._executed)}"
+    if nodes_by_level:
+        max_level = max(nodes_by_level.keys())
+        stats_text += f"\nLevels: {max_level + 1}"
+
+    plt.text(
+        0.98,
+        0.02,
+        stats_text,
+        transform=plt.gca().transAxes,
+        bbox=dict(boxstyle="round,pad=0.5", facecolor="lightgray", alpha=0.8),
+        verticalalignment="bottom",
+        horizontalalignment="right",
+        fontsize=10,
+        fontfamily="monospace",
+    )
+
+    plt.tight_layout()
+    plt.show()
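
The new `visualize_graph` helper renders an `OperationGraphBuilder`'s graph with matplotlib/networkx, assigning each node a level one greater than the maximum level of its predecessors (roots at level 0) before spacing the levels vertically. A self-contained toy illustration of that leveling rule, independent of lionagi (the edges are hypothetical (head, tail) pairs):

    # Tiny DAG: a -> b, a -> c, b -> d, c -> d
    edges = [("a", "b"), ("a", "c"), ("b", "d"), ("c", "d")]
    nodes = {"a", "b", "c", "d"}

    levels: dict[str, int] = {}
    # Order-independent relaxation; visualize_graph does a single pass in insertion order
    for _ in range(len(nodes)):
        for n in nodes:
            preds = [h for h, t in edges if t == n]
            levels[n] = 0 if not preds else max(levels.get(p, 0) for p in preds) + 1

    assert levels == {"a": 0, "b": 1, "c": 1, "d": 2}
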