lionagi 0.13.3__py3-none-any.whl → 0.13.5__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
lionagi/operations/flow.py ADDED
@@ -0,0 +1,436 @@
+ # Copyright (c) 2023 - 2025, HaiyangLi <quantocean.li at gmail dot com>
+ #
+ # SPDX-License-Identifier: Apache-2.0
+
+ import asyncio
+ from typing import Any
+
+ from lionagi.operations.node import Operation
+ from lionagi.protocols.types import Graph, Node
+ from lionagi.session.branch import Branch
+ from lionagi.session.session import Session
+ from lionagi.utils import to_dict
+
+
+ async def flow(
+     branch: Branch,
+     graph: Graph,
+     *,
+     context: dict[str, Any] | None = None,
+     parallel: bool = True,
+     max_concurrent: int = 5,
+     verbose: bool = False,
+     session: Session | None = None,
+ ) -> dict[str, Any]:
+     """
+     Execute a graph-based workflow using the branch's operations.
+
+     For simple graphs, executes directly on the branch.
+     For parallel execution, uses a session for coordination.
+
+     Args:
+         branch: The branch to execute operations on
+         graph: The workflow graph containing Operation nodes
+         context: Initial context
+         parallel: Whether to execute independent operations in parallel
+         max_concurrent: Max concurrent operations
+         verbose: Enable verbose logging
+         session: Optional session for multi-branch parallel execution
+
+     Returns:
+         Execution results with completed operations and final context
+     """
+     # Validate graph
+     if not graph.is_acyclic():
+         raise ValueError("Graph must be acyclic for flow execution")
+
+     # Simple sequential execution on a single branch
+     if not parallel or max_concurrent == 1:
+         return await _execute_sequential(branch, graph, context, verbose)
+
+     # Parallel execution using a session
+     if session is None:
+         # Create a temporary session for this flow
+         from lionagi.session.session import Session
+
+         session = Session()
+         session.branches.include(branch)
+         session.default_branch = branch
+
+     return await _execute_parallel(
+         session, graph, context, max_concurrent, verbose
+     )
+
+
+ async def _execute_sequential(
+     branch: Branch, graph: Graph, context: dict[str, Any] | None, verbose: bool
+ ) -> dict[str, Any]:
+     """Execute graph sequentially on a single branch."""
+     completed = []
+     results = {}
+     execution_context = context or {}
+
+     # Get execution order (topological sort)
+     execution_order = _topological_sort(graph)
+
+     for node_id in execution_order:
+         node = graph.internal_nodes[node_id]
+
+         if not isinstance(node, Operation):
+             continue
+
+         # Check dependencies using a set for fast lookup
+         completed_set = set(completed)
+
+         # Check if dependencies and conditions are satisfied
+         if not await _dependencies_satisfied_async(
+             node, graph, completed_set, results, execution_context
+         ):
+             continue
+
+         predecessors = graph.get_predecessors(node)
+
+         # Update operation context with predecessors
+         if predecessors:
+             pred_context = {}
+             for pred in predecessors:
+                 if pred.id in results:
+                     result = results[pred.id]
+                     # Use to_dict for proper serialization of complex types only
+                     if result is not None and not isinstance(
+                         result, (str, int, float, bool)
+                     ):
+                         result = to_dict(result, recursive=True)
+                     pred_context[f"{pred.id}_result"] = result
+
+             if "context" not in node.parameters:
+                 node.parameters["context"] = pred_context
+             else:
+                 node.parameters["context"].update(pred_context)
+
+         # Add execution context
+         if execution_context:
+             if "context" not in node.parameters:
+                 node.parameters["context"] = execution_context.copy()
+             else:
+                 node.parameters["context"].update(execution_context)
+
+         # Execute operation
+         if verbose:
+             print(f"Executing operation: {node.id}")
+
+         await node.invoke(branch)
+
+         completed.append(node.id)
+         results[node.id] = node.response
+
+         # Update execution context
+         if isinstance(node.response, dict) and "context" in node.response:
+             execution_context.update(node.response["context"])
+
+     return {
+         "completed_operations": completed,
+         "operation_results": results,
+         "final_context": execution_context,
+     }
+
+
+ async def _execute_parallel(
+     session: Session,
+     graph: Graph,
+     context: dict[str, Any] | None,
+     max_concurrent: int,
+     verbose: bool,
+ ) -> dict[str, Any]:
+     """Execute graph in parallel using multiple branches."""
+     results = {}
+     execution_context = context or {}
+     completed = []  # Track completed operations
+
+     # Get operation nodes in topological order
+     operation_nodes = []
+     execution_order = _topological_sort(graph)
+     for node_id in execution_order:
+         node = graph.internal_nodes.get(node_id)
+         if isinstance(node, Operation):
+             operation_nodes.append(node)
+
+     # Use the session branches context manager for safe parallel execution
+     async with session.branches:
+         # Create a pool of worker branches
+         worker_branches = []
+         for i in range(min(max_concurrent, len(operation_nodes))):
+             if i == 0:
+                 worker_branches.append(session.default_branch)
+             else:
+                 worker_branches.append(session.split(session.default_branch))
+
+         # Process nodes in dependency order
+         remaining_nodes = {node.id for node in operation_nodes}
+         executing_tasks = {}  # node_id -> asyncio.Task
+         blocked_nodes = set()  # Nodes that have been checked and found blocked
+
+         max_iterations = 1000  # Prevent infinite loops
+         iteration = 0
+
+         while (
+             remaining_nodes or executing_tasks
+         ) and iteration < max_iterations:
+             iteration += 1
+
+             # Check for completed tasks
+             completed_in_round = []
+             for node_id, task in list(executing_tasks.items()):
+                 if task.done():
+                     try:
+                         result = await task
+                         results[node_id] = result
+                         completed.append(node_id)
+                         completed_in_round.append(node_id)
+                         if verbose:
+                             print(f"Completed operation: {node_id}")
+                     except Exception as e:
+                         if verbose:
+                             print(f"Operation {node_id} failed: {e}")
+                         results[node_id] = {"error": str(e)}
+                         completed.append(node_id)
+                         completed_in_round.append(node_id)
+                     finally:
+                         del executing_tasks[node_id]
+
+             # Remove completed from remaining
+             remaining_nodes -= set(completed_in_round)
+
+             # If there were new completions, clear blocked nodes to re-check
+             if completed_in_round:
+                 blocked_nodes.clear()
+
+             # Find nodes ready to execute (skip already blocked nodes)
+             ready_nodes = []
+             completed_set = set(completed)
+             newly_blocked = []
+
+             for node in operation_nodes:
+                 if (
+                     node.id in remaining_nodes
+                     and node.id not in executing_tasks
+                     and node.id not in blocked_nodes
+                     and len(executing_tasks) < max_concurrent
+                 ):
+                     if await _dependencies_satisfied_async(
+                         node, graph, completed_set, results, execution_context
+                     ):
+                         ready_nodes.append(node)
+                     else:
+                         newly_blocked.append(node.id)
+
+             # Update blocked nodes
+             blocked_nodes.update(newly_blocked)
+
+             # If no ready nodes but nodes remain and nothing is executing, we're stuck
+             if not ready_nodes and remaining_nodes and not executing_tasks:
+                 if verbose:
+                     print(
+                         f"Deadlock detected: {len(remaining_nodes)} nodes cannot execute"
+                     )
+                     remaining_node_names = [
+                         n.operation
+                         for n in operation_nodes
+                         if n.id in remaining_nodes
+                     ]
+                     print(f"Remaining operations: {remaining_node_names}")
+                 # Mark remaining nodes as failed
+                 for node in operation_nodes:
+                     if node.id in remaining_nodes:
+                         results[node.id] = {
+                             "error": "Blocked by unsatisfied conditions"
+                         }
+                         completed.append(node.id)
+                 break
+
+             # Start execution for ready nodes
+             started_count = 0
+             for node in ready_nodes:
+                 if len(executing_tasks) >= max_concurrent:
+                     break
+
+                 # Get an available branch (round-robin)
+                 branch_idx = len(executing_tasks) % len(worker_branches)
+                 node_branch = worker_branches[branch_idx]
+
+                 # Check if the node specifies a branch
+                 branch_id = node.parameters.get("branch_id")
+                 if branch_id:
+                     try:
+                         node_branch = session.branches[branch_id]
+                     except Exception:
+                         pass  # Fall back to the selected worker branch
+
+                 # Create a task for this node
+                 task = asyncio.create_task(
+                     _execute_node_async(
+                         node,
+                         node_branch,
+                         graph,
+                         results,
+                         execution_context,
+                         verbose,
+                     )
+                 )
+                 executing_tasks[node.id] = task
+                 started_count += 1
+
+                 if verbose:
+                     branch_name = (
+                         getattr(node_branch, "name", None) or node_branch.id
+                     )
+                     print(
+                         f"Started operation {node.id} on branch: {branch_name}"
+                     )
+
+             # If we started new tasks or have executing tasks, wait for progress
+             if started_count > 0 or executing_tasks:
+                 # Wait for at least one task to complete before the next iteration
+                 if executing_tasks:
+                     done, pending = await asyncio.wait(
+                         executing_tasks.values(),
+                         return_when=asyncio.FIRST_COMPLETED,
+                     )
+                 else:
+                     await asyncio.sleep(0.01)
+             elif not remaining_nodes:
+                 # All done
+                 break
+
+     if iteration >= max_iterations:
+         raise RuntimeError(
+             f"Flow execution exceeded maximum iterations ({max_iterations})"
+         )
+
+     return {
+         "completed_operations": completed,
+         "operation_results": results,
+         "final_context": execution_context,
+     }
+
+
+ async def _execute_node_async(
+     node: Operation,
+     branch: Branch,
+     graph: Graph,
+     results: dict[str, Any],
+     execution_context: dict[str, Any],
+     verbose: bool,
+ ) -> Any:
+     """Execute a single node asynchronously."""
+     # Update operation context with predecessors
+     predecessors = graph.get_predecessors(node)
+     if predecessors:
+         pred_context = {}
+         for pred in predecessors:
+             if pred.id in results:
+                 result = results[pred.id]
+                 # Use to_dict for proper serialization of complex types only
+                 if result is not None and not isinstance(
+                     result, (str, int, float, bool)
+                 ):
+                     result = to_dict(result, recursive=True)
+                 pred_context[f"{pred.id}_result"] = result
+
+         if "context" not in node.parameters:
+             node.parameters["context"] = pred_context
+         else:
+             node.parameters["context"].update(pred_context)
+
+     # Add execution context
+     if execution_context:
+         if "context" not in node.parameters:
+             node.parameters["context"] = execution_context.copy()
+         else:
+             node.parameters["context"].update(execution_context)
+
+     # Execute the operation
+     await node.invoke(branch)
+     result = node.response
+
+     # Update execution context if needed
+     if isinstance(result, dict) and "context" in result:
+         execution_context.update(result["context"])
+
+     return result
+
+
+ def _topological_sort(graph: Graph) -> list[str]:
+     """Get a topological ordering of graph nodes."""
+     visited = set()
+     stack = []
+
+     def visit(node_id: str):
+         if node_id in visited:
+             return
+         visited.add(node_id)
+
+         successors = graph.get_successors(graph.internal_nodes[node_id])
+         for successor in successors:
+             visit(successor.id)
+
+         stack.append(node_id)
+
+     for node in graph.internal_nodes:
+         if node.id not in visited:
+             visit(node.id)
+
+     return stack[::-1]
+
+
+ async def _dependencies_satisfied_async(
+     node: Node,
+     graph: Graph,
+     completed: set[str],
+     results: dict[str, Any],
+     execution_context: dict[str, Any] | None = None,
+ ) -> bool:
+     """Check if node dependencies are satisfied and edge conditions pass."""
+     # Get all incoming edges to this node
+     incoming_edges = []
+     for edge in graph.internal_edges:
+         if edge.tail == node.id:
+             incoming_edges.append(edge)
+
+     # If there are no incoming edges, the node can execute
+     if not incoming_edges:
+         return True
+
+     # Check each incoming edge
+     at_least_one_satisfied = False
+     for edge in incoming_edges:
+         # If the predecessor has not completed, this edge cannot be
+         # satisfied yet, whether or not it carries a condition
+         if edge.head not in completed:
+             continue
+
+         # Predecessor is completed
+         if edge.condition:
+             # Evaluate the condition
+             # Get the result - don't use to_dict if it's already a simple type
+             result_value = results.get(edge.head)
+             if result_value is not None and not isinstance(
+                 result_value, (str, int, float, bool)
+             ):
+                 result_value = to_dict(result_value, recursive=True)
+
+             ctx = {"result": result_value, "context": execution_context or {}}
+             try:
+                 if await edge.condition.apply(ctx):
+                     at_least_one_satisfied = True
+             except Exception:
+                 # Condition evaluation failed; treat the edge as unsatisfied
+                 continue
+         else:
+             # No condition, edge is satisfied
+             at_least_one_satisfied = True
+
+     return at_least_one_satisfied
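
Taken together, `flow()` validates the graph, topologically orders the `Operation` nodes, threads predecessor results into each node's `context` parameter, and gates scheduling on edge conditions. To make that concrete, here is a minimal usage sketch; it is illustrative rather than package code — the instruction strings are made up, and it assumes `Graph.add_node`/`Graph.add_edge` plus a configured chat model:

```python
import asyncio

from lionagi.operations.flow import flow
from lionagi.operations.node import Operation
from lionagi.protocols.graph import Edge
from lionagi.protocols.types import Graph
from lionagi.session.branch import Branch


async def main():
    branch = Branch()

    # Two independent summaries, then a merge step that depends on both.
    a = Operation(operation="communicate", parameters={"instruction": "Summarize topic A"})
    b = Operation(operation="communicate", parameters={"instruction": "Summarize topic B"})
    merge = Operation(operation="communicate", parameters={"instruction": "Merge the summaries"})

    graph = Graph()
    for op in (a, b, merge):
        graph.add_node(op)
    graph.add_edge(Edge(head=a.id, tail=merge.id))
    graph.add_edge(Edge(head=b.id, tail=merge.id))

    # a and b can run concurrently; merge is only scheduled once both
    # of its edges' heads are in the completed set.
    result = await flow(branch, graph, parallel=True, max_concurrent=2)
    print(result["completed_operations"])


asyncio.run(main())
```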
lionagi/operations/node.py ADDED
@@ -0,0 +1,107 @@
+ import asyncio
+ import logging
+ from typing import Any, Literal
+ from uuid import UUID
+
+ from pydantic import BaseModel, Field
+
+ from lionagi.protocols.types import ID, Event, EventStatus, IDType, Node
+ from lionagi.session.branch import Branch
+
+ BranchOperations = Literal[
+     "chat",
+     "operate",
+     "communicate",
+     "parse",
+     "ReAct",
+     "select",
+     "translate",
+     "interpret",
+     "act",
+     "ReActStream",
+     "instruct",
+ ]
+
+ logger = logging.getLogger("operation")
+
+
+ class Operation(Node, Event):
+     operation: BranchOperations
+     parameters: dict[str, Any] | BaseModel = Field(
+         default_factory=dict, description="Parameters for the operation"
+     )
+
+     @property
+     def branch_id(self) -> IDType | None:
+         if a := self.metadata.get("branch_id"):
+             return ID.get_id(a)
+
+     @branch_id.setter
+     def branch_id(self, value: str | UUID | IDType | None):
+         if value is None:
+             self.metadata.pop("branch_id", None)
+         else:
+             self.metadata["branch_id"] = str(value)
+
+     @property
+     def graph_id(self) -> str | None:
+         return self.metadata.get("graph_id")
+
+     @graph_id.setter
+     def graph_id(self, value: str | UUID | IDType | None):
+         if value is None:
+             self.metadata.pop("graph_id", None)
+         else:
+             self.metadata["graph_id"] = str(value)
+
+     @property
+     def request(self) -> dict:
+         # Convert parameters to a dict if it's a BaseModel
+         params = self.parameters
+         if hasattr(params, "model_dump"):
+             params = params.model_dump()
+         elif hasattr(params, "dict"):
+             params = params.dict()
+
+         return params if isinstance(params, dict) else {}
+
+     @property
+     def response(self):
+         """Get the response from the execution."""
+         return self.execution.response if self.execution else None
+
+     async def invoke(self, branch: Branch):
+         meth = getattr(branch, self.operation, None)
+         if meth is None:
+             raise ValueError(f"Unsupported operation type: {self.operation}")
+
+         start = asyncio.get_event_loop().time()
+
+         try:
+             self.execution.status = EventStatus.PROCESSING
+             self.branch_id = branch.id
+             response = await self._invoke(meth)
+
+             self.execution.response = response
+             self.execution.status = EventStatus.COMPLETED
+
+         except asyncio.CancelledError:
+             self.execution.error = "Operation cancelled"
+             self.execution.status = EventStatus.FAILED
+             raise
+
+         except Exception as e:
+             self.execution.error = str(e)
+             self.execution.status = EventStatus.FAILED
+             logger.error(f"Operation failed: {e}")
+
+         finally:
+             self.execution.duration = asyncio.get_event_loop().time() - start
+
+     async def _invoke(self, meth):
+         if self.operation == "ReActStream":
+             res = []
+             async for i in meth(**self.request):
+                 res.append(i)
+             return res
+         return await meth(**self.request)
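
`Operation` is the unit the flow executor schedules: `invoke()` resolves the branch method named by `operation`, calls it with `request` as keyword arguments, and records status, response, and duration on `execution`. A rough sketch of the runtime behavior (the instruction string is illustrative, and a configured chat model is assumed):

```python
import asyncio

from lionagi.operations.node import Operation
from lionagi.session.branch import Branch


async def main():
    branch = Branch()
    op = Operation(operation="communicate", parameters={"instruction": "Say hello"})

    # Roughly equivalent to: await branch.communicate(instruction="Say hello"),
    # with EventStatus transitions and timing captured on op.execution.
    await op.invoke(branch)

    print(op.execution.status)  # COMPLETED, or FAILED with op.execution.error set
    print(op.response)          # the branch method's return value


asyncio.run(main())
```

Note the design choice in `invoke()`: only cancellation is re-raised; any other exception is recorded on `execution.error` with status `FAILED`, so callers inspect the operation's state rather than catching exceptions.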
lionagi/protocols/graph/__init__.py CHANGED
@@ -1,3 +1,9 @@
  # Copyright (c) 2023 - 2025, HaiyangLi <quantocean.li at gmail dot com>
  #
  # SPDX-License-Identifier: Apache-2.0
+
+ from .edge import Edge, EdgeCondition
+ from .graph import Graph
+ from .node import Node
+
+ __all__ = ["Edge", "EdgeCondition", "Graph", "Node"]
@@ -196,10 +196,7 @@ def _pp_tool_use(tu: dict[str, Any], theme) -> None:
  def _pp_tool_result(tr: dict[str, Any], theme) -> None:
      body_preview = shorten(str(tr["content"]).replace("\n", " "), 130)
      status = "ERR" if tr.get("is_error") else "OK"
-     body = (
-         f"- 📄 Tool Result({tr['tool_use_id']}) - {status}\n\n"
-         f"\tcontent: {body_preview}"
-     )
+     body = f"- 📄 Tool Result({tr['tool_use_id']}) - {status}\n\n\tcontent: {body_preview}"
      print_readable(body, border=False, panel=False, theme=theme)


lionagi/session/__init__.py CHANGED
@@ -1,3 +1,8 @@
  # Copyright (c) 2023 - 2025, HaiyangLi <quantocean.li at gmail dot com>
  #
  # SPDX-License-Identifier: Apache-2.0
+
+ from .branch import Branch
+ from .session import Session
+
+ __all__ = ["Branch", "Session"]
lionagi/session/session.py CHANGED
@@ -16,6 +16,8 @@ from lionagi.protocols.types import (
      ActionManager,
      Communicatable,
      Exchange,
+     Graph,
+     IDType,
      MailManager,
      MessageFlag,
      Node,
@@ -303,6 +305,51 @@ class Session(Node, Communicatable, Relational):
          except Exception as e:
              raise ValueError(f"Failed to collect mail. Error: {e}")

+     async def flow(
+         self,
+         graph: Graph,
+         *,
+         context: dict[str, Any] | None = None,
+         parallel: bool = True,
+         max_concurrent: int = 5,
+         verbose: bool = False,
+         default_branch: Branch | ID.Ref | None = None,
+     ) -> dict[str, Any]:
+         """
+         Execute a graph-based workflow using multi-branch orchestration.
+
+         This is a Session-native operation that coordinates execution across
+         multiple branches for parallel processing.
+
+         Args:
+             graph: The workflow graph containing Operation nodes
+             context: Initial context for the workflow
+             parallel: Whether to execute independent operations in parallel
+             max_concurrent: Maximum concurrent operations (branches)
+             verbose: Enable verbose logging
+             default_branch: Branch to use as default (defaults to self.default_branch)
+
+         Returns:
+             Execution results with completed operations and final context
+         """
+         from lionagi.operations.flow import flow
+
+         # Use the specified branch or the session's default
+         branch = default_branch or self.default_branch
+         if isinstance(branch, (str, IDType)):
+             branch = self.branches[branch]
+
+         return await flow(
+             branch=branch,
+             graph=graph,
+             context=context,
+             parallel=parallel,
+             max_concurrent=max_concurrent,
+             verbose=verbose,
+             session=self,
+         )
+

  __all__ = ["Session"]
  # File: autoos/session/session.py
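
`Session.flow` is the high-level entry point for all of the above. The sketch below shows gating an edge on a predecessor's output; the `EdgeCondition` subclass is hypothetical, but the executor itself only requires that `condition.apply(ctx)` be awaitable and truthy, with `ctx` shaped as `{"result": ..., "context": ...}`:

```python
import asyncio

from lionagi.operations.node import Operation
from lionagi.protocols.graph import Edge, EdgeCondition
from lionagi.protocols.types import Graph
from lionagi.session.branch import Branch
from lionagi.session.session import Session


class MentionsUrgent(EdgeCondition):
    """Hypothetical condition: follow the edge only for 'urgent' results."""

    async def apply(self, ctx: dict) -> bool:
        # flow() builds ctx as {"result": <predecessor result>, "context": {...}}
        return "urgent" in str(ctx.get("result", "")).lower()


async def main():
    session = Session()
    branch = Branch()
    session.branches.include(branch)
    session.default_branch = branch

    triage = Operation(operation="communicate", parameters={"instruction": "Triage the ticket"})
    escalate = Operation(operation="communicate", parameters={"instruction": "Escalate it"})

    graph = Graph()
    graph.add_node(triage)
    graph.add_node(escalate)
    graph.add_edge(Edge(head=triage.id, tail=escalate.id, condition=MentionsUrgent()))

    result = await session.flow(graph, max_concurrent=3, verbose=True)
    print(result["operation_results"])


asyncio.run(main())
```

If the condition never passes, the parallel executor eventually marks the downstream node with `"Blocked by unsatisfied conditions"` rather than deadlocking.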
lionagi/settings.py CHANGED
@@ -18,7 +18,7 @@ CACHED_CONFIG = {

  CHAT_IMODEL_CONFIG = {
      "provider": "openai",
-     "model": "gpt-4o",
+     "model": "gpt-4.1-nano",
      "base_url": "https://api.openai.com/v1",
      "endpoint": "chat/completions",
      "api_key": "OPENAI_API_KEY",
@@ -31,7 +31,7 @@ CHAT_IMODEL_CONFIG = {

  PARSE_IMODEL_CONFIG = {
      "provider": "openai",
-     "model": "gpt-4o-mini",
+     "model": "gpt-4.1-nano",
      "base_url": "https://api.openai.com/v1",
      "endpoint": "chat/completions",
      "api_key": "OPENAI_API_KEY",
lionagi/version.py CHANGED
@@ -1 +1 @@
- __version__ = "0.13.3"
+ __version__ = "0.13.5"
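
Beyond the new flow machinery, the most user-visible change in this release is the default model bump: both the chat and parse configs now point at gpt-4.1-nano instead of gpt-4o and gpt-4o-mini. Code that relied on the old defaults can pin a model explicitly; a minimal sketch, assuming the top-level `iModel`/`Branch` keyword API:

```python
from lionagi import Branch, iModel

# Pin the chat model rather than inheriting the new gpt-4.1-nano default.
branch = Branch(chat_model=iModel(provider="openai", model="gpt-4o"))
```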