lionagi 0.14.5__py3-none-any.whl → 0.14.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -35,7 +35,6 @@ class Instruct(HashableModel):
         "reason",
         "actions",
         "action_strategy",
-        "batch_size",
         "request_params",
         "response_params",
     ]
@@ -97,16 +96,10 @@ class Instruct(HashableModel):
             "None: Contextual execution."
         ),
     )
-    action_strategy: Literal["batch", "sequential", "concurrent"] | None = (
-        Field(
-            None,
-            description="Action strategy to use for executing actions. Default "
-            "is 'concurrent'. Only provide for if actions are enabled.",
-        )
-    )
-    batch_size: int | None = Field(
+    action_strategy: Literal["sequential", "concurrent"] | None = Field(
         None,
-        description="Batch size for executing actions. Only provide for 'batch' strategy.",
+        description="Action strategy to use for executing actions. Default "
+        "is 'concurrent'. Only provide for if actions are enabled.",
     )
 
     @field_validator("instruction", "guidance", "context", mode="before")
@@ -123,13 +116,6 @@ class Instruct(HashableModel):
             return "concurrent"
         return v
 
-    @field_validator("batch_size", mode="before")
-    def _validate_batch_size(cls, v):
-        try:
-            return to_num(v, num_type=int)
-        except Exception:
-            return None
-
 
 class InstructResponse(HashableModel):
     instruct: Instruct
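
Taken together, the three Instruct hunks above remove the "batch" action strategy, the batch_size field, and its validator in 0.14.7. A minimal migration sketch follows; the import path is an assumption, since the diff does not show the module that defines Instruct.

# Hedged sketch of the 0.14.7 surface; the import path below is assumed.
from lionagi.operations.instruct.instruct import Instruct

# Only "sequential" and "concurrent" remain; batch_size no longer exists.
instruct = Instruct(
    instruction="Summarize the quarterly report",
    actions=True,
    action_strategy="concurrent",
)

Callers that previously passed action_strategy="batch" with a batch_size should switch to one of the two remaining strategies; the retained validator shown in the context lines above appears to fall back to "concurrent" for unrecognized values.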
@@ -4,7 +4,7 @@ import time
 from collections.abc import Iterator
 from contextlib import contextmanager
 from types import TracebackType
-from typing import Optional, TypeVar
+from typing import TypeVar
 
 import anyio
 
@@ -0,0 +1,3 @@
+from .manager import HashUtils
+
+__all__ = ("HashUtils",)
@@ -0,0 +1,108 @@
+import copy
+
+from pydantic import BaseModel as PydanticBaseModel
+
+__all__ = ("hash_dict",)
+
+# --- Canonical Representation Generator ---
+_PRIMITIVE_TYPES = (str, int, float, bool, type(None))
+_TYPE_MARKER_DICT = 0
+_TYPE_MARKER_LIST = 1
+_TYPE_MARKER_TUPLE = 2
+_TYPE_MARKER_SET = 3
+_TYPE_MARKER_FROZENSET = 4
+_TYPE_MARKER_PYDANTIC = 5  # Distinguishes dumped Pydantic models
+
+
+def _generate_hashable_representation(item: any) -> any:
+    """
+    Recursively converts a Python object into a stable, hashable representation.
+    This ensures that logically identical but structurally different inputs
+    (e.g., dicts with different key orders) produce the same representation.
+    """
+    if isinstance(item, _PRIMITIVE_TYPES):
+        return item
+
+    if isinstance(item, PydanticBaseModel):
+        # Process the Pydantic model by first dumping it to a dict, then processing that dict.
+        # The type marker distinguishes this from a regular dictionary.
+        return (
+            _TYPE_MARKER_PYDANTIC,
+            _generate_hashable_representation(item.model_dump()),
+        )
+
+    if isinstance(item, dict):
+        # Sort dictionary items by key (stringified) for order-insensitivity.
+        return (
+            _TYPE_MARKER_DICT,
+            tuple(
+                (str(k), _generate_hashable_representation(v))
+                for k, v in sorted(item.items(), key=lambda x: str(x[0]))
+            ),
+        )
+
+    if isinstance(item, list):
+        return (
+            _TYPE_MARKER_LIST,
+            tuple(_generate_hashable_representation(elem) for elem in item),
+        )
+
+    if isinstance(item, tuple):
+        return (
+            _TYPE_MARKER_TUPLE,
+            tuple(_generate_hashable_representation(elem) for elem in item),
+        )
+
+    # frozenset must be checked before set
+    if isinstance(item, frozenset):
+        try:  # Attempt direct sort for comparable elements
+            sorted_elements = sorted(list(item))
+        except TypeError:  # Fallback for unorderable mixed types
+            sorted_elements = sorted(
+                list(item), key=lambda x: (str(type(x)), str(x))
+            )
+        return (
+            _TYPE_MARKER_FROZENSET,
+            tuple(
+                _generate_hashable_representation(elem)
+                for elem in sorted_elements
+            ),
+        )
+
+    if isinstance(item, set):
+        try:
+            sorted_elements = sorted(list(item))
+        except TypeError:
+            sorted_elements = sorted(
+                list(item), key=lambda x: (str(type(x)), str(x))
+            )
+        return (
+            _TYPE_MARKER_SET,
+            tuple(
+                _generate_hashable_representation(elem)
+                for elem in sorted_elements
+            ),
+        )
+
+    # Fallback for other types (e.g., custom objects not derived from the above)
+    try:
+        return str(item)
+    except Exception:  # If str() fails for some reason
+        return repr(item)
+
+
+def hash_dict(data: any, strict: bool = False) -> int:
+    data_to_process = data
+    if strict:
+        data_to_process = copy.deepcopy(data)
+
+    hashable_repr = _generate_hashable_representation(data_to_process)
+
+    try:
+        return hash(hashable_repr)
+    except TypeError as e:
+        raise TypeError(
+            f"The generated representation for the input data was not hashable. "
+            f"Input type: {type(data).__name__}, Representation type: {type(hashable_repr).__name__}. "
+            f"Original error: {e}"
+        )
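
Since this module is entirely new in 0.14.7, a short usage sketch may help. The module path lionagi.libs.hash.hash_dict is taken from the import added to HashableModel later in this diff; everything else below is illustrative.

from pydantic import BaseModel

from lionagi.libs.hash.hash_dict import hash_dict


class Point(BaseModel):
    x: int
    y: int


# Key order does not matter: both dicts reduce to the same sorted tuple form.
a = {"x": 1, "y": [1, 2, {"z": (3, 4)}]}
b = {"y": [1, 2, {"z": (3, 4)}], "x": 1}
assert hash_dict(a) == hash_dict(b)

# Pydantic models are dumped first but carry their own type marker, so a model
# and its plain-dict dump are not treated as the same value.
print(hash_dict(Point(x=1, y=2)) == hash_dict(Point(x=1, y=2)))  # True
print(hash_dict(Point(x=1, y=2)) == hash_dict({"x": 1, "y": 2}))  # False expected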
@@ -0,0 +1,26 @@
+class HashUtils:
+    @staticmethod
+    def hash_dict(data: any, strict: bool = False) -> int:
+        """
+        Computes a deterministic hash for various Python data structures including
+        dictionaries, Pydantic BaseModels, lists, tuples, sets, frozensets, and primitives.
+
+        The hash is deterministic within the same Python process run (respecting
+        PYTHONHASHSEED for built-in hash behavior on strings, bytes, etc.).
+        It's suitable for tasks like finding unique objects within a collection
+        during a single program execution.
+
+        Args:
+            data: The Python object to hash.
+            strict: if True, will make a deep copy of the input data to ensure immutability.
+
+        Returns:
+            An integer hash value.
+
+        Raises:
+            TypeError: If the generated internal representation of the data is not hashable,
+                though this is unlikely with the current _generate_hashable_representation.
+        """
+        from .hash_dict import hash_dict as _hash_dict
+
+        return _hash_dict(data, strict=strict)
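
The facade simply defers to the function above. Given the package __init__ shown earlier, usage would look roughly like this sketch:

from lionagi.libs.hash import HashUtils

print(HashUtils.hash_dict({"a": 1, "b": {"c": 2}}))
print(HashUtils.hash_dict({"b": {"c": 2}, "a": 1}))  # same value as above

# strict=True deep-copies the input before building the representation,
# guarding against mutation of the original data while hashing.
print(HashUtils.hash_dict({"a": [1, 2, 3]}, strict=True))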
@@ -1,7 +1,8 @@
 from pydantic import BaseModel
 from typing_extensions import Self
 
-from lionagi.utils import UNDEFINED, hash_dict
+from lionagi.libs.hash.hash_dict import hash_dict
+from lionagi.utils import UNDEFINED
 
 
 class HashableModel(BaseModel):
@@ -14,6 +14,7 @@ from typing import Any
 from lionagi.operations.node import BranchOperations, Operation
 from lionagi.protocols.graph.edge import Edge
 from lionagi.protocols.graph.graph import Graph
+from lionagi.protocols.types import ID
 
 __all__ = (
     "OperationGraphBuilder",
@@ -76,6 +77,7 @@ class OperationGraphBuilder:
         node_id: str | None = None,
         depends_on: list[str] | None = None,
         inherit_context: bool = False,
+        branch=None,
         **parameters,
     ) -> str:
         """
@@ -108,6 +110,9 @@ class OperationGraphBuilder:
             # Add as metadata for easy lookup
             node.metadata["reference_id"] = node_id
 
+        if branch:
+            node.branch_id = ID.get_id(branch)
+
         # Handle dependencies
         if depends_on:
             for dep_id in depends_on:
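
The new branch= parameter lets a graph node be pinned to a specific branch by storing ID.get_id(branch) on the node. A hypothetical sketch; the import paths, Branch construction, and the "chat" operation name are assumptions and are not shown in this diff.

from lionagi.operations.builder import OperationGraphBuilder  # path assumed
from lionagi.session.branch import Branch  # path assumed

analyst = Branch(system="You are a careful analyst.")
builder = OperationGraphBuilder()

# With branch= supplied, the node records the branch's id, so the flow
# executor reuses this branch instead of cloning the session default.
node = builder.add_operation(
    "chat",
    branch=analyst,
    instruction="List the key risks in the plan.",
)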
@@ -227,6 +232,7 @@ class OperationGraphBuilder:
         source_node_ids: list[str] | None = None,
         inherit_context: bool = False,
         inherit_from_source: int = 0,
+        branch=None,
         **parameters,
     ) -> str:
         """
@@ -264,6 +270,9 @@ class OperationGraphBuilder:
         if node_id:
             node.metadata["reference_id"] = node_id
 
+        if branch:
+            node.branch_id = ID.get_id(branch)
+
         # Store context inheritance for aggregations
         if inherit_context and sources:
             node.metadata["inherit_context"] = True
@@ -60,15 +60,25 @@ class DependencyAwareExecutor:
         self.operation_branches = {}  # operation_id -> Branch
 
         # Initialize completion events for all operations
+        # and check for already completed operations
         for node in graph.internal_nodes.values():
             if isinstance(node, Operation):
                 self.completion_events[node.id] = ConcurrencyEvent()
 
+                # If operation is already completed, mark it and store results
+                if node.execution.status == EventStatus.COMPLETED:
+                    self.completion_events[node.id].set()
+                    if hasattr(node, "response"):
+                        self.results[node.id] = node.response
+
     async def execute(self) -> dict[str, Any]:
         """Execute the operation graph."""
         if not self.graph.is_acyclic():
             raise ValueError("Graph must be acyclic for flow execution")
 
+        # Pre-allocate ALL branches upfront to avoid any locking during execution
+        await self._preallocate_all_branches()
+
         # Create capacity limiter for concurrency control
         # None means no limit, use the configured unlimited value
         capacity = (
@@ -91,10 +101,97 @@ class DependencyAwareExecutor:
             "final_context": self.context,
         }
 
+    async def _preallocate_all_branches(self):
+        """Pre-allocate ALL branches including for context inheritance to eliminate runtime locking."""
+        operations_needing_branches = []
+
+        # First pass: identify all operations that need branches
+        for node in self.graph.internal_nodes.values():
+            if not isinstance(node, Operation):
+                continue
+
+            # Skip if operation already has a branch_id
+            if node.branch_id:
+                try:
+                    # Ensure the branch exists in our local map
+                    branch = self.session.branches[node.branch_id]
+                    self.operation_branches[node.id] = branch
+                except:
+                    pass
+                continue
+
+            # Check if operation needs a new branch
+            predecessors = self.graph.get_predecessors(node)
+            if predecessors or node.metadata.get("inherit_context"):
+                operations_needing_branches.append(node)
+
+        if not operations_needing_branches:
+            return
+
+        # Create all branches in a single lock acquisition
+        async with self.session.branches.async_lock:
+            # For context inheritance, we need to create placeholder branches
+            # that will be updated once dependencies complete
+            for operation in operations_needing_branches:
+                # Create a fresh branch for now
+                branch_clone = self.session.default_branch.clone(
+                    sender=self.session.id
+                )
+
+                # Store in our operation branches map
+                self.operation_branches[operation.id] = branch_clone
+
+                # Add to session branches collection directly
+                # Check if this is a real branch (not a mock)
+                try:
+                    from lionagi.protocols.types import IDType
+
+                    # Try to validate the ID
+                    if hasattr(branch_clone, "id"):
+                        branch_id = branch_clone.id
+                        # Only add to collections if it's a valid ID
+                        if isinstance(branch_id, (str, IDType)) or (
+                            hasattr(branch_id, "__str__")
+                            and not hasattr(branch_id, "_mock_name")
+                        ):
+                            self.session.branches.collections[branch_id] = (
+                                branch_clone
+                            )
+                            self.session.branches.progression.append(branch_id)
+                except:
+                    # If validation fails, it's likely a mock - skip adding to collections
+                    pass
+
+                # Mark branches that need context inheritance for later update
+                if operation.metadata.get("inherit_context"):
+                    branch_clone.metadata = branch_clone.metadata or {}
+                    branch_clone.metadata["pending_context_inheritance"] = True
+                    branch_clone.metadata["inherit_from_operation"] = (
+                        operation.metadata.get("primary_dependency")
+                    )
+
+        if self.verbose:
+            print(f"Pre-allocated {len(operations_needing_branches)} branches")
+
     async def _execute_operation(
         self, operation: Operation, limiter: CapacityLimiter
     ):
         """Execute a single operation with dependency waiting."""
+        # Skip if operation is already completed
+        if operation.execution.status == EventStatus.COMPLETED:
+            if self.verbose:
+                print(
+                    f"Skipping already completed operation: {str(operation.id)[:8]}"
+                )
+            # Ensure results are available for dependencies
+            if operation.id not in self.results and hasattr(
+                operation, "response"
+            ):
+                self.results[operation.id] = operation.response
+            # Signal completion for any waiting operations
+            self.completion_events[operation.id].set()
+            return
+
         try:
             # Wait for dependencies
             await self._wait_for_dependencies(operation)
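
Together, these two additions mean branch clones are created once, up front, under a single lock, and nodes whose execution status is already COMPLETED are not re-run; their stored response is reused and their completion event is set so dependents do not block. A hedged end-to-end sketch of that resume behavior follows; Session, flow(), and get_graph() usage here are assumptions about the surrounding API, not part of this diff.

import anyio

from lionagi.operations.builder import OperationGraphBuilder  # path assumed
from lionagi.session.session import Session  # path assumed


async def main():
    session = Session()
    builder = OperationGraphBuilder()

    first = builder.add_operation("chat", instruction="Draft an outline.")
    await session.flow(builder.get_graph())  # executes `first`

    second = builder.add_operation(
        "chat", depends_on=[first], instruction="Expand the first section."
    )
    # On the second pass `first` is already COMPLETED, so only `second` runs.
    await session.flow(builder.get_graph())


anyio.run(main)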
@@ -102,7 +199,7 @@ class DependencyAwareExecutor:
             # Acquire capacity to limit concurrency
             async with limiter:
                 # Prepare operation context
-                await self._prepare_operation(operation)
+                self._prepare_operation(operation)
 
                 # Execute the operation
                 if self.verbose:
@@ -191,7 +288,7 @@ class DependencyAwareExecutor:
                     f"Edge condition not satisfied for {str(operation.id)[:8]}"
                 )
 
-    async def _prepare_operation(self, operation: Operation):
+    def _prepare_operation(self, operation: Operation):
         """Prepare operation with context and branch assignment."""
         # Update operation context with predecessors
         predecessors = self.graph.get_predecessors(operation)
@@ -209,77 +306,83 @@ class DependencyAwareExecutor:
             if "context" not in operation.parameters:
                 operation.parameters["context"] = pred_context
             else:
-                operation.parameters["context"].update(pred_context)
+                # Handle case where context might be a string
+                existing_context = operation.parameters["context"]
+                if isinstance(existing_context, dict):
+                    existing_context.update(pred_context)
+                else:
+                    # If it's a string or other type, create a new dict
+                    operation.parameters["context"] = {
+                        "original_context": existing_context,
+                        **pred_context,
+                    }
 
         # Add execution context
         if self.context:
             if "context" not in operation.parameters:
                 operation.parameters["context"] = self.context.copy()
             else:
-                operation.parameters["context"].update(self.context)
+                # Handle case where context might be a string
+                existing_context = operation.parameters["context"]
+                if isinstance(existing_context, dict):
+                    existing_context.update(self.context)
+                else:
+                    # If it's a string or other type, create a new dict
+                    operation.parameters["context"] = {
+                        "original_context": existing_context,
+                        **self.context,
+                    }
 
         # Determine and assign branch
-        branch = await self._resolve_branch_for_operation(operation)
+        branch = self._resolve_branch_for_operation(operation)
         self.operation_branches[operation.id] = branch
 
-    async def _resolve_branch_for_operation(
-        self, operation: Operation
-    ) -> Branch:
-        """Resolve which branch an operation should use based on inheritance rules."""
-        # Check if operation has an explicit branch_id
-        if operation.branch_id:
-            try:
-                return self.session.branches[operation.branch_id]
-            except:
-                pass
-
-        # Get predecessors for context inheritance check
-        predecessors = self.graph.get_predecessors(operation)
+    def _resolve_branch_for_operation(self, operation: Operation) -> Branch:
+        """Resolve which branch an operation should use - all branches are pre-allocated."""
+        # All branches should be pre-allocated
+        if operation.id in self.operation_branches:
+            branch = self.operation_branches[operation.id]
+
+            # Handle deferred context inheritance
+            if (
+                hasattr(branch, "metadata")
+                and branch.metadata
+                and branch.metadata.get("pending_context_inheritance")
+            ):
+
+                primary_dep_id = branch.metadata.get("inherit_from_operation")
+                if primary_dep_id and primary_dep_id in self.results:
+                    # Find the primary dependency's branch
+                    primary_branch = self.operation_branches.get(
+                        primary_dep_id, self.session.default_branch
+                    )
 
-        # Handle context inheritance
-        if operation.metadata.get("inherit_context"):
-            primary_dep_id = operation.metadata.get("primary_dependency")
-            if primary_dep_id and primary_dep_id in self.results:
-                # Find the operation that was the primary dependency
-                for node in self.graph.internal_nodes.values():
-                    if (
-                        isinstance(node, Operation)
-                        and node.id == primary_dep_id
-                        and node.branch_id
+                    # Copy the messages from primary branch to this branch
+                    # This avoids creating a new branch and thus avoids locking
+                    # Access messages through the MessageManager
+                    if hasattr(branch, "_message_manager") and hasattr(
+                        primary_branch, "_message_manager"
                     ):
-                        try:
-                            primary_branch = self.session.branches[
-                                node.branch_id
-                            ]
-                            # Use session.branches context manager for split
-                            async with self.session.branches:
-                                split_branch = self.session.split(
-                                    primary_branch
-                                )
-                            if self.verbose:
-                                print(
-                                    f"Operation {str(operation.id)[:8]} inheriting context from {str(primary_dep_id)[:8]}"
-                                )
-                            return split_branch
-                        except:
-                            pass
-
-        # If operation has dependencies but no inheritance, create fresh branch
-        elif predecessors:
-            try:
-                async with self.session.branches:
-                    fresh_branch = self.session.split(
-                        self.session.default_branch
-                    )
-                if self.verbose:
-                    print(
-                        f"Operation {str(operation.id)[:8]} starting with fresh context"
-                    )
-                return fresh_branch
-            except:
-                pass
+                        branch._message_manager.pile.clear()
+                        for msg in primary_branch._message_manager.pile:
+                            branch._message_manager.pile.append(msg.clone())
+
+                    # Clear the pending flag
+                    branch.metadata["pending_context_inheritance"] = False
+
+                    if self.verbose:
+                        print(
+                            f"Operation {str(operation.id)[:8]} inherited context from {str(primary_dep_id)[:8]}"
+                        )
+
+            return branch
+
+        # Fallback to default branch (should not happen with proper pre-allocation)
+        if self.verbose:
+            print(
+                f"Warning: Operation {str(operation.id)[:8]} using default branch (not pre-allocated)"
+            )
 
-        # Default to session's default branch or the provided branch
         if hasattr(self, "_default_branch") and self._default_branch:
             return self._default_branch
         return self.session.default_branch
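
The context-merging change above is easiest to see with plain values. The sketch below mimics the new branch of the logic in isolation; the predecessor key and its contents are made up for illustration.

# Before 0.14.7, a string context hit str.update(...) and raised
# AttributeError; now it is wrapped under "original_context" instead.
pred_context = {"operation_3f2a1b90": {"summary": "Q3 revenue grew 12%."}}
params = {"context": "Focus on revenue trends."}  # caller passed a bare string

existing = params["context"]
if isinstance(existing, dict):
    existing.update(pred_context)
else:
    params["context"] = {"original_context": existing, **pred_context}

print(params["context"])
# {'original_context': 'Focus on revenue trends.',
#  'operation_3f2a1b90': {'summary': 'Q3 revenue grew 12%.'}}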