lionagi 0.14.5__py3-none-any.whl → 0.14.6__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- lionagi/fields/instruct.py +3 -17
- lionagi/libs/concurrency/cancel.py +1 -1
- lionagi/operations/builder.py +9 -0
- lionagi/operations/flow.py +163 -60
- lionagi/protocols/generic/pile.py +34 -15
- lionagi/service/connections/providers/_claude_code/__init__.py +3 -0
- lionagi/service/connections/providers/_claude_code/models.py +235 -0
- lionagi/service/connections/providers/_claude_code/stream_cli.py +350 -0
- lionagi/service/connections/providers/claude_code_.py +13 -223
- lionagi/service/connections/providers/claude_code_cli.py +38 -343
- lionagi/session/branch.py +6 -46
- lionagi/session/session.py +26 -8
- lionagi/version.py +1 -1
- {lionagi-0.14.5.dist-info → lionagi-0.14.6.dist-info}/METADATA +1 -1
- {lionagi-0.14.5.dist-info → lionagi-0.14.6.dist-info}/RECORD +17 -14
- {lionagi-0.14.5.dist-info → lionagi-0.14.6.dist-info}/WHEEL +0 -0
- {lionagi-0.14.5.dist-info → lionagi-0.14.6.dist-info}/licenses/LICENSE +0 -0
lionagi/fields/instruct.py
CHANGED
@@ -35,7 +35,6 @@ class Instruct(HashableModel):
         "reason",
         "actions",
         "action_strategy",
-        "batch_size",
         "request_params",
         "response_params",
     ]
@@ -97,16 +96,10 @@ class Instruct(HashableModel):
            "None: Contextual execution."
        ),
    )
-    action_strategy: Literal["
-        Field(
-            None,
-            description="Action strategy to use for executing actions. Default "
-            "is 'concurrent'. Only provide for if actions are enabled.",
-        )
-    )
-    batch_size: int | None = Field(
+    action_strategy: Literal["sequential", "concurrent"] | None = Field(
         None,
-        description="
+        description="Action strategy to use for executing actions. Default "
+        "is 'concurrent'. Only provide for if actions are enabled.",
     )
 
     @field_validator("instruction", "guidance", "context", mode="before")
@@ -123,13 +116,6 @@ class Instruct(HashableModel):
             return "concurrent"
         return v
 
-    @field_validator("batch_size", mode="before")
-    def _validate_batch_size(cls, v):
-        try:
-            return to_num(v, num_type=int)
-        except Exception:
-            return None
-
 
 class InstructResponse(HashableModel):
     instruct: Instruct
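The net effect is that `action_strategy` keeps its `Literal["sequential", "concurrent"] | None` typing in a single `Field(...)` definition, while `batch_size` and its `_validate_batch_size` validator are removed. A minimal standalone sketch of the resulting field shape, using plain Pydantic rather than lionagi's `HashableModel` (the `InstructSketch` model below is illustrative, not the library's class):

```python
from typing import Literal

from pydantic import BaseModel, Field


class InstructSketch(BaseModel):
    """Illustrative stand-in for the Instruct field layout after 0.14.6."""

    action_strategy: Literal["sequential", "concurrent"] | None = Field(
        None,
        description="Action strategy to use for executing actions. Default "
        "is 'concurrent'. Only provide if actions are enabled.",
    )
    # batch_size: int | None  <- removed in 0.14.6, along with _validate_batch_size


print(InstructSketch(action_strategy="concurrent").action_strategy)  # concurrent
print(InstructSketch().action_strategy)  # None
```

Callers that previously set `batch_size` on `Instruct` should drop it when upgrading, since both the field and its validator were removed.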
lionagi/operations/builder.py
CHANGED
@@ -14,6 +14,7 @@ from typing import Any
 from lionagi.operations.node import BranchOperations, Operation
 from lionagi.protocols.graph.edge import Edge
 from lionagi.protocols.graph.graph import Graph
+from lionagi.protocols.types import ID
 
 __all__ = (
     "OperationGraphBuilder",
@@ -76,6 +77,7 @@ class OperationGraphBuilder:
         node_id: str | None = None,
         depends_on: list[str] | None = None,
         inherit_context: bool = False,
+        branch=None,
         **parameters,
     ) -> str:
         """
@@ -108,6 +110,9 @@ class OperationGraphBuilder:
             # Add as metadata for easy lookup
             node.metadata["reference_id"] = node_id
 
+        if branch:
+            node.branch_id = ID.get_id(branch)
+
         # Handle dependencies
         if depends_on:
             for dep_id in depends_on:
@@ -227,6 +232,7 @@ class OperationGraphBuilder:
         source_node_ids: list[str] | None = None,
         inherit_context: bool = False,
         inherit_from_source: int = 0,
+        branch=None,
         **parameters,
     ) -> str:
         """
@@ -264,6 +270,9 @@ class OperationGraphBuilder:
         if node_id:
             node.metadata["reference_id"] = node_id
 
+        if branch:
+            node.branch_id = ID.get_id(branch)
+
         # Store context inheritance for aggregations
         if inherit_context and sources:
             node.metadata["inherit_context"] = True
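Both `add_operation` and `add_aggregation` gain an optional `branch` parameter; when it is given, the builder stores `ID.get_id(branch)` on the node's `branch_id`, so a node can be pinned to a specific branch at build time by passing either a branch object or its ID. A small sketch of that normalization (the `resolve_branch_id` helper is mine, not the library's):

```python
from lionagi.protocols.types import ID


def resolve_branch_id(branch):
    # Mirrors the new builder check: only set node.branch_id when a
    # branch (object or ID) was actually supplied.
    return ID.get_id(branch) if branch else None
```

During flow execution, `_preallocate_all_branches` looks a pre-set `branch_id` up in `session.branches` and reuses that branch instead of cloning the default branch for the node.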
lionagi/operations/flow.py
CHANGED
@@ -60,15 +60,25 @@ class DependencyAwareExecutor:
         self.operation_branches = {}  # operation_id -> Branch
 
         # Initialize completion events for all operations
+        # and check for already completed operations
         for node in graph.internal_nodes.values():
             if isinstance(node, Operation):
                 self.completion_events[node.id] = ConcurrencyEvent()
 
+                # If operation is already completed, mark it and store results
+                if node.execution.status == EventStatus.COMPLETED:
+                    self.completion_events[node.id].set()
+                    if hasattr(node, "response"):
+                        self.results[node.id] = node.response
+
     async def execute(self) -> dict[str, Any]:
         """Execute the operation graph."""
         if not self.graph.is_acyclic():
             raise ValueError("Graph must be acyclic for flow execution")
 
+        # Pre-allocate ALL branches upfront to avoid any locking during execution
+        await self._preallocate_all_branches()
+
         # Create capacity limiter for concurrency control
         # None means no limit, use the configured unlimited value
         capacity = (
@@ -91,10 +101,97 @@ class DependencyAwareExecutor:
             "final_context": self.context,
         }
 
+    async def _preallocate_all_branches(self):
+        """Pre-allocate ALL branches including for context inheritance to eliminate runtime locking."""
+        operations_needing_branches = []
+
+        # First pass: identify all operations that need branches
+        for node in self.graph.internal_nodes.values():
+            if not isinstance(node, Operation):
+                continue
+
+            # Skip if operation already has a branch_id
+            if node.branch_id:
+                try:
+                    # Ensure the branch exists in our local map
+                    branch = self.session.branches[node.branch_id]
+                    self.operation_branches[node.id] = branch
+                except:
+                    pass
+                continue
+
+            # Check if operation needs a new branch
+            predecessors = self.graph.get_predecessors(node)
+            if predecessors or node.metadata.get("inherit_context"):
+                operations_needing_branches.append(node)
+
+        if not operations_needing_branches:
+            return
+
+        # Create all branches in a single lock acquisition
+        async with self.session.branches.async_lock:
+            # For context inheritance, we need to create placeholder branches
+            # that will be updated once dependencies complete
+            for operation in operations_needing_branches:
+                # Create a fresh branch for now
+                branch_clone = self.session.default_branch.clone(
+                    sender=self.session.id
+                )
+
+                # Store in our operation branches map
+                self.operation_branches[operation.id] = branch_clone
+
+                # Add to session branches collection directly
+                # Check if this is a real branch (not a mock)
+                try:
+                    from lionagi.protocols.types import IDType
+
+                    # Try to validate the ID
+                    if hasattr(branch_clone, "id"):
+                        branch_id = branch_clone.id
+                        # Only add to collections if it's a valid ID
+                        if isinstance(branch_id, (str, IDType)) or (
+                            hasattr(branch_id, "__str__")
+                            and not hasattr(branch_id, "_mock_name")
+                        ):
+                            self.session.branches.collections[branch_id] = (
+                                branch_clone
+                            )
+                            self.session.branches.progression.append(branch_id)
+                except:
+                    # If validation fails, it's likely a mock - skip adding to collections
+                    pass
+
+                # Mark branches that need context inheritance for later update
+                if operation.metadata.get("inherit_context"):
+                    branch_clone.metadata = branch_clone.metadata or {}
+                    branch_clone.metadata["pending_context_inheritance"] = True
+                    branch_clone.metadata["inherit_from_operation"] = (
+                        operation.metadata.get("primary_dependency")
+                    )
+
+        if self.verbose:
+            print(f"Pre-allocated {len(operations_needing_branches)} branches")
+
     async def _execute_operation(
         self, operation: Operation, limiter: CapacityLimiter
     ):
         """Execute a single operation with dependency waiting."""
+        # Skip if operation is already completed
+        if operation.execution.status == EventStatus.COMPLETED:
+            if self.verbose:
+                print(
+                    f"Skipping already completed operation: {str(operation.id)[:8]}"
+                )
+            # Ensure results are available for dependencies
+            if operation.id not in self.results and hasattr(
+                operation, "response"
+            ):
+                self.results[operation.id] = operation.response
+            # Signal completion for any waiting operations
+            self.completion_events[operation.id].set()
+            return
+
         try:
             # Wait for dependencies
             await self._wait_for_dependencies(operation)
@@ -102,7 +199,7 @@ class DependencyAwareExecutor:
             # Acquire capacity to limit concurrency
             async with limiter:
                 # Prepare operation context
-
+                self._prepare_operation(operation)
 
                 # Execute the operation
                 if self.verbose:
@@ -191,7 +288,7 @@ class DependencyAwareExecutor:
                     f"Edge condition not satisfied for {str(operation.id)[:8]}"
                 )
 
-
+    def _prepare_operation(self, operation: Operation):
         """Prepare operation with context and branch assignment."""
         # Update operation context with predecessors
         predecessors = self.graph.get_predecessors(operation)
@@ -209,77 +306,83 @@ class DependencyAwareExecutor:
            if "context" not in operation.parameters:
                operation.parameters["context"] = pred_context
            else:
-
+                # Handle case where context might be a string
+                existing_context = operation.parameters["context"]
+                if isinstance(existing_context, dict):
+                    existing_context.update(pred_context)
+                else:
+                    # If it's a string or other type, create a new dict
+                    operation.parameters["context"] = {
+                        "original_context": existing_context,
+                        **pred_context,
+                    }
 
        # Add execution context
        if self.context:
            if "context" not in operation.parameters:
                operation.parameters["context"] = self.context.copy()
            else:
-
+                # Handle case where context might be a string
+                existing_context = operation.parameters["context"]
+                if isinstance(existing_context, dict):
+                    existing_context.update(self.context)
+                else:
+                    # If it's a string or other type, create a new dict
+                    operation.parameters["context"] = {
+                        "original_context": existing_context,
+                        **self.context,
+                    }
 
        # Determine and assign branch
-        branch =
+        branch = self._resolve_branch_for_operation(operation)
        self.operation_branches[operation.id] = branch
 
+    def _resolve_branch_for_operation(self, operation: Operation) -> Branch:
+        """Resolve which branch an operation should use - all branches are pre-allocated."""
+        # All branches should be pre-allocated
+        if operation.id in self.operation_branches:
+            branch = self.operation_branches[operation.id]
+
+            # Handle deferred context inheritance
+            if (
+                hasattr(branch, "metadata")
+                and branch.metadata
+                and branch.metadata.get("pending_context_inheritance")
+            ):
+
+                primary_dep_id = branch.metadata.get("inherit_from_operation")
+                if primary_dep_id and primary_dep_id in self.results:
+                    # Find the primary dependency's branch
+                    primary_branch = self.operation_branches.get(
+                        primary_dep_id, self.session.default_branch
+                    )
 
-            for node in self.graph.internal_nodes.values():
-                if (
-                    isinstance(node, Operation)
-                    and node.id == primary_dep_id
-                    and node.branch_id
+                    # Copy the messages from primary branch to this branch
+                    # This avoids creating a new branch and thus avoids locking
+                    # Access messages through the MessageManager
+                    if hasattr(branch, "_message_manager") and hasattr(
+                        primary_branch, "_message_manager"
                    ):
-                try:
-                    async with self.session.branches:
-                        fresh_branch = self.session.split(
-                            self.session.default_branch
-                        )
-                        if self.verbose:
-                            print(
-                                f"Operation {str(operation.id)[:8]} starting with fresh context"
-                            )
-                        return fresh_branch
-                except:
-                    pass
+                        branch._message_manager.pile.clear()
+                        for msg in primary_branch._message_manager.pile:
+                            branch._message_manager.pile.append(msg.clone())
+
+                    # Clear the pending flag
+                    branch.metadata["pending_context_inheritance"] = False
+
+                    if self.verbose:
+                        print(
+                            f"Operation {str(operation.id)[:8]} inherited context from {str(primary_dep_id)[:8]}"
+                        )
+
+            return branch
+
+        # Fallback to default branch (should not happen with proper pre-allocation)
+        if self.verbose:
+            print(
+                f"Warning: Operation {str(operation.id)[:8]} using default branch (not pre-allocated)"
+            )
 
-        # Default to session's default branch or the provided branch
        if hasattr(self, "_default_branch") and self._default_branch:
            return self._default_branch
        return self.session.default_branch
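The main change in `flow.py` is that `DependencyAwareExecutor` now pre-allocates every operation branch inside one `session.branches.async_lock` acquisition before execution starts, and short-circuits operations whose `execution.status` is already `COMPLETED`. A generic asyncio sketch of that "allocate everything under one lock, then run lock-free" pattern (none of the names below are lionagi APIs):

```python
import asyncio


async def run_graph(operations, make_resource):
    """Sketch of the 0.14.6 flow strategy: allocate every per-operation
    resource under a single lock up front, then execute concurrently
    without touching the lock again."""
    lock = asyncio.Lock()
    resources = {}

    # Single lock acquisition: one resource (cf. a cloned Branch) per operation.
    async with lock:
        for op in operations:
            resources[op] = make_resource(op)

    async def execute(op):
        # No locking needed here; the resource was pre-allocated above.
        await asyncio.sleep(0)  # stand-in for the real work
        return f"{op} ran on {resources[op]}"

    return await asyncio.gather(*(execute(op) for op in operations))


if __name__ == "__main__":
    out = asyncio.run(run_graph(["draft", "review"], lambda op: f"branch-{op}"))
    print(out)
```

The executor also records results for operations that are already `COMPLETED` and sets their completion events immediately, so re-running a partially executed graph no longer blocks on nodes that finished in an earlier run.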
lionagi/protocols/generic/pile.py
CHANGED
@@ -5,6 +5,7 @@
 from __future__ import annotations
 
 import asyncio
+import threading
 from collections import deque
 from collections.abc import (
     AsyncIterator,
@@ -35,13 +36,19 @@ D = TypeVar("D")
 T = TypeVar("T", bound=E)
 
 
-
+def synchronized(func: Callable):
+    @wraps(func)
+    def wrapper(self: Pile, *args, **kwargs):
+        with self.lock:
+            return func(self, *args, **kwargs)
+
+    return wrapper
 
 
 def async_synchronized(func: Callable):
     @wraps(func)
     async def wrapper(self: Pile, *args, **kwargs):
-        async with self.
+        async with self.async_lock:
             return await func(self, *args, **kwargs)
 
     return wrapper
@@ -82,7 +89,8 @@ class Pile(Element, Collective[E], Generic[E], Adaptable, AsyncAdaptable):
 
     def __pydantic_extra__(self) -> dict[str, FieldInfo]:
         return {
-            "_lock": Field(default_factory=
+            "_lock": Field(default_factory=threading.Lock),
+            "_async": Field(default_factory=ConcurrencyLock),
         }
 
     def __pydantic_private__(self) -> dict[str, FieldInfo]:
@@ -162,6 +170,7 @@ class Pile(Element, Collective[E], Generic[E], Adaptable, AsyncAdaptable):
         """
         self._setitem(key, item)
 
+    @synchronized
     def pop(
         self,
         key: ID.Ref | ID.RefSeq | int | slice,
@@ -224,6 +233,7 @@ class Pile(Element, Collective[E], Generic[E], Adaptable, AsyncAdaptable):
         """
         self._exclude(item)
 
+    @synchronized
     def clear(self) -> None:
         """Remove all items."""
         self._clear()
@@ -243,6 +253,7 @@ class Pile(Element, Collective[E], Generic[E], Adaptable, AsyncAdaptable):
         """
         self._update(other)
 
+    @synchronized
     def insert(self, index: int, item: T, /) -> None:
         """Insert item at position.
 
@@ -256,6 +267,7 @@ class Pile(Element, Collective[E], Generic[E], Adaptable, AsyncAdaptable):
         """
         self._insert(index, item)
 
+    @synchronized
     def append(self, item: T, /) -> None:
         """Append item to end (alias for include).
 
@@ -267,6 +279,7 @@ class Pile(Element, Collective[E], Generic[E], Adaptable, AsyncAdaptable):
         """
         self.update(item)
 
+    @synchronized
     def get(
         self,
         key: ID.Ref | ID.RefSeq | int | slice,
@@ -306,12 +319,10 @@ class Pile(Element, Collective[E], Generic[E], Adaptable, AsyncAdaptable):
 
     def __iter__(self) -> Iterator[T]:
         """Iterate over items safely."""
-        # Take a snapshot of the current order to avoid holding lock during iteration
         current_order = list(self.progression)
 
         for key in current_order:
-
-            yield self.collections[key]
+            yield self.collections[key]
 
     def __next__(self) -> T:
         """Get next item."""
@@ -464,20 +475,29 @@ class Pile(Element, Collective[E], Generic[E], Adaptable, AsyncAdaptable):
         """Prepare for pickling."""
         state = self.__dict__.copy()
         state["_lock"] = None
+        state["_async_lock"] = None
         return state
 
     def __setstate__(self, state):
         """Restore after unpickling."""
         self.__dict__.update(state)
-        self._lock =
+        self._lock = threading.Lock()
+        self._async_lock = ConcurrencyLock()
 
     @property
     def lock(self):
-        """
+        """Thread lock."""
         if not hasattr(self, "_lock") or self._lock is None:
-            self._lock =
+            self._lock = threading.Lock()
         return self._lock
 
+    @property
+    def async_lock(self):
+        """Async lock."""
+        if not hasattr(self, "_async_lock") or self._async_lock is None:
+            self._async_lock = ConcurrencyLock()
+        return self._async_lock
+
     # Async Interface methods
     @async_synchronized
     async def asetitem(
@@ -554,13 +574,12 @@ class Pile(Element, Collective[E], Generic[E], Adaptable, AsyncAdaptable):
 
     async def __aiter__(self) -> AsyncIterator[T]:
         """Async iterate over items."""
-        async with self.
+        async with self.async_lock:
             current_order = list(self.progression)
 
         for key in current_order:
-
-
-            await asyncio.sleep(0)  # Yield control to the event loop
+            yield self.collections[key]
+            await asyncio.sleep(0)  # Yield control to the event loop
 
     async def __anext__(self) -> T:
         """Async get next item."""
@@ -893,7 +912,7 @@ class Pile(Element, Collective[E], Generic[E], Adaptable, AsyncAdaptable):
 
     async def __aenter__(self) -> Self:
         """Enter async context."""
-        await self.
+        await self.async_lock.__aenter__()
         return self
 
     async def __aexit__(
@@ -903,7 +922,7 @@ class Pile(Element, Collective[E], Generic[E], Adaptable, AsyncAdaptable):
         exc_tb: Any,
     ) -> None:
         """Exit async context."""
-        await self.
+        await self.async_lock.__aexit__(exc_type, exc_val, exc_tb)
 
     def is_homogenous(self) -> bool:
         """Check if all items are same type."""
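`Pile` now carries two locks: a `threading.Lock` behind the `lock` property guarding the synchronous mutators (`pop`, `clear`, `insert`, `append`, `get`) via the new `synchronized` decorator, and a `ConcurrencyLock` behind `async_lock` used by `async_synchronized`, `__aiter__`, and the async context manager. A standalone sketch of the same dual-decorator pattern (the `Store` class below is illustrative, not lionagi's `Pile`):

```python
import asyncio
import threading
from functools import wraps


def synchronized(func):
    @wraps(func)
    def wrapper(self, *args, **kwargs):
        with self.lock:  # thread lock for synchronous callers
            return func(self, *args, **kwargs)
    return wrapper


def async_synchronized(func):
    @wraps(func)
    async def wrapper(self, *args, **kwargs):
        async with self.async_lock:  # async lock for coroutine callers
            return await func(self, *args, **kwargs)
    return wrapper


class Store:
    """Toy container showing the dual sync/async lock layout."""

    def __init__(self):
        self.lock = threading.Lock()
        self.async_lock = asyncio.Lock()
        self.items = []

    @synchronized
    def append(self, item):
        self.items.append(item)

    @async_synchronized
    async def aappend(self, item):
        self.items.append(item)


async def main():
    store = Store()
    store.append("sync")
    await store.aappend("async")
    print(store.items)


asyncio.run(main())
```

Pickling follows the same split: `__getstate__` drops both lock objects and `__setstate__` recreates them, since neither lock can be pickled.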