lionagi 0.5.3__py3-none-any.whl → 0.5.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lionagi/core/action/action_manager.py +2 -0
- lionagi/core/communication/assistant_response.py +10 -0
- lionagi/core/communication/message.py +1 -1
- lionagi/core/communication/message_manager.py +13 -0
- lionagi/core/communication/utils.py +4 -2
- lionagi/core/session/branch_mixins.py +76 -39
- lionagi/core/session/session.py +3 -3
- lionagi/integrations/anthropic_/AnthropicModel.py +4 -9
- lionagi/integrations/anthropic_/AnthropicService.py +10 -0
- lionagi/integrations/anthropic_/anthropic_max_output_token_data.yaml +5 -0
- lionagi/integrations/anthropic_/anthropic_price_data.yaml +26 -6
- lionagi/integrations/anthropic_/version.py +1 -1
- lionagi/integrations/groq_/GroqService.py +5 -0
- lionagi/integrations/groq_/version.py +1 -1
- lionagi/integrations/litellm_/imodel.py +5 -0
- lionagi/integrations/openai_/OpenAIModel.py +0 -4
- lionagi/integrations/openai_/OpenAIService.py +9 -0
- lionagi/integrations/openai_/version.py +1 -1
- lionagi/integrations/perplexity_/PerplexityService.py +5 -0
- lionagi/integrations/perplexity_/version.py +1 -1
- lionagi/libs/func/async_calls/alcall.py +7 -0
- lionagi/operations/brainstorm/brainstorm.py +318 -93
- lionagi/operations/plan/plan.py +280 -67
- lionagi/service/imodel.py +5 -0
- lionagi/version.py +1 -1
- {lionagi-0.5.3.dist-info → lionagi-0.5.5.dist-info}/METADATA +1 -1
- {lionagi-0.5.3.dist-info → lionagi-0.5.5.dist-info}/RECORD +29 -29
- {lionagi-0.5.3.dist-info → lionagi-0.5.5.dist-info}/WHEEL +0 -0
- {lionagi-0.5.3.dist-info → lionagi-0.5.5.dist-info}/licenses/LICENSE +0 -0
lionagi/operations/plan/plan.py
CHANGED
@@ -2,10 +2,11 @@
 #
 # SPDX-License-Identifier: Apache-2.0
 
-
 from lionagi.core.session.branch import Branch
 from lionagi.core.session.session import Session
 from lionagi.core.typing import ID, Any, BaseModel, Literal
+from lionagi.libs.func.types import alcall
+from lionagi.libs.parse import to_flat_list
 from lionagi.protocols.operatives.instruct import (
     INSTRUCT_FIELD_MODEL,
     Instruct,
@@ -15,13 +16,45 @@ from lionagi.protocols.operatives.instruct import (
 from ..utils import prepare_instruct, prepare_session
 from .prompt import EXPANSION_PROMPT, PLAN_PROMPT
 
+# ---------------------------------------------------------------------
+# Data Model
+# ---------------------------------------------------------------------
+
 
 class PlanOperation(BaseModel):
+    """
+    Stores all relevant outcomes for a multi-step Plan:
+      * initial: The result of the initial plan prompt
+      * plan: A list of plan steps (Instruct objects) generated from the initial planning
+      * execute: Any responses from executing those plan steps
+    """
+
     initial: Any
     plan: list[Instruct] | None = None
     execute: list[InstructResponse] | None = None
 
 
+# ---------------------------------------------------------------------
+# Utilities
+# ---------------------------------------------------------------------
+
+
+def chunked(iterable, n):
+    """
+    Yield successive n-sized chunks from an iterable.
+    Example:
+        >>> list(chunked([1,2,3,4,5], 2))
+        [[1,2],[3,4],[5]]
+    """
+    for i in range(0, len(iterable), n):
+        yield iterable[i : i + n]
+
+
+# ---------------------------------------------------------------------
+# Single-Step Runner
+# ---------------------------------------------------------------------
+
+
 async def run_step(
     ins: Instruct,
     session: Session,
@@ -29,33 +62,41 @@ async def run_step(
     verbose: bool = True,
     **kwargs: Any,
 ) -> Any:
-    """
+    """
+    Execute a single step of the plan with an 'expansion' or guidance prompt.
 
     Args:
         ins: The instruction model for the step.
-        session: The current session.
-        branch: The branch to operate on.
+        session: The current session context.
+        branch: The branch to operate on for this step.
         verbose: Whether to enable verbose output.
-        **kwargs: Additional keyword arguments.
+        **kwargs: Additional keyword arguments passed to the branch operation.
 
     Returns:
-        The result of the branch operation.
+        The result of the branch operation (which may contain more instructions).
     """
     if verbose:
-
+        snippet = (
             ins.instruction[:100] + "..."
             if len(ins.instruction) > 100
            else ins.instruction
        )
-        print(f"Further planning: {
+        print(f"Further planning: {snippet}")
 
+    # Incorporate the EXPANSION_PROMPT into guidance
     config = {**ins.model_dump(), **kwargs}
-
-    config["guidance"] = EXPANSION_PROMPT
+    guidance_text = config.pop("guidance", "")
+    config["guidance"] = f"{EXPANSION_PROMPT}\n{guidance_text}"
+
+    # Run the step
+    result = await branch.operate(**config)
+    branch.msgs.logger.dump()  # Dump logs if needed
+    return result
 
-
-
-
+
+# ---------------------------------------------------------------------
+# Main Plan Function (with Multiple Execution Strategies)
+# ---------------------------------------------------------------------
 
 
 async def plan(
@@ -65,108 +106,280 @@ async def plan(
     branch: Branch | ID.Ref | None = None,
     auto_run: bool = True,
     auto_execute: bool = False,
-    execution_strategy: Literal[
+    execution_strategy: Literal[
+        "sequential",
+        "concurrent",
+        "sequential_concurrent_chunk",
+        "concurrent_sequential_chunk",
+    ] = "sequential",
     execution_kwargs: dict[str, Any] | None = None,
     branch_kwargs: dict[str, Any] | None = None,
     return_session: bool = False,
     verbose: bool = True,
     **kwargs: Any,
-) -> PlanOperation | tuple[
-    """
+) -> PlanOperation | tuple[PlanOperation, Session]:
+    """
+    Create and optionally execute a multi-step plan with up to `num_steps`.
+
+    Steps:
+      1. Generate an initial plan with up to `num_steps`.
+      2. Optionally (auto_run=True) expand on each planned step
+         to refine or further clarify them.
+      3. Optionally (auto_execute=True) execute those refined steps
+         according to `execution_strategy`.
 
     Args:
-        instruct:
-        num_steps:
-        session:
-        branch:
-        auto_run: If True, automatically run the steps.
-
-
-
-
+        instruct: Initial instruction or a dict describing it.
+        num_steps: Maximum number of plan steps (must be <= 5).
+        session: An existing Session, or None to create a new one.
+        branch: An existing Branch, or None to create a new one.
+        auto_run: If True, automatically run the intermediate plan steps.
+        auto_execute: If True, automatically execute the fully refined steps.
+        execution_strategy:
+            - "sequential" (default) runs steps one by one
+            - "concurrent" runs all steps in parallel
+            - "sequential_concurrent_chunk" processes chunks sequentially, each chunk in parallel
+            - "concurrent_sequential_chunk" processes all chunks in parallel, each chunk sequentially
+        execution_kwargs: Extra kwargs used during execution calls.
+        branch_kwargs: Extra kwargs for branch/session creation.
+        return_session: Whether to return (PlanOperation, Session) instead of just PlanOperation.
+        verbose: If True, prints verbose logs.
+        **kwargs: Additional arguments for the initial plan operation.
 
     Returns:
-
+        A PlanOperation object containing:
+          - initial plan
+          - (optional) plan expansions
+          - (optional) execution responses
+        Optionally returns the session as well, if `return_session=True`.
     """
+
+    # -----------------------------------------------------------------
+    # 0. Basic Validation & Setup
+    # -----------------------------------------------------------------
     if num_steps > 5:
         raise ValueError("Number of steps must be 5 or less")
 
     if verbose:
         print(f"Planning execution with {num_steps} steps...")
 
+    # Ensure the correct field model
     field_models: list = kwargs.get("field_models", [])
     if INSTRUCT_FIELD_MODEL not in field_models:
         field_models.append(INSTRUCT_FIELD_MODEL)
     kwargs["field_models"] = field_models
+
+    # Prepare session/branch
     session, branch = prepare_session(session, branch, branch_kwargs)
-    execute_branch: Branch = session.split(
-
-
-    )
+    execute_branch: Branch = session.split(
+        branch
+    )  # a separate branch for execution
 
-
-
+    # -----------------------------------------------------------------
+    # 1. Run the Initial Plan Prompt
+    # -----------------------------------------------------------------
+    plan_prompt = PLAN_PROMPT.format(num_steps=num_steps)
+    instruct = prepare_instruct(instruct, plan_prompt)
+    initial_res = await branch.operate(**instruct, **kwargs)
+
+    # Wrap initial result in the PlanOperation
+    out = PlanOperation(initial=initial_res)
 
     if verbose:
         print("Initial planning complete. Starting step planning...")
 
+    # If we aren't auto-running the steps, just return the initial plan
     if not auto_run:
-        if return_session
-            return res1, session
-        return res1
+        return (out, session) if return_session else out
 
+    # -----------------------------------------------------------------
+    # 2. Expand Each Step (auto_run=True)
+    # -----------------------------------------------------------------
     results = []
-    if hasattr(
-        instructs: list[Instruct] =
-        for i,
+    if hasattr(initial_res, "instruct_models"):
+        instructs: list[Instruct] = initial_res.instruct_models
+        for i, step_ins in enumerate(instructs, start=1):
             if verbose:
                 print(f"\n----- Planning step {i}/{len(instructs)} -----")
-
-
+            expanded_res = await run_step(
+                step_ins, session, branch, verbose=verbose, **kwargs
             )
-            results.append(
+            results.append(expanded_res)
 
     if verbose:
-        print("\nAll planning
+        print("\nAll planning steps expanded/refined successfully!")
+
+    # Gather all newly created plan instructions
+    refined_plans = []
+    for step_result in results:
+        if hasattr(step_result, "instruct_models"):
+            for model in step_result.instruct_models:
+                if model and model not in refined_plans:
+                    refined_plans.append(model)
 
-
-    for res in results:
-        if hasattr(res, "instruct_models"):
-            for i in res.instruct_models:
-                if i and i not in all_plans:
-                    all_plans.append(i)
-    out.plan = all_plans
+    out.plan = refined_plans
 
+    # -----------------------------------------------------------------
+    # 3. Execute the Plan Steps (auto_execute=True)
+    # -----------------------------------------------------------------
    if auto_execute:
        if verbose:
-            print("\nStarting execution of all steps...")
-
+            print("\nStarting execution of all plan steps...")
+
+        # We now handle multiple strategies:
        match execution_strategy:
+
+            # ---------------------------------------------------------
+            # Strategy A: SEQUENTIAL
+            # ---------------------------------------------------------
            case "sequential":
-
+                seq_results = []
+                for i, plan_step in enumerate(refined_plans, start=1):
+                    if verbose:
+                        snippet = (
+                            plan_step.instruction[:100] + "..."
+                            if len(plan_step.instruction) > 100
+                            else plan_step.instruction
+                        )
+                        print(
+                            f"\n------ Executing step {i}/{len(refined_plans)} ------"
+                        )
+                        print(f"Instruction: {snippet}")
+
+                    step_response = await execute_branch.instruct(
+                        plan_step, **(execution_kwargs or {})
+                    )
+                    seq_results.append(
+                        InstructResponse(
+                            instruct=plan_step, response=step_response
+                        )
+                    )
+
+                out.execute = seq_results
+                if verbose:
+                    print("\nAll steps executed successfully (sequential)!")
+
+            # ---------------------------------------------------------
+            # Strategy B: CONCURRENT
+            # ---------------------------------------------------------
+            case "concurrent":
+
+                async def execute_step_concurrently(plan_step: Instruct):
+                    if verbose:
+                        snippet = (
+                            plan_step.instruction[:100] + "..."
+                            if len(plan_step.instruction) > 100
+                            else plan_step.instruction
+                        )
+                        print(f"\n------ Executing step (concurrently) ------")
+                        print(f"Instruction: {snippet}")
+                    local_branch = session.split(execute_branch)
+                    resp = await local_branch.instruct(
+                        plan_step, **(execution_kwargs or {})
+                    )
+                    return InstructResponse(instruct=plan_step, response=resp)
+
+                # Launch all steps in parallel
+                concurrent_res = await alcall(
+                    refined_plans, execute_step_concurrently
+                )
+                out.execute = concurrent_res
+                if verbose:
+                    print("\nAll steps executed successfully (concurrent)!")
+
+            # ---------------------------------------------------------
+            # Strategy C: SEQUENTIAL_CONCURRENT_CHUNK
+            #   - process plan steps in chunks (one chunk after another),
+            #   - each chunk’s steps run in parallel.
+            # ---------------------------------------------------------
+            case "sequential_concurrent_chunk":
+                chunk_size = (execution_kwargs or {}).get("chunk_size", 5)
+                all_exec_responses = []
+
+                async def execute_chunk_concurrently(
+                    sub_steps: list[Instruct],
+                ):
                    if verbose:
                        print(
-                            f"\n
+                            f"\n--- Executing a chunk of size {len(sub_steps)} concurrently ---"
                        )
-
-
-
-
+
+                    async def _execute(plan_step: Instruct):
+                        local_branch = session.split(execute_branch)
+                        resp = await local_branch.instruct(
+                            plan_step, **(execution_kwargs or {})
+                        )
+                        return InstructResponse(
+                            instruct=plan_step, response=resp
                        )
-
-
-
+
+                    # run each chunk in parallel
+                    return await alcall(sub_steps, _execute)
+
+                # process each chunk sequentially
+                for chunk in chunked(refined_plans, chunk_size):
+                    chunk_responses = await execute_chunk_concurrently(chunk)
+                    all_exec_responses.extend(chunk_responses)
+
+                out.execute = all_exec_responses
+                if verbose:
+                    print(
+                        "\nAll steps executed successfully (sequential concurrent chunk)!"
                    )
-
-
-
+
+            # ---------------------------------------------------------
+            # Strategy D: CONCURRENT_SEQUENTIAL_CHUNK
+            #   - split plan steps into chunks,
+            #   - run all chunks in parallel,
+            #   - but each chunk’s steps run sequentially.
+            # ---------------------------------------------------------
+            case "concurrent_sequential_chunk":
+                chunk_size = (execution_kwargs or {}).get("chunk_size", 5)
+                all_chunks = list(chunked(refined_plans, chunk_size))
+
+                async def execute_chunk_sequentially(
+                    sub_steps: list[Instruct],
+                ):
+                    chunk_result = []
+                    local_branch = session.split(execute_branch)
+                    for plan_step in sub_steps:
+                        if verbose:
+                            snippet = (
+                                plan_step.instruction[:100] + "..."
+                                if len(plan_step.instruction) > 100
+                                else plan_step.instruction
+                            )
+                            print(
+                                f"\n--- Executing step (sequential in chunk) ---\nInstruction: {snippet}"
+                            )
+                        resp = await local_branch.instruct(
+                            plan_step, **(execution_kwargs or {})
+                        )
+                        chunk_result.append(
+                            InstructResponse(instruct=plan_step, response=resp)
+                        )
+                    return chunk_result
+
+                # run all chunks in parallel, each chunk sequentially
+                parallel_chunk_results = await alcall(
+                    all_chunks,
+                    execute_chunk_sequentially,
+                    flatten=True,
+                    dropna=True,
+                )
+
+                out.execute = parallel_chunk_results
                if verbose:
-                    print(
+                    print(
+                        "\nAll steps executed successfully (concurrent sequential chunk)!"
+                    )
+
            case _:
                raise ValueError(
                    f"Invalid execution strategy: {execution_strategy}"
                )
 
-
-
-
+    # -----------------------------------------------------------------
+    # 4. Final Return
+    # -----------------------------------------------------------------
+    return (out, session) if return_session else out
lionagi/service/imodel.py
CHANGED
@@ -121,5 +121,10 @@ class iModel:
     def allowed_roles(self):
         return self.service.allowed_roles
 
+    @property
+    def sequential_exchange(self):
+        """whether the service requires user/assistant exchange"""
+        return self.service.sequential_exchange
+
 
 __all__ = ["iModel"]
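A minimal sketch of how a caller might consume the new flag; the classes here are hypothetical stand-ins, since the real iModel wraps an actual integration service and only the delegation shown in the diff is assumed.

class FakeService:  # hypothetical stand-in for an integration service
    sequential_exchange = True  # e.g. an API that enforces user/assistant turn order
    allowed_roles = ("user", "assistant")

class FakeIModel:  # mirrors only the property delegation added above
    def __init__(self, service):
        self.service = service

    @property
    def sequential_exchange(self):
        return self.service.sequential_exchange

imodel = FakeIModel(FakeService())
if imodel.sequential_exchange:
    print("messages must strictly alternate between user and assistant")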
lionagi/version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.5.3"
+__version__ = "0.5.5"
{lionagi-0.5.3.dist-info → lionagi-0.5.5.dist-info}/RECORD
CHANGED
@@ -1,10 +1,10 @@
 lionagi/__init__.py,sha256=oybfu2VsZc4ElN7ZeaW3KQrz8T8EcSDHPA8lUE-8G2I,537
 lionagi/settings.py,sha256=BOjxRV4N9zQJervvajPhbaHmgZ-nhbCy7AaQJi3Avug,2726
-lionagi/version.py,sha256=
+lionagi/version.py,sha256=78mfpLewKVki6c9UONSUdlVme_JsN9ZwIfp4Hf4jmG0,22
 lionagi/core/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/core/_class_registry.py,sha256=srSWefqCS9EZrMvyA8zCrZ9KFvzAhTIj8g6mJG5KlIc,1982
 lionagi/core/action/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
-lionagi/core/action/action_manager.py,sha256=
+lionagi/core/action/action_manager.py,sha256=R7gXNu--lM8DSFNMP1fUOcNzWPsAE49qizpw8pP11fo,10236
 lionagi/core/action/base.py,sha256=M2K3irxpp7fDwczefzZdUsHhGd_21l1xhFT9iNSbiK0,4110
 lionagi/core/action/function_calling.py,sha256=7N5UFLbHPy6yvPrR6RH9SNSwt73s0X_3iAnOV8YMy0Q,6001
 lionagi/core/action/tool.py,sha256=pDiyttx0obu0qDskz7POHxbyqTwHuD7HCeE31LkzRMg,6884
@@ -12,14 +12,14 @@ lionagi/core/action/types.py,sha256=KqW5ZHXssfxuf1kIiIUoj-r3KIZEoQ_GkZ04tL6fUPQ,
 lionagi/core/communication/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/core/communication/action_request.py,sha256=gicqQIs7VLOhnkQYsUuRmSLGittODiG3Pk74FayPSeM,5006
 lionagi/core/communication/action_response.py,sha256=QONcRtvV7Ympbzkdg5gnV0DqCC97Y-gvaA0D7EkjswM,4505
-lionagi/core/communication/assistant_response.py,sha256=
+lionagi/core/communication/assistant_response.py,sha256=IidDoxC54lxnDT194w5exWC9hDb0TxTY2mmwzbQwbOg,6004
 lionagi/core/communication/base_mail.py,sha256=Et-WrHlw2x75mLIKy-wMUj3YlGGL_2BF9r1REuiZtM8,1704
 lionagi/core/communication/instruction.py,sha256=Iy3G1sp0lPuA4Ak9pLobIipou4ePLsMdGGvRq6L58UU,12454
-lionagi/core/communication/message.py,sha256=
-lionagi/core/communication/message_manager.py,sha256=
+lionagi/core/communication/message.py,sha256=ZE4hJfDN1LLOGYM-nxFZDIP6v8b3fMNcziS7Pj1gYNA,8556
+lionagi/core/communication/message_manager.py,sha256=MRl7_RdgOOHedUOVZ7FT9DtsWK4C4nj5NzTXgJV71aw,17478
 lionagi/core/communication/system.py,sha256=5DoDODZePy4EDpE3oI5RpkzBXVp9WC2Mf4B3fPImItI,3824
 lionagi/core/communication/types.py,sha256=rBGMpE3NW0SGKc-wJdJvT-VrO3wpjwjQhoWzSC2Dqh0,745
-lionagi/core/communication/utils.py,sha256=
+lionagi/core/communication/utils.py,sha256=5W0xW6Tx-DF4Vzpjpk01g1960mNjvBUWrDLGU-thBaQ,7031
 lionagi/core/communication/templates/README.md,sha256=Ch4JrKSjd85fLitAYO1OhZjNOGKHoEwaKQlcV16jiUI,1286
 lionagi/core/communication/templates/action_request.jinja2,sha256=d6OmxHKyvvNDSK4bnBM3TGSUk_HeE_Q2EtLAQ0ZBEJg,120
 lionagi/core/communication/templates/action_response.jinja2,sha256=Mg0UxmXlIvtP_KPB0GcJxE1TP6lml9BwdPkW1PZxkg8,142
@@ -55,8 +55,8 @@ lionagi/core/models/schema_model.py,sha256=H2tSX3r0U6MDNi929rWmAZy3nUKMP47RG-Ma0
 lionagi/core/models/types.py,sha256=elcUuz_9dx4AhZddnICF-Cs62VJWIBqMET7MiRe4c1I,447
 lionagi/core/session/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/core/session/branch.py,sha256=r6yNXwTm0oYA-9YOereuvLJtDnhj9IJWJ5fjTyKN88U,4406
-lionagi/core/session/branch_mixins.py,sha256=
-lionagi/core/session/session.py,sha256=
+lionagi/core/session/branch_mixins.py,sha256=7uVqxkgvbj53JlDk9roL032c2YeNl_bq5UQSxw24raQ,21240
+lionagi/core/session/session.py,sha256=cutece_iTs5K_m5soRfU9oTfHmw1icDEvx77E1RelIM,5129
 lionagi/core/session/types.py,sha256=MUGTSa2HWK79p7z-CG22RFP07N5AKnPVNXZwZt_wIvU,202
 lionagi/core/typing/__init__.py,sha256=Y9BK1OUXzjQgIo3epCVwWqUYhFwQQ_ayhRwI1UOmINg,228
 lionagi/core/typing/_concepts.py,sha256=uIzqfwtPBsIREhvT7NDAowc4i-u-69n_DRzLHzvHZO4,3730
@@ -65,12 +65,12 @@ lionagi/core/typing/_pydantic.py,sha256=xMNyT0rDhSpKkEDfzj6GHobqlTtRn48svhmA75LP
 lionagi/core/typing/_typing.py,sha256=VJj5W6y-JGK1ZzSbyDs4qAuq0cA5wp5HtRgZUsZ50E0,869
 lionagi/integrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lionagi/integrations/_services.py,sha256=qxffUaPKvp2Bb_LI3Uiyokm7l6ZAbRi0xKxZXCYs67c,498
-lionagi/integrations/anthropic_/AnthropicModel.py,sha256=
-lionagi/integrations/anthropic_/AnthropicService.py,sha256=
+lionagi/integrations/anthropic_/AnthropicModel.py,sha256=6O9-GAQnq-Erhx62pxQDRuVL-vpCFUWMEe3V6HmR1MA,9076
+lionagi/integrations/anthropic_/AnthropicService.py,sha256=t1LL_64xmz4vFDGVGm8M7Stion8LwfJVVEPvdtYwDtg,4074
 lionagi/integrations/anthropic_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
-lionagi/integrations/anthropic_/anthropic_max_output_token_data.yaml,sha256=
-lionagi/integrations/anthropic_/anthropic_price_data.yaml,sha256=
-lionagi/integrations/anthropic_/version.py,sha256=
+lionagi/integrations/anthropic_/anthropic_max_output_token_data.yaml,sha256=MCgwrtCwaVE9SJar3NNKI51uuwQ2Nzko979lTd5mEeU,360
+lionagi/integrations/anthropic_/anthropic_price_data.yaml,sha256=tviu4PY-xjo-9vhx--Vsp0ZNYVhgcaQAAfZvD7GxErc,756
+lionagi/integrations/anthropic_/version.py,sha256=25-aR3n_vIk4o62w93ToEUcEYbH9ZPmmJjwkf4fvZls,135
 lionagi/integrations/anthropic_/api_endpoints/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/integrations/anthropic_/api_endpoints/api_request.py,sha256=JWp-m4DFFwrR13ckTMnfg2ULOJAmHDheuXDNR24vk4s,10368
 lionagi/integrations/anthropic_/api_endpoints/data_models.py,sha256=qX6U9G_ZNlASyvhQ3WzgdR0kMvSWbO-kbQyo6R7pIAc,978
@@ -84,19 +84,19 @@ lionagi/integrations/anthropic_/api_endpoints/messages/response/content_models.p
 lionagi/integrations/anthropic_/api_endpoints/messages/response/response_body.py,sha256=96e2GAAiTGkExRullvAsAOGhPPHNByhnv6DK_wz1j40,3213
 lionagi/integrations/anthropic_/api_endpoints/messages/response/usage_models.py,sha256=s6oe5iOU027M7YPS10upnvcPsuFbTV1ZM00lInHPKvA,695
 lionagi/integrations/groq_/GroqModel.py,sha256=y2KaFe8GmlTBnBRvB09dNjYYhNEjq2wujYfB8YzGNHI,11950
-lionagi/integrations/groq_/GroqService.py,sha256=
+lionagi/integrations/groq_/GroqService.py,sha256=GKYDONxZpzcH7XgVlLg0Tw2wWBAAcOywsBcahbOQPCU,4781
 lionagi/integrations/groq_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/integrations/groq_/groq_max_output_token_data.yaml,sha256=Y0PbyZ7pyyV1zi7ZJSJGVPsZDhSeN2JMOyghzkqqoKc,559
 lionagi/integrations/groq_/groq_price_data.yaml,sha256=HmN_2-QliKvaC8Ghq7QJAD97ubMYzPSy7EGgqzgCz48,1234
 lionagi/integrations/groq_/groq_rate_limits.yaml,sha256=x8hiZx_-H3ZyMznnxgFCQidWMEK5jIzBvrYpxb8gFYI,2863
-lionagi/integrations/groq_/version.py,sha256=
+lionagi/integrations/groq_/version.py,sha256=25-aR3n_vIk4o62w93ToEUcEYbH9ZPmmJjwkf4fvZls,135
 lionagi/integrations/groq_/api_endpoints/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lionagi/integrations/groq_/api_endpoints/data_models.py,sha256=bwqAbERAT1xAzLkMKkBrHOLr-hLDcblW4pcQ5Oqmyq8,5424
 lionagi/integrations/groq_/api_endpoints/groq_request.py,sha256=u-GJuu0ZsY7jMWaXtNXnud7Pw3Pxs6Cc280UnTHk3YE,10383
 lionagi/integrations/groq_/api_endpoints/match_response.py,sha256=95vRKsR1QYPPmBY36dC5azdKn5UlXNRrTQqTUZro_YM,3756
 lionagi/integrations/groq_/api_endpoints/response_utils.py,sha256=P5kRsGHe-Rx9xejfRcU8q680yotcWLTSaSUuqXATcho,3710
 lionagi/integrations/litellm_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
-lionagi/integrations/litellm_/imodel.py,sha256=
+lionagi/integrations/litellm_/imodel.py,sha256=M52linffjZuytzHzhJStPLcMa90oCs-pfVljzdK-0Es,2365
 lionagi/integrations/ollama_/OllamaModel.py,sha256=5kBYIWShsSpQpSgOxdbRk2_4jmss6Y8iISjUcS3KoWw,8341
 lionagi/integrations/ollama_/OllamaService.py,sha256=bJ4kk1FPjn_kecLzxTJgVj05KZPzF5FclHoDA3jdAlg,4080
 lionagi/integrations/ollama_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
@@ -125,12 +125,12 @@ lionagi/integrations/ollama_/api_endpoints/model/list_model.py,sha256=OrGdJa0evj
 lionagi/integrations/ollama_/api_endpoints/model/pull_model.py,sha256=fiZJcQSRn73SJA9GdlfPBG5RiMISQwBc0y7S_zAlOGA,923
 lionagi/integrations/ollama_/api_endpoints/model/push_model.py,sha256=yDOMVu3ajdNFT1cuzb4R_3qDxlk0qT4aM2oget3aHZ4,961
 lionagi/integrations/ollama_/api_endpoints/model/show_model.py,sha256=CclV6pEmm5iYM25ePnMAiicVJmZzolDim7BsQoEJAw0,864
-lionagi/integrations/openai_/OpenAIModel.py,sha256=
-lionagi/integrations/openai_/OpenAIService.py,sha256=
+lionagi/integrations/openai_/OpenAIModel.py,sha256=6cv6NIkCMjZ7k2v-kvtCVXjIOHgwwPfGuJV7ttL7Q4M,15889
+lionagi/integrations/openai_/OpenAIService.py,sha256=H48XqIv1IBzraPUGqryQzy83dMxBq4_qmTAo65MCBQE,13736
 lionagi/integrations/openai_/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lionagi/integrations/openai_/openai_max_output_token_data.yaml,sha256=3gd8TGnFjy4AdHzvsV9wZjK_r_o26pe3Th75n5eN2zs,263
 lionagi/integrations/openai_/openai_price_data.yaml,sha256=O8mRk8_-MVVLMMlRRIggVirq-d91U6aNhpl9F3L7EL4,780
-lionagi/integrations/openai_/version.py,sha256=
+lionagi/integrations/openai_/version.py,sha256=2plzdEEb24FLjE2I2XyBBcJEPYWHccNL4SgtLC_6erg,22
 lionagi/integrations/openai_/api_endpoints/__init__.py,sha256=Ave6AXUYRyaRqCwSS3usjr8Jvog26Rjc5HdsCPRAGLk,68
 lionagi/integrations/openai_/api_endpoints/api_request.py,sha256=2hS3tEQlglgvw5Jw6c0JL5GXfQDv0gRo5TWcyTsCUJA,10567
 lionagi/integrations/openai_/api_endpoints/data_models.py,sha256=Jiob2b_2W8idS-mNOjnK8cP_vbphArqoOAC6e5DXmc8,627
@@ -218,11 +218,11 @@ lionagi/integrations/pandas_/search_keywords.py,sha256=AJfN8QVu6rUni6As8AOTnzne_
 lionagi/integrations/pandas_/to_df.py,sha256=3vAOCj0Ib2PZNCblg1oA20PjRIrUXR86FHICQLNhLu0,5757
 lionagi/integrations/pandas_/update_cells.py,sha256=7X1bGcPvnEINrLM_zFCWUXIkrFdMGV3TjoEYBq_SThs,1658
 lionagi/integrations/perplexity_/PerplexityModel.py,sha256=06kURklzmc3f-TPwdB3a2zbYttfBTSlBrgZG_Tkw680,9335
-lionagi/integrations/perplexity_/PerplexityService.py,sha256=
+lionagi/integrations/perplexity_/PerplexityService.py,sha256=fyI7S1I9Pxdr1DHxFTkUmTP6djIQLPLlkKyEKGz5QLg,3778
 lionagi/integrations/perplexity_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/integrations/perplexity_/perplexity_max_output_token_data.yaml,sha256=SY6nDrDRhI_HzEBYHaANR_Mc5GRa0SY9Pw_wRuVBlV4,121
 lionagi/integrations/perplexity_/perplexity_price_data.yaml,sha256=eyp_jZktnEbsEv9VJ0TLNzjp99VdOEA0E-el_dAzCTc,284
-lionagi/integrations/perplexity_/version.py,sha256=
+lionagi/integrations/perplexity_/version.py,sha256=d4QHYmS_30j0hPN8NmNPnQ_Z0TphDRbu4MtQj9cT9e8,22
 lionagi/integrations/perplexity_/api_endpoints/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lionagi/integrations/perplexity_/api_endpoints/api_request.py,sha256=jcBlSkTta0HIyg-mbY_iNpSoQwuDuQKryFTb0p5JjCM,6657
 lionagi/integrations/perplexity_/api_endpoints/data_models.py,sha256=kQQorMksoibGopJm5oXpoR5pH_ZyLdnpmA5xd855P2c,1689
@@ -257,7 +257,7 @@ lionagi/libs/func/throttle.py,sha256=iOOmS6i81NGRCClf4WaIhMMXwPt_rZ6WXYBqMJ_weEE
 lionagi/libs/func/types.py,sha256=wdjGNmY82pVACQBuDMUt3XJO0G9_wKHR7VesOeMxo_A,831
 lionagi/libs/func/utils.py,sha256=-LMdUEjksXP6JjjcUBh0XEQiXF30Zmv3xpKbmXjfya8,2674
 lionagi/libs/func/async_calls/__init__.py,sha256=qDHIkH7B-Ka5NJqeeyu_YL3TY36xL8kBwTjtCR4H8AU,495
-lionagi/libs/func/async_calls/alcall.py,sha256=
+lionagi/libs/func/async_calls/alcall.py,sha256=GhPXId0YLvfxpDSYOABwr_3D-nRu8aLWshaYboJQzmQ,7436
 lionagi/libs/func/async_calls/bcall.py,sha256=gwfKpRZkExjVn-1YGZfaboFES8RqUgyaBrdpNtz1IdY,4436
 lionagi/libs/func/async_calls/mcall.py,sha256=9O5gWbBT4iIqXfcdZjgAdqsOSLWrNS4_tt6ou231ozA,4607
 lionagi/libs/func/async_calls/pcall.py,sha256=6u3RJPV-3yOkWxC9iSoVFl1veFfZFJpR0QRyGtfBODI,5831
@@ -317,10 +317,10 @@ lionagi/libs/string_similarity/utils.py,sha256=NdD0qF5tuytWBsm0WMrH0gRBBSxw2p4-m
 lionagi/operations/__init__.py,sha256=Dt7o6DFP7zVR-uxZ4xsGHQcse3XVlF6S8Y9NhaUTn_4,68
 lionagi/operations/utils.py,sha256=kn5SkZRczl1aQ-vJBeVPlMdeyUUD0s5iyuAW4P6KOvQ,1164
 lionagi/operations/brainstorm/__init__.py,sha256=amsoH65wepsx58DfgH-TRTN1wDH5TC24qYI_f02zkVg,61
-lionagi/operations/brainstorm/brainstorm.py,sha256=
+lionagi/operations/brainstorm/brainstorm.py,sha256=1Uuc11OH34jEYfDdombX5ui9b-bJTn4bVSLt0jjQUIc,16747
 lionagi/operations/brainstorm/prompt.py,sha256=3a7LsmtiqGAK5mtWoX-2qhsjETBzBx8FxM3cFCBEoOo,497
 lionagi/operations/plan/__init__.py,sha256=SVqoVmlSGz9lsLztm487H2qOLwgyFxSi2yZ8ubn-bgE,43
-lionagi/operations/plan/plan.py,sha256=
+lionagi/operations/plan/plan.py,sha256=AX4h_TeyhcXag572ywN9rmvo3fEiuGER7skAKf25_LY,15329
 lionagi/operations/plan/prompt.py,sha256=sKHa_jDahzCJ60oILj1XNYCIlbS-H8ybKRXpf9zd5x0,880
 lionagi/operations/select/__init__.py,sha256=dUd-KS1l404_ueYlIQsVNhS9jAqjn5pJbtUEbbh6KlI,49
 lionagi/operations/select/prompt.py,sha256=wbmuDC96fcQ4LFKjqmeErOQwVRpGWRqcwUeLTWnbeNs,186
@@ -351,7 +351,7 @@ lionagi/protocols/registries/_component_registry.py,sha256=C04oie1-CQJCoXsjZ6hYA
 lionagi/protocols/registries/_pile_registry.py,sha256=l2RbkjW6OGOHsgPIq9OitIi27r8Fk0vziMI8N_tP3nQ,656
 lionagi/service/__init__.py,sha256=AQS0ezBCtjZOUbT_QEH-Ev5l41-Pk0_KgmKHUgf_Y_A,375
 lionagi/service/complete_request_info.py,sha256=V9cWqmQ-fo952EWCooAvezaM_xuxYU4Xy0Jo_0XJnug,301
-lionagi/service/imodel.py,sha256=
+lionagi/service/imodel.py,sha256=OK71j_hCJb9LATocPJfaDRpXfVpq_Z9r3F6OeOgrgyg,4356
 lionagi/service/rate_limiter.py,sha256=1cCtKfpOo_8h4ZN-oX5HXtnq0iGjF1rxi8htyu65kMU,3746
 lionagi/service/service.py,sha256=58FPZzLM85fNm4WgSzmZRwbHHuLUW1K0VagKZ7A2ZAs,1077
 lionagi/service/service_match_util.py,sha256=gjGzfQeQqkyxMrKx8aINS47r3Pmugbcx8JjmvbEBg7Y,4305
@@ -368,7 +368,7 @@ lionagi/strategies/sequential_chunk.py,sha256=jG_WZXG-Ra3yd30CmX4b3XeCNAUrZGA2-i
 lionagi/strategies/sequential_concurrent_chunk.py,sha256=H7GShaqYlD5XxNJMG2GdOR4Vl8JHDhZb5jxNq8zY0hI,3365
 lionagi/strategies/types.py,sha256=fEvE4d1H4SeCcXcd2dz3q4k8jFIBtxYzjxDN7eJRLtI,769
 lionagi/strategies/utils.py,sha256=DX1dvxia8cNRqEJJbssJ3mgRzo7kgWCTA4y5DYLCCZE,1281
-lionagi-0.5.
-lionagi-0.5.
-lionagi-0.5.
-lionagi-0.5.
+lionagi-0.5.5.dist-info/METADATA,sha256=tVI6_Y0Ykz5C8X0ptfBdfK9vke6V3m4ZyIkBrFpTIoE,22736
+lionagi-0.5.5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+lionagi-0.5.5.dist-info/licenses/LICENSE,sha256=VXFWsdoN5AAknBCgFqQNgPWYx7OPp-PFEP961zGdOjc,11288
+lionagi-0.5.5.dist-info/RECORD,,
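As a side note on the RECORD rows above: in the standard wheel RECORD format each row is "<path>,sha256=<urlsafe base64 of the digest, unpadded>,<size in bytes>". The sketch below shows how such a row can be recomputed for verification; the path is only an example and assumes the files of the installed 0.5.5 wheel are on disk.

import base64
import hashlib
from pathlib import Path

def record_row(path: str) -> str:
    # Recompute a RECORD entry: unpadded urlsafe-base64 sha256 plus file size.
    data = Path(path).read_bytes()
    digest = hashlib.sha256(data).digest()
    encoded = base64.urlsafe_b64encode(digest).rstrip(b"=").decode()
    return f"{path},sha256={encoded},{len(data)}"

# For the 0.5.5 wheel this should reproduce e.g. the
# "lionagi/version.py,sha256=78mfpLew...,22" row listed above.
print(record_row("lionagi/version.py"))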
{lionagi-0.5.3.dist-info → lionagi-0.5.5.dist-info}/WHEEL
File without changes
{lionagi-0.5.3.dist-info → lionagi-0.5.5.dist-info}/licenses/LICENSE
File without changes