lionagi 0.5.4__py3-none-any.whl → 0.5.5__py3-none-any.whl
- lionagi/core/action/action_manager.py +2 -0
- lionagi/core/communication/assistant_response.py +10 -0
- lionagi/core/communication/message.py +1 -1
- lionagi/core/communication/message_manager.py +13 -0
- lionagi/core/communication/utils.py +4 -2
- lionagi/core/session/branch_mixins.py +76 -39
- lionagi/integrations/anthropic_/AnthropicModel.py +4 -9
- lionagi/integrations/anthropic_/AnthropicService.py +10 -0
- lionagi/integrations/anthropic_/anthropic_max_output_token_data.yaml +5 -0
- lionagi/integrations/anthropic_/anthropic_price_data.yaml +26 -6
- lionagi/integrations/anthropic_/version.py +1 -1
- lionagi/integrations/groq_/GroqService.py +5 -0
- lionagi/integrations/groq_/version.py +1 -1
- lionagi/integrations/litellm_/imodel.py +5 -0
- lionagi/integrations/openai_/OpenAIModel.py +0 -4
- lionagi/integrations/openai_/OpenAIService.py +9 -0
- lionagi/integrations/openai_/version.py +1 -1
- lionagi/integrations/perplexity_/PerplexityService.py +5 -0
- lionagi/integrations/perplexity_/version.py +1 -1
- lionagi/service/imodel.py +5 -0
- lionagi/version.py +1 -1
- {lionagi-0.5.4.dist-info → lionagi-0.5.5.dist-info}/METADATA +1 -1
- {lionagi-0.5.4.dist-info → lionagi-0.5.5.dist-info}/RECORD +25 -25
- {lionagi-0.5.4.dist-info → lionagi-0.5.5.dist-info}/WHEEL +0 -0
- {lionagi-0.5.4.dist-info → lionagi-0.5.5.dist-info}/licenses/LICENSE +0 -0
lionagi/core/action/action_manager.py CHANGED
@@ -253,6 +253,8 @@ class ActionManager:
             ValueError: If a specified tool is not registered.
             TypeError: If an unsupported tool type is provided.
         """
+        if isinstance(tools, list | tuple) and len(tools) == 1:
+            tools = tools[0]
         if isinstance(tools, bool):
             if tools:
                 tool_kwarg = {"tools": self.schema_list}
lionagi/core/communication/assistant_response.py CHANGED
@@ -146,6 +146,16 @@ class AssistantResponse(RoledMessage):
         """
         return copy(self.content["assistant_response"])

+    @response.setter
+    def response(self, value: str) -> None:
+        """
+        Set the assistant response content.
+
+        Args:
+            value: The new response content
+        """
+        self.content["assistant_response"] = value
+
     @property
     def model_response(self) -> dict | list[dict]:
         """
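For orientation, the new setter mirrors the existing getter: reading `response` returns a copy of the stored text, while assigning to it writes back into `content["assistant_response"]`. A minimal runnable sketch of that shape, using a stand-in class rather than the real `AssistantResponse` (whose constructor takes session-specific arguments):

```python
# Stand-in with the same getter/setter shape as the diff above; not the real class.
from copy import copy


class _FakeAssistantResponse:
    def __init__(self, text: str):
        self.content = {"assistant_response": text}

    @property
    def response(self) -> str:
        return copy(self.content["assistant_response"])  # getter returns a copy

    @response.setter
    def response(self, value: str) -> None:
        self.content["assistant_response"] = value  # setter overwrites in place


msg = _FakeAssistantResponse("draft answer")
msg.response = f"{msg.response}\n\n[appended]"
assert msg.response.endswith("[appended]")
```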
lionagi/core/communication/message_manager.py CHANGED
@@ -498,6 +498,19 @@ class MessageManager:
             ]
         )

+    def remove_last_instruction_tool_schemas(self) -> None:
+        id_ = self.last_instruction.ln_id
+        self.messages[id_].tool_schemas = None
+
+    def concat_recent_action_responses_to_instruction(
+        self, instruction: Instruction
+    ) -> None:
+        for i in reversed(self.messages.progress):
+            if isinstance(self.messages[i], ActionResponse):
+                instruction.context.append(self.messages[i].content.to_dict())
+            else:
+                break
+
     def to_chat_msgs(self, progress=None) -> list[dict]:
         """
         Convert messages to chat format.
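`concat_recent_action_responses_to_instruction` walks the message progression from newest to oldest and folds the trailing run of action responses into the instruction's context, stopping at the first non-action message. A toy sketch of that reversed-walk-then-break pattern with plain strings (not the library's message classes):

```python
# Toy illustration of the pattern used above.
progress = ["instruction", "assistant", "action_resp_A", "action_resp_B"]
context: list[str] = []
for item in reversed(progress):
    if item.startswith("action_resp"):
        context.append(item)   # most recent action responses first
    else:
        break                  # stop as soon as a non-action message is hit
assert context == ["action_resp_B", "action_resp_A"]
```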
lionagi/core/communication/utils.py CHANGED
@@ -92,9 +92,11 @@ def format_text_item(item: Any) -> str:
     for j in item:
         if isinstance(j, dict):
             for k, v in j.items():
-                msg += f"- {k}: {v} \n\n"
+                if v is not None:
+                    msg += f"- {k}: {v} \n\n"
         else:
-            msg += f"{j}\n"
+            if j is not None:
+                msg += f"{j}\n"
     return msg


lionagi/core/session/branch_mixins.py CHANGED
@@ -33,6 +33,7 @@ from ..communication.types import (
     ActionResponse,
     AssistantResponse,
     Instruction,
+    RoledMessage,
 )


@@ -149,6 +150,8 @@ class BranchOperationMixin(ABC):
         if auto_retry_parse is True:
             operative.auto_retry_parse = True

+        if actions:
+            tools = tools or True
         if invoke_actions and tools:
             tool_schemas = self.get_tool_schema(tools)

@@ -275,7 +278,7 @@ class BranchOperationMixin(ABC):
         **kwargs,
     ) -> tuple[Instruction, AssistantResponse]:

-        ins = self.msgs.create_instruction(
+        ins: Instruction = self.msgs.create_instruction(
             instruction=instruction,
             guidance=guidance,
             context=context,
@@ -289,7 +292,57 @@ class BranchOperationMixin(ABC):
         )

         progress = progress or self.msgs.progress
-        messages = [self.msgs.messages[i] for i in progress]
+        messages: list[RoledMessage] = [
+            self.msgs.messages[i] for i in progress
+        ]
+
+        use_ins = None
+        if imodel.sequential_exchange:
+            _to_use = []
+            _action_responses: set[ActionResponse] = set()
+
+            for i in messages:
+                if isinstance(i, ActionResponse):
+                    _action_responses.add(i)
+                if isinstance(i, AssistantResponse):
+                    _to_use.append(i.model_copy())
+                if isinstance(i, Instruction):
+                    if _action_responses:
+                        j = i.model_copy()
+                        d_ = [k.content.to_dict() for k in _action_responses]
+                        for z in d_:
+                            if z not in j.context:
+                                j.context.append(z)
+
+                        _to_use.append(j)
+                        _action_responses = set()
+                    else:
+                        _to_use.append(i)
+
+            messages = _to_use
+            if _action_responses:
+                j = ins.model_copy()
+                d_ = [k.content.to_dict() for k in _action_responses]
+                for z in d_:
+                    if z not in j.context:
+                        j.context.append(z)
+                use_ins = j
+
+            if messages and len(messages) > 1:
+                _msgs = [messages[0]]
+
+                for i in messages[1:]:
+                    if isinstance(i, AssistantResponse):
+                        if isinstance(_msgs[-1], AssistantResponse):
+                            _msgs[-1].response = (
+                                f"{_msgs[-1].response}\n\n{i.response}"
+                            )
+                        else:
+                            _msgs.append(i)
+                    else:
+                        if isinstance(_msgs[-1], AssistantResponse):
+                            _msgs.append(i)
+                messages = _msgs

         if self.msgs.system and "system" not in imodel.allowed_roles:
             messages = [msg for msg in messages if msg.role != "system"]
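The `sequential_exchange` branch above exists for providers that demand strict user/assistant alternation: trailing action responses are folded into the next instruction's context, and consecutive `AssistantResponse` messages are merged into one, joined with a blank line. A toy sketch of just the merge step, using plain role/text tuples rather than the library's message classes:

```python
# Toy illustration of merging consecutive assistant turns (the same "\n\n" join as above).
turns = [("user", "hi"), ("assistant", "part 1"), ("assistant", "part 2"), ("user", "ok")]
merged = [turns[0]]
for role, text in turns[1:]:
    if role == "assistant" and merged[-1][0] == "assistant":
        merged[-1] = ("assistant", f"{merged[-1][1]}\n\n{text}")  # collapse into previous turn
    else:
        merged.append((role, text))
assert merged == [("user", "hi"), ("assistant", "part 1\n\npart 2"), ("user", "ok")]
```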
@@ -312,9 +365,10 @@ class BranchOperationMixin(ABC):
                 first_instruction.guidance or ""
             )
             messages[0] = first_instruction
+            messages.append(use_ins or ins)

         else:
-            messages.append(ins)
+            messages.append(use_ins or ins)

         kwargs["messages"] = [i.chat_msg for i in messages]
         imodel = imodel or self.imodel
@@ -331,6 +385,7 @@ class BranchOperationMixin(ABC):
             sender=self,
             recipient=self.user,
         )
+
         return ins, res

     async def communicate(
@@ -346,7 +401,6 @@ class BranchOperationMixin(ABC):
         imodel: iModel = None,
         images: list = None,
         image_detail: Literal["low", "high", "auto"] = None,
-        tools: str | FUNCTOOL | list[FUNCTOOL | str] | bool = None,
         num_parse_retries: int = 0,
         retry_imodel: iModel = None,
         retry_kwargs: dict = {},
@@ -355,7 +409,6 @@ class BranchOperationMixin(ABC):
         ] = "return_value",
         skip_validation: bool = False,
         clear_messages: bool = False,
-        invoke_action: bool = True,
         response_format: (
             type[BaseModel] | BaseModel
         ) = None,  # alias of request_model
@@ -380,10 +433,6 @@ class BranchOperationMixin(ABC):
             )
             num_parse_retries = 5

-        tool_schemas = None
-        if invoke_action and tools:
-            tool_schemas = self.get_tool_schema(tools)
-
         ins, res = await self._invoke_imodel(
             instruction=instruction,
             guidance=guidance,
@@ -395,36 +444,14 @@ class BranchOperationMixin(ABC):
             imodel=imodel,
             images=images,
             image_detail=image_detail,
-            tool_schemas=tool_schemas,
             **kwargs,
         )
-
-
-
-        action_request_models = None
-        action_response_models = None
+        self.msgs.add_message(instruction=ins)
+        self.msgs.add_message(assistant_response=res)

         if skip_validation:
             return res.response

-        if invoke_action and tools:
-            action_request_models = ActionRequestModel.create(res.response)
-
-        if action_request_models and invoke_action:
-            action_response_models = await alcall(
-                action_request_models,
-                self.invoke_action,
-                suppress_errors=True,
-            )
-
-        if action_request_models and not action_response_models:
-            for i in action_request_models:
-                await self.msgs.a_add_message(
-                    action_request_model=i,
-                    sender=self,
-                    recipient=None,
-                )
-
         _d = None
         if request_fields is not None or request_model is not None:
             parse_success = None
@@ -475,9 +502,12 @@ class BranchOperationMixin(ABC):
                 if _d and isinstance(_d, dict):
                     parse_success = True
                     if res not in self.msgs.messages:
-                        self.msgs.add_message(
-                            assistant_response=res
-                        )
+                        if isinstance(
+                            self.msgs.messages[-1], AssistantResponse
+                        ):
+                            self.msgs.messages[-1].response = res.response
+                        else:
+                            self.msgs.add_message(assistant_response=res)
                     return _d

             elif request_model:
@@ -495,9 +525,16 @@ class BranchOperationMixin(ABC):
                     _d = request_model.model_validate(_d)
                     parse_success = True
                     if res not in self.msgs.messages:
-                        self.msgs.add_message(
-                            assistant_response=res
-                        )
+                        if isinstance(
+                            self.msgs.messages[-1], AssistantResponse
+                        ):
+                            self.msgs.messages[-1].response = (
+                                res.response
+                            )
+                        else:
+                            self.msgs.add_message(
+                                assistant_response=res
+                            )
                     return _d
                 except Exception as e:
                     logging.warning(
lionagi/integrations/anthropic_/AnthropicModel.py CHANGED
@@ -4,6 +4,7 @@

 from pathlib import Path

+import yaml
 from dotenv import load_dotenv
 from pydantic import (
     BaseModel,
@@ -33,12 +34,6 @@ price_config_file_name = path / "anthropic_price_data.yaml"
 max_output_token_file_name = path / "anthropic_max_output_token_data.yaml"


-class _ModuleImportClass:
-    from lionagi.libs.package.imports import check_import
-
-    yaml = check_import("yaml", pip_name="pyyaml")
-
-
 class AnthropicModel(BaseModel):
     model: str = Field(description="ID of the model to use.")

@@ -239,7 +234,7 @@ class AnthropicModel(BaseModel):
             )
             if estimated_output_len == 0:
                 with open(max_output_token_file_name) as file:
-                    output_token_config = _ModuleImportClass.yaml.safe_load(file)
+                    output_token_config = yaml.safe_load(file)
                 estimated_output_len = output_token_config.get(self.model, 0)
                 self.estimated_output_len = estimated_output_len

@@ -261,13 +256,13 @@ class AnthropicModel(BaseModel):
         num_of_input_tokens = self.text_token_calculator.calculate(input_text)

         with open(price_config_file_name) as file:
-            price_config = _ModuleImportClass.yaml.safe_load(file)
+            price_config = yaml.safe_load(file)

         model_price_info_dict = price_config["model"][self.model]
         estimated_price = (
             model_price_info_dict["input_tokens"] * num_of_input_tokens
             + model_price_info_dict["output_tokens"]
             * estimated_num_of_output_tokens
-        )
+        ) / 1_000_000

         return estimated_price
lionagi/integrations/anthropic_/AnthropicService.py CHANGED
@@ -51,6 +51,11 @@ class AnthropicService(Service):
         # Map model versions to their base models for shared rate limiting
         shared_models = {
             "claude-3-opus-20240229": "claude-3-opus",
+            "claude-3-sonnet-20241022": "claude-3-sonnet",
+            "claude-3-haiku-20241022": "claude-3-haiku",
+            "claude-3-opus-latest": "claude-3-opus",
+            "claude-3-sonnet-latest": "claude-3-sonnet",
+            "claude-3-haiku-latest": "claude-3-haiku",
             "claude-3-sonnet-20240229": "claude-3-sonnet",
             "claude-3-haiku-20240307": "claude-3-haiku",
         }
@@ -115,3 +120,8 @@ class AnthropicService(Service):
     @property
     def allowed_roles(self):
         return ["user", "assistant"]
+
+    @property
+    def sequential_exchange(self):
+        """whether the service requires user/assistant exchange"""
+        return True
lionagi/integrations/anthropic_/anthropic_max_output_token_data.yaml CHANGED
@@ -2,6 +2,11 @@
 #
 # SPDX-License-Identifier: Apache-2.0

+claude-3-5-sonnet-20241022: 8192 # Latest model
+claude-3-5-haiku-20241022: 8192 # Latest model
 claude-3-opus-20240229: 4096
 claude-3-sonnet-20240229: 4096
 claude-3-haiku-20240307: 4096
+claude-2.1: 4096
+claude-2.0: 4096
+claude-instant-1.2: 4096
lionagi/integrations/anthropic_/anthropic_price_data.yaml CHANGED
@@ -3,12 +3,32 @@
 # SPDX-License-Identifier: Apache-2.0

 model:
+  # Latest Models (3.5 series)
+  claude-3-5-sonnet-20241022:
+    input_tokens: 3.0
+    output_tokens: 15.0
+  claude-3-5-haiku-20241022:
+    input_tokens: 0.80
+    output_tokens: 4.0
+
+  # Claude 3 Models
   claude-3-opus-20240229:
-    input_tokens: 0
-    output_tokens: 0
+    input_tokens: 15.0
+    output_tokens: 75.0
   claude-3-sonnet-20240229:
-    input_tokens: 0
-    output_tokens: 0
+    input_tokens: 3.0
+    output_tokens: 15.0
   claude-3-haiku-20240307:
-    input_tokens: 0
-    output_tokens: 0
+    input_tokens: 0.25
+    output_tokens: 1.25
+
+  # Legacy Models
+  claude-2.1:
+    input_tokens: 8.0
+    output_tokens: 24.0
+  claude-2.0:
+    input_tokens: 8.0
+    output_tokens: 24.0
+  claude-instant-1.2:
+    input_tokens: 0.8
+    output_tokens: 2.4
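The prices above are quoted in USD per million tokens, which is why the estimated-price calculation in AnthropicModel shown earlier now divides the weighted sum by `1_000_000`. A quick worked example using the `claude-3-5-sonnet-20241022` rates from this file, with hypothetical token counts:

```python
# Hypothetical request size; rates taken from the YAML above (USD per million tokens).
num_input_tokens, num_output_tokens = 1_000, 500
input_rate, output_rate = 3.0, 15.0  # claude-3-5-sonnet-20241022

estimated_price = (
    input_rate * num_input_tokens + output_rate * num_output_tokens
) / 1_000_000
print(estimated_price)  # 0.0105 (USD)
```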
lionagi/integrations/openai_/OpenAIService.py CHANGED
@@ -424,3 +424,12 @@ class OpenAIService(Service):
             method="POST",
             content_type="application/json",
         )
+
+    @property
+    def allowed_roles(self):
+        return ["user", "assistant", "system"]
+
+    @property
+    def sequential_exchange(self):
+        """whether the service requires user/assistant exchange"""
+        return False
@@ -1 +1 @@
-__version__ = "1.0.
+__version__ = "1.0.3"
@@ -1 +1 @@
-__version__ = "1.0.
+__version__ = "1.0.1"
lionagi/service/imodel.py CHANGED
@@ -121,5 +121,10 @@ class iModel:
     def allowed_roles(self):
         return self.service.allowed_roles

+    @property
+    def sequential_exchange(self):
+        """whether the service requires user/assistant exchange"""
+        return self.service.sequential_exchange
+

 __all__ = ["iModel"]
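`iModel.sequential_exchange` simply proxies the flag exposed by the backing service (True for AnthropicService above, False for OpenAIService), and the branch mixin uses it to decide whether messages must be coerced into strict user/assistant alternation before a request is sent. A toy sketch of that dispatch, using stand-in classes rather than the real services:

```python
# Stand-in services mirroring the sequential_exchange flags added in this release.
class AnthropicLikeService:
    sequential_exchange = True   # requires strict user/assistant alternation


class OpenAILikeService:
    sequential_exchange = False  # accepts mixed role ordering as-is


def must_merge_messages(service) -> bool:
    # The same question BranchOperationMixin asks via imodel.sequential_exchange.
    return service.sequential_exchange


print(must_merge_messages(AnthropicLikeService()))  # True
print(must_merge_messages(OpenAILikeService()))     # False
```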
lionagi/version.py CHANGED
@@ -1 +1 @@
-__version__ = "0.5.4"
+__version__ = "0.5.5"
{lionagi-0.5.4.dist-info → lionagi-0.5.5.dist-info}/RECORD CHANGED
@@ -1,10 +1,10 @@
 lionagi/__init__.py,sha256=oybfu2VsZc4ElN7ZeaW3KQrz8T8EcSDHPA8lUE-8G2I,537
 lionagi/settings.py,sha256=BOjxRV4N9zQJervvajPhbaHmgZ-nhbCy7AaQJi3Avug,2726
-lionagi/version.py,sha256=
+lionagi/version.py,sha256=78mfpLewKVki6c9UONSUdlVme_JsN9ZwIfp4Hf4jmG0,22
 lionagi/core/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/core/_class_registry.py,sha256=srSWefqCS9EZrMvyA8zCrZ9KFvzAhTIj8g6mJG5KlIc,1982
 lionagi/core/action/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
-lionagi/core/action/action_manager.py,sha256=
+lionagi/core/action/action_manager.py,sha256=R7gXNu--lM8DSFNMP1fUOcNzWPsAE49qizpw8pP11fo,10236
 lionagi/core/action/base.py,sha256=M2K3irxpp7fDwczefzZdUsHhGd_21l1xhFT9iNSbiK0,4110
 lionagi/core/action/function_calling.py,sha256=7N5UFLbHPy6yvPrR6RH9SNSwt73s0X_3iAnOV8YMy0Q,6001
 lionagi/core/action/tool.py,sha256=pDiyttx0obu0qDskz7POHxbyqTwHuD7HCeE31LkzRMg,6884
@@ -12,14 +12,14 @@ lionagi/core/action/types.py,sha256=KqW5ZHXssfxuf1kIiIUoj-r3KIZEoQ_GkZ04tL6fUPQ,
 lionagi/core/communication/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/core/communication/action_request.py,sha256=gicqQIs7VLOhnkQYsUuRmSLGittODiG3Pk74FayPSeM,5006
 lionagi/core/communication/action_response.py,sha256=QONcRtvV7Ympbzkdg5gnV0DqCC97Y-gvaA0D7EkjswM,4505
-lionagi/core/communication/assistant_response.py,sha256=
+lionagi/core/communication/assistant_response.py,sha256=IidDoxC54lxnDT194w5exWC9hDb0TxTY2mmwzbQwbOg,6004
 lionagi/core/communication/base_mail.py,sha256=Et-WrHlw2x75mLIKy-wMUj3YlGGL_2BF9r1REuiZtM8,1704
 lionagi/core/communication/instruction.py,sha256=Iy3G1sp0lPuA4Ak9pLobIipou4ePLsMdGGvRq6L58UU,12454
-lionagi/core/communication/message.py,sha256=
-lionagi/core/communication/message_manager.py,sha256=
+lionagi/core/communication/message.py,sha256=ZE4hJfDN1LLOGYM-nxFZDIP6v8b3fMNcziS7Pj1gYNA,8556
+lionagi/core/communication/message_manager.py,sha256=MRl7_RdgOOHedUOVZ7FT9DtsWK4C4nj5NzTXgJV71aw,17478
 lionagi/core/communication/system.py,sha256=5DoDODZePy4EDpE3oI5RpkzBXVp9WC2Mf4B3fPImItI,3824
 lionagi/core/communication/types.py,sha256=rBGMpE3NW0SGKc-wJdJvT-VrO3wpjwjQhoWzSC2Dqh0,745
-lionagi/core/communication/utils.py,sha256=
+lionagi/core/communication/utils.py,sha256=5W0xW6Tx-DF4Vzpjpk01g1960mNjvBUWrDLGU-thBaQ,7031
 lionagi/core/communication/templates/README.md,sha256=Ch4JrKSjd85fLitAYO1OhZjNOGKHoEwaKQlcV16jiUI,1286
 lionagi/core/communication/templates/action_request.jinja2,sha256=d6OmxHKyvvNDSK4bnBM3TGSUk_HeE_Q2EtLAQ0ZBEJg,120
 lionagi/core/communication/templates/action_response.jinja2,sha256=Mg0UxmXlIvtP_KPB0GcJxE1TP6lml9BwdPkW1PZxkg8,142
@@ -55,7 +55,7 @@ lionagi/core/models/schema_model.py,sha256=H2tSX3r0U6MDNi929rWmAZy3nUKMP47RG-Ma0
 lionagi/core/models/types.py,sha256=elcUuz_9dx4AhZddnICF-Cs62VJWIBqMET7MiRe4c1I,447
 lionagi/core/session/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/core/session/branch.py,sha256=r6yNXwTm0oYA-9YOereuvLJtDnhj9IJWJ5fjTyKN88U,4406
-lionagi/core/session/branch_mixins.py,sha256=
+lionagi/core/session/branch_mixins.py,sha256=7uVqxkgvbj53JlDk9roL032c2YeNl_bq5UQSxw24raQ,21240
 lionagi/core/session/session.py,sha256=cutece_iTs5K_m5soRfU9oTfHmw1icDEvx77E1RelIM,5129
 lionagi/core/session/types.py,sha256=MUGTSa2HWK79p7z-CG22RFP07N5AKnPVNXZwZt_wIvU,202
 lionagi/core/typing/__init__.py,sha256=Y9BK1OUXzjQgIo3epCVwWqUYhFwQQ_ayhRwI1UOmINg,228
@@ -65,12 +65,12 @@ lionagi/core/typing/_pydantic.py,sha256=xMNyT0rDhSpKkEDfzj6GHobqlTtRn48svhmA75LP
 lionagi/core/typing/_typing.py,sha256=VJj5W6y-JGK1ZzSbyDs4qAuq0cA5wp5HtRgZUsZ50E0,869
 lionagi/integrations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lionagi/integrations/_services.py,sha256=qxffUaPKvp2Bb_LI3Uiyokm7l6ZAbRi0xKxZXCYs67c,498
-lionagi/integrations/anthropic_/AnthropicModel.py,sha256=
-lionagi/integrations/anthropic_/AnthropicService.py,sha256=
+lionagi/integrations/anthropic_/AnthropicModel.py,sha256=6O9-GAQnq-Erhx62pxQDRuVL-vpCFUWMEe3V6HmR1MA,9076
+lionagi/integrations/anthropic_/AnthropicService.py,sha256=t1LL_64xmz4vFDGVGm8M7Stion8LwfJVVEPvdtYwDtg,4074
 lionagi/integrations/anthropic_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
-lionagi/integrations/anthropic_/anthropic_max_output_token_data.yaml,sha256=
-lionagi/integrations/anthropic_/anthropic_price_data.yaml,sha256=
-lionagi/integrations/anthropic_/version.py,sha256=
+lionagi/integrations/anthropic_/anthropic_max_output_token_data.yaml,sha256=MCgwrtCwaVE9SJar3NNKI51uuwQ2Nzko979lTd5mEeU,360
+lionagi/integrations/anthropic_/anthropic_price_data.yaml,sha256=tviu4PY-xjo-9vhx--Vsp0ZNYVhgcaQAAfZvD7GxErc,756
+lionagi/integrations/anthropic_/version.py,sha256=25-aR3n_vIk4o62w93ToEUcEYbH9ZPmmJjwkf4fvZls,135
 lionagi/integrations/anthropic_/api_endpoints/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/integrations/anthropic_/api_endpoints/api_request.py,sha256=JWp-m4DFFwrR13ckTMnfg2ULOJAmHDheuXDNR24vk4s,10368
 lionagi/integrations/anthropic_/api_endpoints/data_models.py,sha256=qX6U9G_ZNlASyvhQ3WzgdR0kMvSWbO-kbQyo6R7pIAc,978
@@ -84,19 +84,19 @@ lionagi/integrations/anthropic_/api_endpoints/messages/response/content_models.p
 lionagi/integrations/anthropic_/api_endpoints/messages/response/response_body.py,sha256=96e2GAAiTGkExRullvAsAOGhPPHNByhnv6DK_wz1j40,3213
 lionagi/integrations/anthropic_/api_endpoints/messages/response/usage_models.py,sha256=s6oe5iOU027M7YPS10upnvcPsuFbTV1ZM00lInHPKvA,695
 lionagi/integrations/groq_/GroqModel.py,sha256=y2KaFe8GmlTBnBRvB09dNjYYhNEjq2wujYfB8YzGNHI,11950
-lionagi/integrations/groq_/GroqService.py,sha256=
+lionagi/integrations/groq_/GroqService.py,sha256=GKYDONxZpzcH7XgVlLg0Tw2wWBAAcOywsBcahbOQPCU,4781
 lionagi/integrations/groq_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/integrations/groq_/groq_max_output_token_data.yaml,sha256=Y0PbyZ7pyyV1zi7ZJSJGVPsZDhSeN2JMOyghzkqqoKc,559
 lionagi/integrations/groq_/groq_price_data.yaml,sha256=HmN_2-QliKvaC8Ghq7QJAD97ubMYzPSy7EGgqzgCz48,1234
 lionagi/integrations/groq_/groq_rate_limits.yaml,sha256=x8hiZx_-H3ZyMznnxgFCQidWMEK5jIzBvrYpxb8gFYI,2863
-lionagi/integrations/groq_/version.py,sha256=
+lionagi/integrations/groq_/version.py,sha256=25-aR3n_vIk4o62w93ToEUcEYbH9ZPmmJjwkf4fvZls,135
 lionagi/integrations/groq_/api_endpoints/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lionagi/integrations/groq_/api_endpoints/data_models.py,sha256=bwqAbERAT1xAzLkMKkBrHOLr-hLDcblW4pcQ5Oqmyq8,5424
 lionagi/integrations/groq_/api_endpoints/groq_request.py,sha256=u-GJuu0ZsY7jMWaXtNXnud7Pw3Pxs6Cc280UnTHk3YE,10383
 lionagi/integrations/groq_/api_endpoints/match_response.py,sha256=95vRKsR1QYPPmBY36dC5azdKn5UlXNRrTQqTUZro_YM,3756
 lionagi/integrations/groq_/api_endpoints/response_utils.py,sha256=P5kRsGHe-Rx9xejfRcU8q680yotcWLTSaSUuqXATcho,3710
 lionagi/integrations/litellm_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
-lionagi/integrations/litellm_/imodel.py,sha256=
+lionagi/integrations/litellm_/imodel.py,sha256=M52linffjZuytzHzhJStPLcMa90oCs-pfVljzdK-0Es,2365
 lionagi/integrations/ollama_/OllamaModel.py,sha256=5kBYIWShsSpQpSgOxdbRk2_4jmss6Y8iISjUcS3KoWw,8341
 lionagi/integrations/ollama_/OllamaService.py,sha256=bJ4kk1FPjn_kecLzxTJgVj05KZPzF5FclHoDA3jdAlg,4080
 lionagi/integrations/ollama_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
@@ -125,12 +125,12 @@ lionagi/integrations/ollama_/api_endpoints/model/list_model.py,sha256=OrGdJa0evj
 lionagi/integrations/ollama_/api_endpoints/model/pull_model.py,sha256=fiZJcQSRn73SJA9GdlfPBG5RiMISQwBc0y7S_zAlOGA,923
 lionagi/integrations/ollama_/api_endpoints/model/push_model.py,sha256=yDOMVu3ajdNFT1cuzb4R_3qDxlk0qT4aM2oget3aHZ4,961
 lionagi/integrations/ollama_/api_endpoints/model/show_model.py,sha256=CclV6pEmm5iYM25ePnMAiicVJmZzolDim7BsQoEJAw0,864
-lionagi/integrations/openai_/OpenAIModel.py,sha256=
-lionagi/integrations/openai_/OpenAIService.py,sha256=
+lionagi/integrations/openai_/OpenAIModel.py,sha256=6cv6NIkCMjZ7k2v-kvtCVXjIOHgwwPfGuJV7ttL7Q4M,15889
+lionagi/integrations/openai_/OpenAIService.py,sha256=H48XqIv1IBzraPUGqryQzy83dMxBq4_qmTAo65MCBQE,13736
 lionagi/integrations/openai_/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lionagi/integrations/openai_/openai_max_output_token_data.yaml,sha256=3gd8TGnFjy4AdHzvsV9wZjK_r_o26pe3Th75n5eN2zs,263
 lionagi/integrations/openai_/openai_price_data.yaml,sha256=O8mRk8_-MVVLMMlRRIggVirq-d91U6aNhpl9F3L7EL4,780
-lionagi/integrations/openai_/version.py,sha256=
+lionagi/integrations/openai_/version.py,sha256=2plzdEEb24FLjE2I2XyBBcJEPYWHccNL4SgtLC_6erg,22
 lionagi/integrations/openai_/api_endpoints/__init__.py,sha256=Ave6AXUYRyaRqCwSS3usjr8Jvog26Rjc5HdsCPRAGLk,68
 lionagi/integrations/openai_/api_endpoints/api_request.py,sha256=2hS3tEQlglgvw5Jw6c0JL5GXfQDv0gRo5TWcyTsCUJA,10567
 lionagi/integrations/openai_/api_endpoints/data_models.py,sha256=Jiob2b_2W8idS-mNOjnK8cP_vbphArqoOAC6e5DXmc8,627
@@ -218,11 +218,11 @@ lionagi/integrations/pandas_/search_keywords.py,sha256=AJfN8QVu6rUni6As8AOTnzne_
 lionagi/integrations/pandas_/to_df.py,sha256=3vAOCj0Ib2PZNCblg1oA20PjRIrUXR86FHICQLNhLu0,5757
 lionagi/integrations/pandas_/update_cells.py,sha256=7X1bGcPvnEINrLM_zFCWUXIkrFdMGV3TjoEYBq_SThs,1658
 lionagi/integrations/perplexity_/PerplexityModel.py,sha256=06kURklzmc3f-TPwdB3a2zbYttfBTSlBrgZG_Tkw680,9335
-lionagi/integrations/perplexity_/PerplexityService.py,sha256=
+lionagi/integrations/perplexity_/PerplexityService.py,sha256=fyI7S1I9Pxdr1DHxFTkUmTP6djIQLPLlkKyEKGz5QLg,3778
 lionagi/integrations/perplexity_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/integrations/perplexity_/perplexity_max_output_token_data.yaml,sha256=SY6nDrDRhI_HzEBYHaANR_Mc5GRa0SY9Pw_wRuVBlV4,121
 lionagi/integrations/perplexity_/perplexity_price_data.yaml,sha256=eyp_jZktnEbsEv9VJ0TLNzjp99VdOEA0E-el_dAzCTc,284
-lionagi/integrations/perplexity_/version.py,sha256=
+lionagi/integrations/perplexity_/version.py,sha256=d4QHYmS_30j0hPN8NmNPnQ_Z0TphDRbu4MtQj9cT9e8,22
 lionagi/integrations/perplexity_/api_endpoints/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lionagi/integrations/perplexity_/api_endpoints/api_request.py,sha256=jcBlSkTta0HIyg-mbY_iNpSoQwuDuQKryFTb0p5JjCM,6657
 lionagi/integrations/perplexity_/api_endpoints/data_models.py,sha256=kQQorMksoibGopJm5oXpoR5pH_ZyLdnpmA5xd855P2c,1689
@@ -351,7 +351,7 @@ lionagi/protocols/registries/_component_registry.py,sha256=C04oie1-CQJCoXsjZ6hYA
 lionagi/protocols/registries/_pile_registry.py,sha256=l2RbkjW6OGOHsgPIq9OitIi27r8Fk0vziMI8N_tP3nQ,656
 lionagi/service/__init__.py,sha256=AQS0ezBCtjZOUbT_QEH-Ev5l41-Pk0_KgmKHUgf_Y_A,375
 lionagi/service/complete_request_info.py,sha256=V9cWqmQ-fo952EWCooAvezaM_xuxYU4Xy0Jo_0XJnug,301
-lionagi/service/imodel.py,sha256=
+lionagi/service/imodel.py,sha256=OK71j_hCJb9LATocPJfaDRpXfVpq_Z9r3F6OeOgrgyg,4356
 lionagi/service/rate_limiter.py,sha256=1cCtKfpOo_8h4ZN-oX5HXtnq0iGjF1rxi8htyu65kMU,3746
 lionagi/service/service.py,sha256=58FPZzLM85fNm4WgSzmZRwbHHuLUW1K0VagKZ7A2ZAs,1077
 lionagi/service/service_match_util.py,sha256=gjGzfQeQqkyxMrKx8aINS47r3Pmugbcx8JjmvbEBg7Y,4305
@@ -368,7 +368,7 @@ lionagi/strategies/sequential_chunk.py,sha256=jG_WZXG-Ra3yd30CmX4b3XeCNAUrZGA2-i
 lionagi/strategies/sequential_concurrent_chunk.py,sha256=H7GShaqYlD5XxNJMG2GdOR4Vl8JHDhZb5jxNq8zY0hI,3365
 lionagi/strategies/types.py,sha256=fEvE4d1H4SeCcXcd2dz3q4k8jFIBtxYzjxDN7eJRLtI,769
 lionagi/strategies/utils.py,sha256=DX1dvxia8cNRqEJJbssJ3mgRzo7kgWCTA4y5DYLCCZE,1281
-lionagi-0.5.4.dist-info/METADATA,sha256=
-lionagi-0.5.4.dist-info/WHEEL,sha256=
-lionagi-0.5.4.dist-info/licenses/LICENSE,sha256=
-lionagi-0.5.4.dist-info/RECORD,,
+lionagi-0.5.5.dist-info/METADATA,sha256=tVI6_Y0Ykz5C8X0ptfBdfK9vke6V3m4ZyIkBrFpTIoE,22736
+lionagi-0.5.5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+lionagi-0.5.5.dist-info/licenses/LICENSE,sha256=VXFWsdoN5AAknBCgFqQNgPWYx7OPp-PFEP961zGdOjc,11288
+lionagi-0.5.5.dist-info/RECORD,,
{lionagi-0.5.4.dist-info → lionagi-0.5.5.dist-info}/WHEEL
File without changes
{lionagi-0.5.4.dist-info → lionagi-0.5.5.dist-info}/licenses/LICENSE
File without changes