lionagi 0.8.4__py3-none-any.whl → 0.8.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lionagi/operations/ReAct/ReAct.py +9 -1
- lionagi/service/endpoints/base.py +11 -0
- lionagi/service/imodel.py +11 -0
- lionagi/service/providers/exa_/search.py +4 -4
- lionagi/service/providers/perplexity_/chat_completions.py +4 -0
- lionagi/service/providers/perplexity_/models.py +144 -0
- lionagi/session/branch.py +48 -5
- lionagi/version.py +1 -1
- {lionagi-0.8.4.dist-info → lionagi-0.8.5.dist-info}/METADATA +1 -1
- {lionagi-0.8.4.dist-info → lionagi-0.8.5.dist-info}/RECORD +12 -11
- {lionagi-0.8.4.dist-info → lionagi-0.8.5.dist-info}/WHEEL +0 -0
- {lionagi-0.8.4.dist-info → lionagi-0.8.5.dist-info}/licenses/LICENSE +0 -0
lionagi/operations/ReAct/ReAct.py
CHANGED
@@ -21,6 +21,10 @@ async def ReAct(
     branch: "Branch",
     instruct: Instruct | dict[str, Any],
     interpret: bool = False,
+    interpret_domain: str | None = None,
+    interpret_style: str | None = None,
+    interpret_sample: str | None = None,
+    interpret_kwargs: dict | None = None,
     tools: Any = None,
     tool_schemas: Any = None,
     response_format: type[BaseModel] | BaseModel = None,
@@ -43,7 +47,11 @@ async def ReAct(
             instruct.to_dict()
             if isinstance(instruct, Instruct)
             else instruct
-        )
+        ),
+        domain=interpret_domain,
+        style=interpret_style,
+        sample_writing=interpret_sample,
+        **(interpret_kwargs or {}),
     )

     # Convert Instruct to dict if necessary
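The four new interpret_* parameters are forwarded to branch.interpret() as domain, style, and sample_writing hints, plus extra kwargs. A minimal usage sketch, assuming an already-configured Branch exposes this operation as branch.ReAct; instruction text and hint values are illustrative:

async def run_react(branch):
    # Hypothetical call site; parameter names match the diff above.
    return await branch.ReAct(
        instruct={"instruction": "Summarize recent work on topic X"},
        interpret=True,                          # refine the raw instruction first
        interpret_domain="research",             # new in 0.8.5: domain hint for the interpret step
        interpret_style="concise",               # new in 0.8.5: style hint
        interpret_sample=None,                   # optional sample writing to imitate
        interpret_kwargs={"temperature": 0.2},   # extra kwargs passed through to branch.interpret
        tools=True,
    )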
lionagi/service/endpoints/base.py
CHANGED
@@ -61,6 +61,7 @@ class EndpointConfig(BaseModel):
         use_enum_values=True,
     )

+    name: str | None = None
     provider: str | None = None
     base_url: str | None = None
     endpoint: str
@@ -75,6 +76,7 @@ class EndpointConfig(BaseModel):
     requires_tokens: bool = False
     api_version: str | None = None
     allowed_roles: list[str] | None = None
+    request_options: type | None = None


 class EndPoint(ABC):
@@ -100,6 +102,11 @@ class EndPoint(ABC):
         config.update(kwargs)
         self.config = EndpointConfig(**config)

+    @property
+    def name(self) -> str | None:
+        """str | None: The name of the endpoint, if any."""
+        return self.config.name or self.endpoint
+
     @property
     def is_streamable(self) -> bool:
         """bool: Whether this endpoint supports streaming responses."""
@@ -185,6 +192,10 @@ class EndPoint(ABC):
         """bool: Indicates if this endpoint uses role-based messages."""
         return self.allowed_roles is not None

+    @property
+    def request_options(self) -> type | None:
+        return self.config.request_options
+
     def create_payload(self, **kwargs) -> dict:
         """Generates a request payload (and headers) for this endpoint.

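Taken together, EndpointConfig gains name and request_options fields, and EndPoint surfaces them as properties, with name falling back to the endpoint path. A rough sketch of a config in the new shape; the MyRequest model and all values are hypothetical, only the two new keys come from the diff above:

from pydantic import BaseModel

class MyRequest(BaseModel):
    # Hypothetical request-options model used to describe/validate request bodies.
    query: str

EXAMPLE_CONFIG = {
    "name": "search_example",        # new: EndPoint.name returns this, or config.endpoint if unset
    "provider": "example",
    "base_url": "https://api.example.com",
    "endpoint": "search",
    "request_options": MyRequest,    # new: surfaced via EndPoint.request_options (may be None)
}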
lionagi/service/imodel.py
CHANGED
@@ -5,6 +5,8 @@
 import os
 import warnings

+from pydantic import BaseModel
+
 from .endpoints.base import APICalling, EndPoint
 from .endpoints.match_endpoint import match_endpoint
 from .endpoints.rate_limited_processor import RateLimitedAPIExecutor
@@ -266,6 +268,15 @@ class iModel:
         """
         return self.kwargs.get("model", "")

+    @property
+    def request_options(self) -> type[BaseModel] | None:
+        """type[BaseModel] | None: The request options model for the endpoint.
+
+        Returns:
+            The request options model if available; otherwise, None.
+        """
+        return self.endpoint.request_options
+
     def to_dict(self):
         kwargs = self.kwargs
         if "kwargs" in self.kwargs:
lionagi/service/providers/exa_/search.py
CHANGED
@@ -1,10 +1,8 @@
-from typing import
+from typing import Literal

 from lionagi.service.endpoints.base import EndPoint

-
-from .models import ExaSearchRequest
-
+from .models import ExaSearchRequest

 CATEGORY_OPTIONS = Literal[
     "article",
@@ -21,6 +19,7 @@ CATEGORY_OPTIONS = Literal[
 ]

 SEARCH_CONFIG = {
+    "name": "search_exa",
     "provider": "exa",
     "base_url": "https://api.exa.ai",
     "endpoint": "search",
@@ -47,6 +46,7 @@ SEARCH_CONFIG = {
         "type",  # keyword, neural, auto
         "useAutoPrompt",
     },
+    "request_options": ExaSearchRequest,
 }


lionagi/service/providers/perplexity_/chat_completions.py
CHANGED
@@ -4,7 +4,10 @@

 from lionagi.service.endpoints.chat_completion import ChatCompletionEndPoint

+from .models import PerplexityChatCompletionRequest
+
 CHAT_COMPLETION_CONFIG = {
+    "name": "search_perplexity",
     "provider": "perplexity",
     "base_url": "https://api.perplexity.ai",
     "endpoint": "chat/completions",
@@ -31,6 +34,7 @@ CHAT_COMPLETION_CONFIG = {
         "frequency_penalty",
     },
     "allowed_roles": ["user", "assistant"],
+    "request_options": PerplexityChatCompletionRequest,
 }


lionagi/service/providers/perplexity_/models.py
ADDED
@@ -0,0 +1,144 @@
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel, Field, model_validator
+
+
+class PerplexityRole(str, Enum):
+    """Roles allowed in Perplexity's messages."""
+
+    system = "system"
+    user = "user"
+    assistant = "assistant"
+
+
+class PerplexityMessage(BaseModel):
+    """
+    A single message in the conversation.
+    `role` can be 'system', 'user', or 'assistant'.
+    `content` is the text for that conversation turn.
+    """
+
+    role: PerplexityRole = Field(
+        ...,
+        description="The role of the speaker. Must be system, user, or assistant.",
+    )
+    content: str = Field(..., description="The text content of this message.")
+
+
+class PerplexityChatCompletionRequest(BaseModel):
+    """
+    Represents the request body for Perplexity's Chat Completions endpoint.
+    Endpoint: POST https://api.perplexity.ai/chat/completions
+    """
+
+    model: str = Field(
+        ...,
+        description="The model name, e.g. 'llama-3.1-sonar-small-128k-online'.",
+    )
+    messages: list[PerplexityMessage] = Field(
+        ..., description="A list of messages forming the conversation so far."
+    )
+
+    # Optional parameters
+    frequency_penalty: float | None = Field(
+        default=None,
+        gt=0,
+        description=(
+            "Multiplicative penalty > 0. Values > 1.0 penalize repeated tokens more strongly. "
+            "Value=1.0 means no penalty. Incompatible with presence_penalty."
+        ),
+    )
+    presence_penalty: float | None = Field(
+        default=None,
+        ge=-2.0,
+        le=2.0,
+        description=(
+            "Penalizes tokens that have appeared so far (range -2 to 2). "
+            "Positive values encourage talking about new topics. Incompatible with frequency_penalty."
+        ),
+    )
+    max_tokens: int | None = Field(
+        default=None,
+        description=(
+            "Maximum number of completion tokens. If omitted, model generates tokens until it "
+            "hits stop or context limit."
+        ),
+    )
+    return_images: bool | None = Field(
+        default=None,
+        description="If True, attempt to return images (closed beta feature).",
+    )
+    return_related_questions: bool | None = Field(
+        default=None,
+        description="If True, attempt to return related questions (closed beta feature).",
+    )
+    search_domain_filter: list[Any] | None = Field(
+        default=None,
+        description=(
+            "List of domains to limit or exclude in the online search. Example: ['example.com', '-twitter.com']. "
+            "Supports up to 3 entries. (Closed beta feature.)"
+        ),
+    )
+    search_recency_filter: str | None = Field(
+        default=None,
+        description=(
+            "Returns search results within a specified time interval: 'month', 'week', 'day', or 'hour'."
+        ),
+    )
+    stream: bool | None = Field(
+        default=None,
+        description=(
+            "If True, response is returned incrementally via Server-Sent Events (SSE)."
+        ),
+    )
+    temperature: float | None = Field(
+        default=None,
+        ge=0.0,
+        lt=2.0,
+        description=(
+            "Controls randomness of sampling, range [0, 2). Higher => more random. "
+            "Defaults to 0.2."
+        ),
+    )
+    top_k: int | None = Field(
+        default=None,
+        ge=0,
+        le=2048,
+        description=(
+            "Top-K filtering. 0 disables top-k filtering. If set, only the top K tokens are considered. "
+            "We recommend altering either top_k or top_p, but not both."
+        ),
+    )
+    top_p: float | None = Field(
+        default=None,
+        ge=0.0,
+        le=1.0,
+        description=(
+            "Nucleus sampling threshold. We recommend altering either top_k or top_p, but not both."
+        ),
+    )
+
+    @model_validator(mode="before")
+    def validate_penalties(cls, values):
+        """
+        Disallow using both frequency_penalty != 1.0 and presence_penalty != 0.0 at once,
+        since the docs say they're incompatible.
+        """
+        freq_pen = values.get("frequency_penalty", 1.0)
+        pres_pen = values.get("presence_penalty", 0.0)
+
+        # The doc states frequency_penalty is incompatible with presence_penalty.
+        # We'll enforce that if presence_penalty != 0, frequency_penalty must be 1.0
+        # or vice versa. Adjust logic as needed.
+        if pres_pen != 0.0 and freq_pen != 1.0:
+            raise ValueError(
+                "presence_penalty is incompatible with frequency_penalty. "
+                "Please use only one: either presence_penalty=0 with freq_pen !=1, "
+                "or presence_penalty!=0 with freq_pen=1."
+            )
+        return values
+
+    def to_dict(self) -> dict:
+        """Return a dict suitable for JSON serialization and sending to Perplexity API."""
+        return self.model_dump(exclude_none=True)
lionagi/session/branch.py
CHANGED
@@ -45,6 +45,7 @@ from lionagi.protocols.types import (
     SenderRecipient,
     System,
 )
+from lionagi.service.endpoints.base import EndPoint
 from lionagi.service.types import iModel, iModelManager
 from lionagi.settings import Settings
 from lionagi.utils import UNDEFINED, alcall, bcall, copy
@@ -548,12 +549,38 @@ class Branch(Element, Communicatable, Relational):

     def connect(
         self,
-
-
-
+        provider: str = None,
+        base_url: str = None,
+        endpoint: str | EndPoint = "chat",
+        endpoint_params: list[str] | None = None,
+        api_key: str = None,
+        queue_capacity: int = 100,
+        capacity_refresh_time: float = 60,
+        interval: float | None = None,
+        limit_requests: int = None,
+        limit_tokens: int = None,
+        invoke_with_endpoint: bool = False,
+        imodel: iModel = None,
+        name: str = None,
+        request_options: type[BaseModel] = None,
         description: str = None,
         update: bool = False,
     ):
+        if not imodel:
+            imodel = iModel(
+                provider=provider,
+                base_url=base_url,
+                endpoint=endpoint,
+                endpoint_params=endpoint_params,
+                api_key=api_key,
+                queue_capacity=queue_capacity,
+                capacity_refresh_time=capacity_refresh_time,
+                interval=interval,
+                limit_requests=limit_requests,
+                limit_tokens=limit_tokens,
+                invoke_with_endpoint=invoke_with_endpoint,
+            )
+
         if not update and name in self.tools:
             raise ValueError(f"Tool with name '{name}' already exists.")

@@ -563,13 +590,13 @@ class Branch(Element, Communicatable, Relational):
             self._log_manager.log(Log.create(api_call))
             return api_call.response

-        _connect.__name__ = name
+        _connect.__name__ = name or imodel.endpoint.name
         if description:
             _connect.__doc__ = description

         tool = Tool(
             func_callable=_connect,
-            request_options=request_options,
+            request_options=request_options or imodel.request_options,
         )
         self._action_manager.register_tools(tool, update=update)

@@ -1521,6 +1548,10 @@ class Branch(Element, Communicatable, Relational):
         self,
         instruct: Instruct | dict[str, Any],
         interpret: bool = False,
+        interpret_domain: str | None = None,
+        interpret_style: str | None = None,
+        interpret_sample: str | None = None,
+        interpret_kwargs: dict | None = None,
         tools: Any = None,
         tool_schemas: Any = None,
         response_format: type[BaseModel] = None,
@@ -1547,6 +1578,14 @@ class Branch(Element, Communicatable, Relational):
             interpret (bool, optional):
                 If `True`, first interprets (`branch.interpret`) the instructions to refine them
                 before proceeding. Defaults to `False`.
+            interpret_domain (str | None, optional):
+                Optional domain hint for the interpretation step.
+            interpret_style (str | None, optional):
+                Optional style hint for the interpretation step.
+            interpret_sample (str | None, optional):
+                Optional sample hint for the interpretation step.
+            interpret_kwargs (dict | None, optional):
+                Additional arguments for the interpretation step.
             tools (Any, optional):
                 Tools to be made available for the ReAct process. If omitted or `None`,
                 and if no `tool_schemas` are provided, it defaults to `True` (all tools).
@@ -1595,6 +1634,10 @@ class Branch(Element, Communicatable, Relational):
             self,
             instruct,
             interpret=interpret,
+            interpret_domain=interpret_domain,
+            interpret_style=interpret_style,
+            interpret_sample=interpret_sample,
+            interpret_kwargs=interpret_kwargs,
             tools=tools,
             tool_schemas=tool_schemas,
             response_format=response_format,
lionagi/version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.8.4"
+__version__ = "0.8.5"
{lionagi-0.8.4.dist-info → lionagi-0.8.5.dist-info}/RECORD
CHANGED
@@ -4,7 +4,7 @@ lionagi/_errors.py,sha256=wNKdnVQvE_CHEstK7htrrj334RA_vbGcIds-3pUiRkc,455
 lionagi/_types.py,sha256=9g7iytvSj3UjZxD-jL06_fxuNfgZyWT3Qnp0XYp1wQU,63
 lionagi/settings.py,sha256=k9zRJXv57TveyfHO3Vr9VGiKrSwlRUUVKt5zf6v9RU4,1627
 lionagi/utils.py,sha256=QbF4E1PG-BaRcEVH3kJIYCJVNq-oRNoTxjda5k8NYW4,73177
-lionagi/version.py,sha256=
+lionagi/version.py,sha256=K0kGrhh1kzVisZcoSkeuJdC06rTwxufV05Vy2hOVGoo,22
 lionagi/libs/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/libs/parse.py,sha256=tpEbmIRGuHhLCJlUlm6fjmqm_Z6XJLAXGNFHNuk422I,1011
 lionagi/libs/file/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
@@ -48,7 +48,7 @@ lionagi/operations/__init__.py,sha256=O7nV0tedpUe7_OlUWmCcduGPFtqtzWZcR_SIOnjLsr
 lionagi/operations/manager.py,sha256=H7UY86PIxvxKdzJY9YVsWyJcqlwLWhVyvm4sYePH_uY,565
 lionagi/operations/types.py,sha256=LIa68xcyKLVafof-DSFwKtSkneuYPFqrtGyClohYI6o,704
 lionagi/operations/utils.py,sha256=Twy6L_UFt9JqJFRYuKKTKVZIXsePidNl5ipcYcCbesI,1220
-lionagi/operations/ReAct/ReAct.py,sha256=
+lionagi/operations/ReAct/ReAct.py,sha256=pmg50Bz859dfoFW4S-fh5-2FUZKkleCVISMp4d6ye5E,4018
 lionagi/operations/ReAct/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/operations/ReAct/utils.py,sha256=yBsbaZm3NNb-LhdjdK3jVmxSYbp0enWzl8d09iv8oSo,1099
 lionagi/operations/_act/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
@@ -161,11 +161,11 @@ lionagi/protocols/messages/templates/instruction_message.jinja2,sha256=L-ptw5OHx
 lionagi/protocols/messages/templates/system_message.jinja2,sha256=JRKJ0aFpYfaXSFouKc_N4unZ35C3yZTOWhIrIdCB5qk,215
 lionagi/protocols/messages/templates/tool_schemas.jinja2,sha256=ozIaSDCRjIAhLyA8VM6S-YqS0w2NcctALSwx4LjDwII,126
 lionagi/service/__init__.py,sha256=DMGXIqPsmut9H5GT0ZeSzQIzYzzPwI-2gLXydpbwiV8,21
-lionagi/service/imodel.py,sha256=
+lionagi/service/imodel.py,sha256=zQq9cdVPpEAPB7IscntExvtHOYA5ToiWonmD2n93pEw,12273
 lionagi/service/manager.py,sha256=MKSYBkg23s7YhZy5GEFdnpspEnhPVfFhpkpoJe20D7k,1435
 lionagi/service/types.py,sha256=v9SAn5-GTmds4Mar13Or_VFrRHCinBK99dmeDUd-QNk,486
 lionagi/service/endpoints/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
-lionagi/service/endpoints/base.py,sha256=
+lionagi/service/endpoints/base.py,sha256=SaYobDBCdKd4398TyFPp5u3PKyMnzEbm6PsoUxVkZDA,18605
 lionagi/service/endpoints/chat_completion.py,sha256=9ltSQaKPH43WdEDW32_-f5x07I9hOU8g-T_PAG-nYsQ,2529
 lionagi/service/endpoints/match_endpoint.py,sha256=hIGYyok1y53FfI6av5NfYMygRIpDWYZbdCj0pJJfmPY,1874
 lionagi/service/endpoints/rate_limited_processor.py,sha256=umri0FofbyBSFdAQBEsviDB5K6N12LkRiXQgSOorGKg,4663
@@ -176,7 +176,7 @@ lionagi/service/providers/anthropic_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKU
 lionagi/service/providers/anthropic_/messages.py,sha256=PTZZ2VXVMRHWY84YFIzrft9gVrcH2V-NIq_Phi9_-xI,1760
 lionagi/service/providers/exa_/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lionagi/service/providers/exa_/models.py,sha256=263KP-JSxbxmomNrFeYjB_cebquoMOsCJeWsiKZ0mL4,5420
-lionagi/service/providers/exa_/search.py,sha256=
+lionagi/service/providers/exa_/search.py,sha256=Z3pyJH8KiWiquJSJw8Rd6D7x43BwTFHb2ESsgSicCk0,1932
 lionagi/service/providers/exa_/types.py,sha256=8ODjXpFajBE9-DGqBJNS--GObwmLSDi667xS84z_AgA,139
 lionagi/service/providers/groq_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/service/providers/groq_/chat_completions.py,sha256=578NqQYyrIYjIemyL3bagvFGE6ear_w4S1HNlPWA5mg,1343
@@ -185,11 +185,12 @@ lionagi/service/providers/openai_/chat_completions.py,sha256=SfRcEnMTn3MD59YuZCl
 lionagi/service/providers/openrouter_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
 lionagi/service/providers/openrouter_/chat_completions.py,sha256=MRf4ZbMCgzNIL4gxUZTD-KeFe8JYDn1Fu40Jph3bCH8,1525
 lionagi/service/providers/perplexity_/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
-lionagi/service/providers/perplexity_/chat_completions.py,sha256=
+lionagi/service/providers/perplexity_/chat_completions.py,sha256=jhE-KHWRX6yYEeKWLMCKLgK3bQzieSv2viqQWDP8q0Q,1197
+lionagi/service/providers/perplexity_/models.py,sha256=gXH4XGkhZ4aFxvMSDTlHq9Rz1mhu3aTENXAtE-BIr6U,4866
 lionagi/session/__init__.py,sha256=v8vNyJVIVj8_Oz9RJdVe6ZKUQMYTgDh1VQpnr1KdLaw,112
-lionagi/session/branch.py,sha256=
+lionagi/session/branch.py,sha256=JFRX_amOH5puYBIFifvXlk6YztUTi8D2Zy-eck0AFn8,65757
 lionagi/session/session.py,sha256=po6C7PnM0iu_ISHUo4PBzzQ61HFOgcsAUfPoO--eLak,8987
-lionagi-0.8.
-lionagi-0.8.
-lionagi-0.8.
-lionagi-0.8.
+lionagi-0.8.5.dist-info/METADATA,sha256=nTeNxoGDS7shH8qi3iSb03arU23BDaQ2DgB9_UJvTEQ,22819
+lionagi-0.8.5.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+lionagi-0.8.5.dist-info/licenses/LICENSE,sha256=VXFWsdoN5AAknBCgFqQNgPWYx7OPp-PFEP961zGdOjc,11288
+lionagi-0.8.5.dist-info/RECORD,,

{lionagi-0.8.4.dist-info → lionagi-0.8.5.dist-info}/WHEEL
File without changes

{lionagi-0.8.4.dist-info → lionagi-0.8.5.dist-info}/licenses/LICENSE
File without changes