lionagi-0.9.9-py3-none-any.whl → lionagi-0.9.11-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
@@ -137,8 +137,9 @@ def function_to_schema(
         # Default type to string and update if type hint is available
         param_type = "string"
         if param.annotation is not inspect.Parameter.empty:
-            param_type = py_json_msp[param.annotation.__name__]
-
+            param_type = py_json_msp.get(
+                param.annotation.__name__, param.annotation.__name__
+            )
         # Extract parameter description from docstring, if available
         param_description = parametert_description.get(name)

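The switch from indexing to .get() means an annotation whose name is not in the Python-to-JSON mapping now falls through as its own name instead of raising KeyError. A minimal sketch of that lookup behavior, using a hypothetical subset of the mapping (names assumed for illustration):

    # Hypothetical subset of the Python-to-JSON type-name mapping.
    py_json_msp = {"str": "string", "int": "number", "dict": "object"}

    def map_annotation(annotation: type) -> str:
        # Old behavior: py_json_msp[annotation.__name__] raised KeyError for
        # unmapped annotations; the new .get() falls back to the name itself.
        return py_json_msp.get(annotation.__name__, annotation.__name__)

    class CustomConfig:  # a user-defined type with no JSON mapping
        pass

    print(map_annotation(str))           # -> "string"
    print(map_annotation(CustomConfig))  # -> "CustomConfig" (no KeyError)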
@@ -87,7 +87,7 @@ async def run_instruct(
     # Prepare config for the branch operation
     config = {**ins.model_dump(), **kwargs}
     result = await branch.operate(**config)
-    branch._log_manager.dump()
+    branch.dump_logs()

     # Extract any newly generated instructions
     instructs = []
@@ -93,7 +93,7 @@ async def run_step(

     # Run the step
     result = await branch.operate(**config)
-    branch.msgs.logger.dump()  # Dump logs if needed
+    branch.dump_logs()  # Dump logs if needed
     return result


@@ -92,11 +92,9 @@ class Tool(Element):
    @model_validator(mode="after")
    def _validate_tool_schema(self) -> Self:
        if self.tool_schema is None:
-            self.tool_schema = function_to_schema(self.func_callable)
-            if self.request_options is not None:
-                schema_ = self.request_options.model_json_schema()
-                schema_.pop("title", None)
-                self.tool_schema["function"]["parameters"] = schema_
+            self.tool_schema = function_to_schema(
+                self.func_callable, request_options=self.request_options
+            )

        return self

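The inline request_options handling moves into function_to_schema itself. For reference, what the removed branch did can be reproduced independently of lionagi; this sketch only illustrates the old schema-building step (SearchRequest and the "search" name are made up), and 0.9.11 presumably performs the equivalent inside function_to_schema(..., request_options=...):

    from pydantic import BaseModel

    class SearchRequest(BaseModel):  # hypothetical request_options model
        query: str
        limit: int = 5

    # Take the Pydantic model's JSON schema, drop its "title", and use it as
    # the tool's "parameters" block.
    schema_ = SearchRequest.model_json_schema()
    schema_.pop("title", None)
    tool_schema = {"function": {"name": "search", "parameters": schema_}}
    print(list(tool_schema["function"]["parameters"]["properties"]))  # ['query', 'limit']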
@@ -80,7 +80,7 @@ class ActionRequest(RoledMessage):

    @action_response_id.setter
    def action_response_id(self, action_response_id: IDType) -> None:
-        self.content["action_response_id"] = action_response_id
+        self.content["action_response_id"] = str(action_response_id)

    @property
    def request(self) -> dict[str, Any]:
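Coercing the ID to str keeps the stored message content plain and JSON-serializable (the motivation is inferred, not stated in the diff). A small illustration, with uuid4 standing in for lionagi's IDType:

    import json
    from uuid import uuid4

    action_response_id = uuid4()

    # A raw ID object in message content is not JSON-serializable...
    try:
        json.dumps({"action_response_id": action_response_id})
    except TypeError as exc:
        print("raw id fails:", exc)

    # ...while str(id) round-trips cleanly.
    print(json.dumps({"action_response_id": str(action_response_id)}))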
@@ -10,7 +10,7 @@ from typing import Any

 from pydantic import BaseModel

-from lionagi.utils import copy, is_same_dtype
+from lionagi.utils import copy

 from .base import MessageRole, SenderRecipient
 from .message import MessageRole, RoledMessage, Template, jinja_env
@@ -19,57 +19,60 @@ from .message import MessageRole, RoledMessage, Template, jinja_env
 def prepare_assistant_response(
     assistant_response: BaseModel | list[BaseModel] | dict | str | Any, /
 ) -> dict:
-    if assistant_response:
-        content = {}
-        # Handle model.choices[0].message.content format
-        if isinstance(assistant_response, BaseModel):
-            content["assistant_response"] = (
-                assistant_response.choices[0].message.content or ""
-            )
-            content["model_response"] = assistant_response.model_dump(
-                exclude_none=True, exclude_unset=True
-            )
-        # Handle streaming response[i].choices[0].delta.content format
-        elif isinstance(assistant_response, list):
-            if is_same_dtype(assistant_response, BaseModel):
-                msg = "".join(
-                    [
-                        i.choices[0].delta.content or ""
-                        for i in assistant_response
-                    ]
+
+    assistant_response = (
+        [assistant_response]
+        if not isinstance(assistant_response, list)
+        else assistant_response
+    )
+
+    text_contents = []
+    model_responses = []
+
+    for i in assistant_response:
+
+        if isinstance(i, BaseModel):
+            i = i.model_dump(exclude_none=True, exclude_unset=True)
+
+        model_responses.append(i)
+
+        if isinstance(i, dict):
+            # anthropic standard
+            if "content" in i:
+                content = i["content"]
+                content = (
+                    [content] if not isinstance(content, list) else content
                 )
-                content["assistant_response"] = msg
-                content["model_response"] = [
-                    i.model_dump(
-                        exclude_none=True,
-                        exclude_unset=True,
-                    )
-                    for i in assistant_response
-                ]
-            elif is_same_dtype(assistant_response, dict):
-                msg = "".join(
-                    [
-                        i["choices"][0]["delta"]["content"] or ""
-                        for i in assistant_response
-                    ]
+                for j in content:
+                    if isinstance(j, dict):
+                        if j.get("type") == "text":
+                            text_contents.append(j["text"])
+                    elif isinstance(j, str):
+                        text_contents.append(j)
+
+            # openai standard
+            elif "choices" in i:
+                choices = i["choices"]
+                choices = (
+                    [choices] if not isinstance(choices, list) else choices
                 )
-                content["assistant_response"] = msg
-                content["model_response"] = assistant_response
-        elif isinstance(assistant_response, dict):
-            if "content" in assistant_response:
-                content["assistant_response"] = assistant_response["content"]
-            elif "choices" in assistant_response:
-                content["assistant_response"] = assistant_response["choices"][
-                    0
-                ]["message"]["content"]
-            content["model_response"] = assistant_response
-        elif isinstance(assistant_response, str):
-            content["assistant_response"] = assistant_response
-        else:
-            content["assistant_response"] = str(assistant_response)
-        return content
-    else:
-        return {"assistant_response": ""}
+                for j in choices:
+                    if "message" in j:
+                        text_contents.append(j["message"]["content"] or "")
+                    elif "delta" in j:
+                        text_contents.append(j["delta"]["content"] or "")
+
+        elif isinstance(i, str):
+            text_contents.append(i)
+
+    text_contents = "".join(text_contents)
+    model_responses = (
+        model_responses[0] if len(model_responses) == 1 else model_responses
+    )
+    return {
+        "assistant_response": text_contents,
+        "model_response": model_responses,
+    }


 class AssistantResponse(RoledMessage):
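The rewrite normalizes single responses, streamed chunk lists, Anthropic-style content blocks, and OpenAI-style choices into one dict with "assistant_response" (joined text) and "model_response" (raw dumps). A usage sketch; the import path is inferred from the assistant_response.py entry in the RECORD listing further below, and the expected outputs follow the logic in the hunk above:

    from lionagi.protocols.messages.assistant_response import (
        prepare_assistant_response,
    )

    # OpenAI-style payload: text comes from choices[*]["message"]["content"].
    openai_like = {"choices": [{"message": {"content": "Hello from OpenAI format"}}]}
    print(prepare_assistant_response(openai_like)["assistant_response"])

    # Anthropic-style payload: text comes from content[*]["text"] blocks.
    anthropic_like = {"content": [{"type": "text", "text": "Hello from Anthropic format"}]}
    print(prepare_assistant_response(anthropic_like)["assistant_response"])

    # Streaming chunks arrive as a list; delta text is concatenated in order.
    chunks = [
        {"choices": [{"delta": {"content": "Hel"}}]},
        {"choices": [{"delta": {"content": "lo"}}]},
    ]
    print(prepare_assistant_response(chunks)["assistant_response"])  # -> "Hello"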
@@ -371,25 +371,43 @@ class APICalling(Event):
        if self.payload.get("stream") is True:
            self.streaming = True

-        if self.include_token_usage_to_model:
+        if self.include_token_usage_to_model and self.endpoint.requires_tokens:
            if isinstance(self.payload["messages"][-1], dict):
                required_tokens = self.required_tokens
-                self.payload["messages"][-1][
-                    "content"
-                ] += f"\n\nEstimated Current Token Usage: {required_tokens}"
+                content = self.payload["messages"][-1]["content"]
+                token_msg = (
+                    f"\n\nEstimated Current Token Usage: {required_tokens}"
+                )
+
                if "model" in self.payload:
                    if (
                        self.payload["model"].startswith("gpt-4")
                        or "o1mini" in self.payload["model"]
                        or "o1-preview" in self.payload["model"]
                    ):
-                        self.payload["messages"][-1]["content"] += "/128_000"
+                        token_msg += "/128_000"
                    elif "o1" in self.payload["model"]:
-                        self.payload["messages"][-1]["content"] += "/200_000"
+                        token_msg += "/200_000"
                    elif "sonnet" in self.payload["model"]:
-                        self.payload["messages"][-1]["content"] += "/200_000"
+                        token_msg += "/200_000"
                    elif "haiku" in self.payload["model"]:
-                        self.payload["messages"][-1]["content"] += "/200_000"
+                        token_msg += "/200_000"
+                    elif "gemini" in self.payload["model"]:
+                        token_msg += "/1_000_000"
+                    elif "qwen-turbo" in self.payload["model"]:
+                        token_msg += "/1_000_000"
+
+                if isinstance(content, str):
+                    content += token_msg
+                elif isinstance(content, dict):
+                    if "text" in content:
+                        content["text"] += token_msg
+                elif isinstance(content, list):
+                    for i in reversed(content):
+                        if "text" in i:
+                            i["text"] += token_msg
+                            break
+                self.payload["messages"][-1]["content"] = content

        return self

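The token-usage note is now built once and then appended to string, dict, or list-of-blocks message content. A standalone sketch of the note itself, with the model/context-window pairs exactly as in the hunk above (the helper name token_note is made up for illustration):

    def token_note(model: str, required_tokens: int) -> str:
        msg = f"\n\nEstimated Current Token Usage: {required_tokens}"
        if model.startswith("gpt-4") or "o1mini" in model or "o1-preview" in model:
            msg += "/128_000"
        elif "o1" in model:
            msg += "/200_000"
        elif "sonnet" in model or "haiku" in model:
            msg += "/200_000"
        elif "gemini" in model or "qwen-turbo" in model:
            msg += "/1_000_000"
        return msg

    print(token_note("claude-3-5-sonnet-latest", 1234))  # .../200_000
    print(token_note("gemini-2.0-flash", 1234))           # .../1_000_000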
lionagi/service/imodel.py CHANGED
@@ -9,9 +9,9 @@ from collections.abc import AsyncGenerator, Callable
 from pydantic import BaseModel

 from lionagi.protocols.generic.event import EventStatus
-from lionagi.utils import is_coro_func
+from lionagi.utils import is_coro_func, to_dict

-from .endpoints.base import APICalling, EndPoint
+from .endpoints.base import APICalling, EndPoint, EndpointConfig
 from .endpoints.match_endpoint import match_endpoint
 from .endpoints.rate_limited_processor import RateLimitedAPIExecutor

@@ -352,9 +352,10 @@ class iModel:
        kwargs = self.kwargs
        if "kwargs" in self.kwargs:
            kwargs = self.kwargs["kwargs"]
+
        return {
            "provider": self.endpoint.config.provider,
-            "endpoint": self.endpoint.config.model_dump(),
+            "endpoint": self.endpoint.config.model_dump_json(),
            "api_key": (
                self.api_key_scheme
                if hasattr(self, "api_key_scheme")
@@ -370,18 +371,20 @@ class iModel:
        provider = data.pop("provider", None)
        base_url = data.pop("base_url", None)
        api_key = data.pop("api_key", None)
-        processor_config = data.pop("processor_config", {})

+        processor_config = data.pop("processor_config", {})
        endpoint_config_params = data.pop("endpoint", {})
-        endpoint_ = endpoint_config_params.pop("endpoint", None)
-        endpoint_params = endpoint_config_params.get("endpoint_params", None)
+        endpoint_config_params = to_dict(endpoint_config_params)

-        endpoint = match_endpoint(
-            provider=provider,
-            base_url=base_url,
-            endpoint=endpoint_,
-            endpoint_params=endpoint_params,
+        endpoint_config_params["endpoint"] = endpoint_config_params.get(
+            "endpoint", "chat"
        )
+        match_params = {}
+
+        for i in ("provider", "base_url", "endpoint", "endpoint_params"):
+            if endpoint_config_params.get(i):
+                match_params[i] = endpoint_config_params.pop(i)
+        endpoint = match_endpoint(**match_params)
        endpoint.update_config(**endpoint_config_params)
        return cls(
            provider=provider,
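Since to_dict() now parses the JSON-serialized endpoint config, from_dict filters out the keys match_endpoint cares about and leaves the rest for endpoint.update_config(...). A plain-Python sketch of that filtering step (the sample dict is made up; only the loop mirrors the hunk above):

    endpoint_config_params = {"provider": "openai", "endpoint": "chat", "base_url": None}

    # Only keys with truthy values are forwarded to match_endpoint, so absent
    # fields fall back to its defaults; leftovers go to update_config(...).
    match_params = {}
    for key in ("provider", "base_url", "endpoint", "endpoint_params"):
        if endpoint_config_params.get(key):
            match_params[key] = endpoint_config_params.pop(key)

    print(match_params)            # {'provider': 'openai', 'endpoint': 'chat'}
    print(endpoint_config_params)  # {'base_url': None} stays behind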
@@ -50,10 +50,15 @@ class AnthropicChatCompletionEndPoint(ChatCompletionEndPoint):
        for k, v in kwargs.items():
            if k in self.acceptable_kwargs:
                payload[k] = v
+
+        for i in self.required_kwargs:
+            if i not in payload:
+                raise ValueError(f"Missing required argument: {i}")
+
        if "cache_control" in payload:
            cache_control = payload.pop("cache_control")
            if cache_control:
-                cache_control = "ephemeral"
+                cache_control = {"type": "ephemeral"}
                last_message = payload["messages"][-1]["content"]
                if isinstance(last_message, str):
                    last_message = {
@@ -65,6 +70,20 @@ class AnthropicChatCompletionEndPoint(ChatCompletionEndPoint):
                    last_message[-1], dict
                ):
                    last_message[-1]["cache_control"] = cache_control
+            payload["messages"][-1]["content"] = (
+                [last_message]
+                if not isinstance(last_message, list)
+                else last_message
+            )
+
+        first_message = payload["messages"][0]
+        system = None
+        if first_message.get("role") == "system":
+            system = first_message["content"]
+            system = [{"type": "text", "text": system}]
+            payload["messages"] = payload["messages"][1:]
+            payload["system"] = system
+
        if "api_key" in kwargs:
            headers["x-api-key"] = kwargs["api_key"]
            headers["anthropic-version"] = kwargs.pop(
@@ -33,8 +33,8 @@ class PerplexityChatCompletionRequest(BaseModel):
    """

    model: str = Field(
-        ...,
-        description="The model name, e.g. 'llama-3.1-sonar-small-128k-online'.",
+        "sonar",
+        description="The model name, e.g. 'sonar', (the only model available at the time when this request model was updated, check doc for latest info).",
    )
    messages: list[PerplexityMessage] = Field(
        ..., description="A list of messages forming the conversation so far."
lionagi/version.py CHANGED
@@ -1 +1 @@
-__version__ = "0.9.9"
+__version__ = "0.9.11"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lionagi
-Version: 0.9.9
+Version: 0.9.11
 Summary: An Intelligence Operating System.
 Author-email: HaiyangLi <quantocean.li@gmail.com>
 License: Apache License
@@ -4,7 +4,7 @@ lionagi/_errors.py,sha256=JlBTFJnRWtVYcRxKb7fWFiJHLbykl1E19mSJ8sXYVxg,455
 lionagi/_types.py,sha256=9g7iytvSj3UjZxD-jL06_fxuNfgZyWT3Qnp0XYp1wQU,63
 lionagi/settings.py,sha256=W52mM34E6jXF3GyqCFzVREKZrmnUqtZm_BVDsUiDI_s,1627
 lionagi/utils.py,sha256=K36D9AAGiMPR4eM9tYoiVgvH-NdPPSeMQPls09s7keQ,73223
-lionagi/version.py,sha256=nhsA3KKA-CXSYpbzuChuLyxpDepY_-JffnUNClcYEaU,22
+lionagi/version.py,sha256=hCWvJmnndbpxCyOQ7z-g5qleaxwixXNqkmkxuORqf1I,23
 lionagi/libs/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
 lionagi/libs/parse.py,sha256=JRS3bql0InHJqATnAatl-hQv4N--XXw4P77JHhTFnrc,1011
 lionagi/libs/file/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
@@ -32,7 +32,7 @@ lionagi/libs/schema/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFC
 lionagi/libs/schema/as_readable.py,sha256=W4fi98WVkP5rfZ6A-iWqP5YFJexYCjt9Hf-l0iNs-2Q,5916
 lionagi/libs/schema/extract_code_block.py,sha256=PuJbJj1JnqR5fSZudowPcVPpEoKISLr0MjTOOVXSzwY,2394
 lionagi/libs/schema/extract_docstring.py,sha256=aYyLSRlB8lTH9QF9-6a56uph3AAkNuTyZ0S_duf5-fw,5729
-lionagi/libs/schema/function_to_schema.py,sha256=qLsM-_1ERlLdP_zBwz7ttxMZQ8jr5eTJN8IX1QSkSig,5560
+lionagi/libs/schema/function_to_schema.py,sha256=Ak21_0xCFP71qgb6_wNzaRSVsdkf1ieRjJ92hXo7qPE,5628
 lionagi/libs/schema/json_schema.py,sha256=cuHcaMr748O9g6suNGmRx4tRXcidd5-c7AMGjTIZyHM,7670
 lionagi/libs/token_transform/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lionagi/libs/token_transform/llmlingua.py,sha256=DkeLUlrb7rGx3nZ04aADU9HXXu5mZTf_DBwT0xhzIv4,7
@@ -54,7 +54,7 @@ lionagi/operations/ReAct/utils.py,sha256=84Giel5ToqfbN5F6Tm0uw8yZTTnxiM_jWuFEhnK
 lionagi/operations/_act/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
 lionagi/operations/_act/act.py,sha256=CunHTTZcS6xNUe0xKSDgtMJ7-ucSvHeW4BtmVjXnaxk,2958
 lionagi/operations/brainstorm/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
-lionagi/operations/brainstorm/brainstorm.py,sha256=iRZUW_V-0Ncw7Av0_WBk3oNRWb4LqQU5i2gQDxulYWY,17222
+lionagi/operations/brainstorm/brainstorm.py,sha256=_rkU5U-tNpOB3iyeP5gu0hLVIH2FKMCwCU8n2lsg_Do,17214
 lionagi/operations/brainstorm/prompt.py,sha256=Dqi4NNeztdI4iutggRqjnOrG4a4E2JtwIAtRnjZ_ghQ,610
 lionagi/operations/chat/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
 lionagi/operations/chat/chat.py,sha256=xJAH2H0zyVvxiL3XtW3MC6YrwCCB1uCkwcQIJ1YsIOk,5466
@@ -69,7 +69,7 @@ lionagi/operations/operate/operate.py,sha256=j5dGWhHlcWnO-aaLZ4Xe0Hb1M7FGp9BGm35
 lionagi/operations/parse/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
 lionagi/operations/parse/parse.py,sha256=i2q6YGRwsp2WEu0aySfZ6io7iLNAqPAx1wWd2hUjpgM,3296
 lionagi/operations/plan/__init__.py,sha256=yGBPll6lOqVjadbTvDLGrTlMx3FfBW-e00z7AMvg7Uo,156
-lionagi/operations/plan/plan.py,sha256=uhYbo2hVsNdQ3INUIikiaVSvCUiBUfo9HVsdNN4l0Lg,15313
+lionagi/operations/plan/plan.py,sha256=uLILbTcJ0ioomlflhhe0FeQbi2p6iz6lucppP3K3LWI,15306
 lionagi/operations/plan/prompt.py,sha256=GUNZ8RpHIa89D-_y7GK--Spg0JADI3K13sjf_w3a2mI,993
 lionagi/operations/select/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
 lionagi/operations/select/select.py,sha256=BRy17O-FL0rPPjmoBBhiLzFV-z6mfpc8N-DGl3r8Y2U,2492
@@ -85,7 +85,7 @@ lionagi/operatives/action/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGv
 lionagi/operatives/action/function_calling.py,sha256=PQ2-O6JsJDsBzkJ1ltZEhys5iVFWRZhoid4c2lKf4Dk,5067
 lionagi/operatives/action/manager.py,sha256=RXZrw7bKK1Xai9LzDhFvSs5kGln1Vujkxrx4_FO3CIY,8824
 lionagi/operatives/action/request_response_model.py,sha256=HfdXtnXcRXKEcEtxUgo3Zl8x_DkqSOJ6TLLgBzp5lEM,3378
-lionagi/operatives/action/tool.py,sha256=0Lj6rNE87XWdBItdsFz0hKXa6kKqa0JB35ez7oAaRGc,5584
+lionagi/operatives/action/tool.py,sha256=iidnGtCu7tSWowQPxck7oS0I7jC_u2L9SxzXhfSKod8,5440
 lionagi/operatives/action/utils.py,sha256=kUk_4SRclCJdlTeHYji2Nt_FVT-TMSO-k-o8xJnHuQk,4330
 lionagi/operatives/forms/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
 lionagi/operatives/forms/base.py,sha256=hitr0eKk7Yts2VfpuBGl0YxpClMJEpsy7xWodhg2AhQ,2720
@@ -145,9 +145,9 @@ lionagi/protocols/mail/mailbox.py,sha256=2ETolKA4O2AFD1NzHt9bJG3F7xmwgoyFRLceBKs
 lionagi/protocols/mail/manager.py,sha256=WTM1COl8JeKt683M24gKy9Q_8owDGo2DWOXb64cfaxU,7105
 lionagi/protocols/mail/package.py,sha256=CLpBinYo8FseQKgP8p-65Ne7c8ymw2O-bTpb5259ukA,2838
 lionagi/protocols/messages/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
-lionagi/protocols/messages/action_request.py,sha256=5p2F4V9prEinp09xR3WrBnlJrXcjHaKRH7zv7MVPcUc,7081
+lionagi/protocols/messages/action_request.py,sha256=_iFBLy6rVhyBpZUKVSInqNkEeut4pf9tOqZlT9SzGFE,7086
 lionagi/protocols/messages/action_response.py,sha256=fvie0jRAln6MqAY25fOCY1yBMNQQffXMTl0bVkAe6QA,5401
-lionagi/protocols/messages/assistant_response.py,sha256=3IMsHaHDayJ87NrA9TxIVKV4QxeAooaSinC7dLlXzeA,6412
+lionagi/protocols/messages/assistant_response.py,sha256=Qu0pVcF5e4zDfXI0j--4BNSzAue7DknVqdW3Sdu-pGE,5925
 lionagi/protocols/messages/base.py,sha256=jPHtC7aVWTxb_Gg5dcMOGKzFa3FOJUTbRpLztcDHLBU,2482
 lionagi/protocols/messages/instruction.py,sha256=IIoyF0WkNaIhYAKTpp_aFuu4Iv6EPy_ccZwj1aPOMQw,21307
 lionagi/protocols/messages/manager.py,sha256=dCiHwPop4dy8soksoHrsrcGXI6D87ecsAowuq2v52Po,17370
@@ -161,11 +161,11 @@ lionagi/protocols/messages/templates/instruction_message.jinja2,sha256=L-ptw5OHx
 lionagi/protocols/messages/templates/system_message.jinja2,sha256=JRKJ0aFpYfaXSFouKc_N4unZ35C3yZTOWhIrIdCB5qk,215
 lionagi/protocols/messages/templates/tool_schemas.jinja2,sha256=ozIaSDCRjIAhLyA8VM6S-YqS0w2NcctALSwx4LjDwII,126
 lionagi/service/__init__.py,sha256=DMGXIqPsmut9H5GT0ZeSzQIzYzzPwI-2gLXydpbwiV8,21
-lionagi/service/imodel.py,sha256=GIb0v0gSa9WJA5fmif8nAe-y-j8qHlZNb8FEIuzgE9s,15157
+lionagi/service/imodel.py,sha256=uNSb9XjibkDBC4WbX1HdmXcrd4rVUNkqJPwYQjTYp18,15301
 lionagi/service/manager.py,sha256=FkuqAtLErqLmXNnDtuAdTUFo4uuE_VL660BBGBhzInU,1435
 lionagi/service/types.py,sha256=CHPi8Bxl_yJ1pl2jYZBOrTHbT8_oO9sK75d4LMB651g,486
 lionagi/service/endpoints/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
-lionagi/service/endpoints/base.py,sha256=sArF7dpSfer3OsBLMFqHGslAd4dgJUkCLtYkBWRmxOM,25441
+lionagi/service/endpoints/base.py,sha256=VGnwMHR-ZK4pjA6oKf5LQW7tsXVrNh9tqKObqzz_Odo,26134
 lionagi/service/endpoints/chat_completion.py,sha256=nihV7kCYm7ixdm8dH0JW7vKjqH9yIom4QDXGeDwuO6E,2964
 lionagi/service/endpoints/match_endpoint.py,sha256=x2T-ftzdqCrdITRLkH8UNRDY2Pm359DnX2RDXTBnbpc,2082
 lionagi/service/endpoints/rate_limited_processor.py,sha256=P0CsMyhuG8OHCPYe2qez92Bm7v2ZRq4L5I6LOiAoGYs,5199
@@ -173,7 +173,7 @@ lionagi/service/endpoints/token_calculator.py,sha256=-AKwDvV7C8k8MTmd62ymT0ETSUP
 lionagi/service/providers/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
 lionagi/service/providers/types.py,sha256=NS91ysRFwOs0cpNeQgFhmtl7JrSz2pJm-tt7sZILmQY,683
 lionagi/service/providers/anthropic_/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
-lionagi/service/providers/anthropic_/messages.py,sha256=QIczhn5CfrufcNGF-QqGqKnd5xOpsWEDor5g07jf-2k,2472
+lionagi/service/providers/anthropic_/messages.py,sha256=EnV2vh60k0aQvtnUitHzTlSmyrFxTVxcXAldANg7Rzc,3148
 lionagi/service/providers/exa_/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 lionagi/service/providers/exa_/models.py,sha256=263KP-JSxbxmomNrFeYjB_cebquoMOsCJeWsiKZ0mL4,5420
 lionagi/service/providers/exa_/search.py,sha256=Z3pyJH8KiWiquJSJw8Rd6D7x43BwTFHb2ESsgSicCk0,1932
@@ -189,7 +189,7 @@ lionagi/service/providers/openrouter_/__init__.py,sha256=5y5joOZzfFWERl75auAcNcK
 lionagi/service/providers/openrouter_/chat_completions.py,sha256=0pdXjJCXmCPPbKKVubrnqofaodTOxWTJam8fd3NgrNk,1525
 lionagi/service/providers/perplexity_/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
 lionagi/service/providers/perplexity_/chat_completions.py,sha256=O4MIS_3xIINGjkAZdlw0Bu_jAfBDR4VZA1F8JW2EU1M,1197
-lionagi/service/providers/perplexity_/models.py,sha256=gXH4XGkhZ4aFxvMSDTlHq9Rz1mhu3aTENXAtE-BIr6U,4866
+lionagi/service/providers/perplexity_/models.py,sha256=Fm5NbmWMdFkDKS0Cec__bNvs3St27lgqxFbHKyNCLsw,4945
 lionagi/session/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
 lionagi/session/branch.py,sha256=dKlaM6hh_q7OoXkz4E5S3aS4ksqC2yzdhjzI7xe6pzU,72439
 lionagi/session/prompts.py,sha256=AhuHL19s0TijVZX3tMKUKMi6l88xeVdpkuEn2vJSRyU,3236
@@ -214,7 +214,7 @@ lionagi/tools/file/writer.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,
 lionagi/tools/file/providers/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
 lionagi/tools/file/providers/docling_.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
 lionagi/tools/query/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
-lionagi-0.9.9.dist-info/METADATA,sha256=gaYeleZglGI0tRleBY61jftb61YB83alH8cwS2EuE9o,18436
-lionagi-0.9.9.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-lionagi-0.9.9.dist-info/licenses/LICENSE,sha256=VXFWsdoN5AAknBCgFqQNgPWYx7OPp-PFEP961zGdOjc,11288
-lionagi-0.9.9.dist-info/RECORD,,
+lionagi-0.9.11.dist-info/METADATA,sha256=uzxQLrI_k3Nr_WsS47z4MrA6lcYJihUE5GfPB2usnsI,18437
+lionagi-0.9.11.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+lionagi-0.9.11.dist-info/licenses/LICENSE,sha256=VXFWsdoN5AAknBCgFqQNgPWYx7OPp-PFEP961zGdOjc,11288
+lionagi-0.9.11.dist-info/RECORD,,