lionagi 0.14.6__py3-none-any.whl → 0.14.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lionagi/fields/instruct.py CHANGED
@@ -11,7 +11,6 @@ from lionagi.libs.validate.common_field_validators import (
      validate_nullable_jsonvalue_field,
  )
  from lionagi.models import FieldModel, HashableModel
- from lionagi.utils import to_num

  __all__ = (
      "Instruct",
lionagi/libs/hash/__init__.py ADDED
@@ -0,0 +1,3 @@
+ from .manager import HashUtils
+
+ __all__ = ("HashUtils",)
lionagi/libs/hash/hash_dict.py ADDED
@@ -0,0 +1,108 @@
+ import copy
+
+ from pydantic import BaseModel as PydanticBaseModel
+
+ __all__ = ("hash_dict",)
+
+ # --- Canonical Representation Generator ---
+ _PRIMITIVE_TYPES = (str, int, float, bool, type(None))
+ _TYPE_MARKER_DICT = 0
+ _TYPE_MARKER_LIST = 1
+ _TYPE_MARKER_TUPLE = 2
+ _TYPE_MARKER_SET = 3
+ _TYPE_MARKER_FROZENSET = 4
+ _TYPE_MARKER_PYDANTIC = 5 # Distinguishes dumped Pydantic models
+
+
+ def _generate_hashable_representation(item: any) -> any:
+     """
+     Recursively converts a Python object into a stable, hashable representation.
+     This ensures that logically identical but structurally different inputs
+     (e.g., dicts with different key orders) produce the same representation.
+     """
+     if isinstance(item, _PRIMITIVE_TYPES):
+         return item
+
+     if isinstance(item, PydanticBaseModel):
+         # Process the Pydantic model by first dumping it to a dict, then processing that dict.
+         # The type marker distinguishes this from a regular dictionary.
+         return (
+             _TYPE_MARKER_PYDANTIC,
+             _generate_hashable_representation(item.model_dump()),
+         )
+
+     if isinstance(item, dict):
+         # Sort dictionary items by key (stringified) for order-insensitivity.
+         return (
+             _TYPE_MARKER_DICT,
+             tuple(
+                 (str(k), _generate_hashable_representation(v))
+                 for k, v in sorted(item.items(), key=lambda x: str(x[0]))
+             ),
+         )
+
+     if isinstance(item, list):
+         return (
+             _TYPE_MARKER_LIST,
+             tuple(_generate_hashable_representation(elem) for elem in item),
+         )
+
+     if isinstance(item, tuple):
+         return (
+             _TYPE_MARKER_TUPLE,
+             tuple(_generate_hashable_representation(elem) for elem in item),
+         )
+
+     # frozenset must be checked before set
+     if isinstance(item, frozenset):
+         try: # Attempt direct sort for comparable elements
+             sorted_elements = sorted(list(item))
+         except TypeError: # Fallback for unorderable mixed types
+             sorted_elements = sorted(
+                 list(item), key=lambda x: (str(type(x)), str(x))
+             )
+         return (
+             _TYPE_MARKER_FROZENSET,
+             tuple(
+                 _generate_hashable_representation(elem)
+                 for elem in sorted_elements
+             ),
+         )
+
+     if isinstance(item, set):
+         try:
+             sorted_elements = sorted(list(item))
+         except TypeError:
+             sorted_elements = sorted(
+                 list(item), key=lambda x: (str(type(x)), str(x))
+             )
+         return (
+             _TYPE_MARKER_SET,
+             tuple(
+                 _generate_hashable_representation(elem)
+                 for elem in sorted_elements
+             ),
+         )
+
+     # Fallback for other types (e.g., custom objects not derived from the above)
+     try:
+         return str(item)
+     except Exception: # If str() fails for some reason
+         return repr(item)
+
+
+ def hash_dict(data: any, strict: bool = False) -> int:
+     data_to_process = data
+     if strict:
+         data_to_process = copy.deepcopy(data)
+
+     hashable_repr = _generate_hashable_representation(data_to_process)
+
+     try:
+         return hash(hashable_repr)
+     except TypeError as e:
+         raise TypeError(
+             f"The generated representation for the input data was not hashable. "
+             f"Input type: {type(data).__name__}, Representation type: {type(hashable_repr).__name__}. "
+             f"Original error: {e}"
+         )
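For orientation, a minimal usage sketch of the new helper (import path taken from the RECORD entries at the end of this diff; the sample values are illustrative):

```python
from lionagi.libs.hash.hash_dict import hash_dict

a = {"x": 1, "y": [1, 2, {"z": 3}]}
b = {"y": [1, 2, {"z": 3}], "x": 1}  # same data, different key order

# Dict items are sorted by stringified key, so key order does not affect the hash.
assert hash_dict(a) == hash_dict(b)

# strict=True deep-copies the input before hashing, guarding against mutation mid-hash.
print(hash_dict(a, strict=True))
```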
lionagi/libs/hash/manager.py ADDED
@@ -0,0 +1,26 @@
+ class HashUtils:
+     @staticmethod
+     def hash_dict(data: any, strict: bool = False) -> int:
+         """
+         Computes a deterministic hash for various Python data structures including
+         dictionaries, Pydantic BaseModels, lists, tuples, sets, frozensets, and primitives.
+
+         The hash is deterministic within the same Python process run (respecting
+         PYTHONHASHSEED for built-in hash behavior on strings, bytes, etc.).
+         It's suitable for tasks like finding unique objects within a collection
+         during a single program execution.
+
+         Args:
+             data: The Python object to hash.
+             strict: if True, will make a deep copy of the input data to ensure immutability.
+
+         Returns:
+             An integer hash value.
+
+         Raises:
+             TypeError: If the generated internal representation of the data is not hashable,
+                 though this is unlikely with the current _generate_hashable_representation.
+         """
+         from .hash_dict import hash_dict as _hash_dict
+
+         return _hash_dict(data, strict=strict)
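HashUtils is a thin facade over the same function; a short sketch of the deduplication use case its docstring mentions (the sample records are made up):

```python
from lionagi.libs.hash import HashUtils

records = [{"id": 1, "tag": "a"}, {"tag": "a", "id": 1}, {"id": 2, "tag": "b"}]
seen, unique = set(), []
for rec in records:
    h = HashUtils.hash_dict(rec)
    if h not in seen:
        seen.add(h)
        unique.append(rec)

print(len(unique))  # 2 -- the first two records are logically identical
```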
lionagi/models/hashable_model.py CHANGED
@@ -1,7 +1,8 @@
  from pydantic import BaseModel
  from typing_extensions import Self

- from lionagi.utils import UNDEFINED, hash_dict
+ from lionagi.libs.hash.hash_dict import hash_dict
+ from lionagi.utils import UNDEFINED


  class HashableModel(BaseModel):
lionagi/protocols/messages/message.py CHANGED
@@ -2,6 +2,8 @@
  #
  # SPDX-License-Identifier: Apache-2.0

+ from __future__ import annotations
+
  import json
  from pathlib import Path
  from typing import Any
@@ -147,7 +149,7 @@ class RoledMessage(Node, Sendable):
          """
          return self._flag == MessageFlag.MESSAGE_CLONE

-     def clone(self, keep_role: bool = True) -> "RoledMessage":
+     def clone(self, keep_role: bool = True) -> RoledMessage:
          """
          Create a shallow copy of this message, possibly resetting the role.

lionagi/service/connections/providers/_claude_code/models.py CHANGED
@@ -43,7 +43,7 @@ CLAUDE_CODE_OPTION_PARAMS = {
  # --------------------------------------------------------------------------- request model
  class ClaudeCodeRequest(BaseModel):
      # -- conversational bits -------------------------------------------------
-     prompt: str = Field(description="The prompt for Claude Code")
+     prompt: str = Field(description="The prompt for Claude Code")
      system_prompt: str | None = None
      append_system_prompt: str | None = None
      max_turns: int | None = None
@@ -61,6 +61,7 @@ class ClaudeCodeRequest(BaseModel):
      max_thinking_tokens: int | None = None
      mcp_tools: list[str] = Field(default_factory=list)
      mcp_servers: dict[str, Any] = Field(default_factory=dict)
+     mcp_config: str | Path | None = Field(None, exclude=True)
      permission_mode: ClaudePermission | None = None
      permission_prompt_tool_name: str | None = None
      disallowed_tools: list[str] = Field(default_factory=list)
@@ -68,11 +69,10 @@ class ClaudeCodeRequest(BaseModel):
      # -- internal use --------------------------------------------------------
      auto_finish: bool = Field(
          default=False,
-         exclude=True,
          description="Automatically finish the conversation after the first response",
      )
-     verbose_output: bool = Field(default=False, exclude=True)
-     cli_display_theme: Literal["light", "dark"] = "light"
+     verbose_output: bool = Field(default=False)
+     cli_display_theme: Literal["light", "dark"] = "dark"

      # ------------------------ validators & helpers --------------------------
      @field_validator("permission_mode", mode="before")
@@ -164,6 +164,15 @@ class ClaudeCodeRequest(BaseModel):
          if self.add_dir:
              args += ["--add-dir", self.add_dir]

+         if self.permission_prompt_tool_name:
+             args += [
+                 "--permission-prompt-tool",
+                 self.permission_prompt_tool_name,
+             ]
+
+         if self.mcp_config:
+             args += ["--mcp-config", str(self.mcp_config)]
+
          args += ["--model", self.model or "sonnet", "--verbose"]
          return args

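In effect, the two new request fields map straight onto CLI flags. A standalone sketch of that mapping (the helper and sample path below are illustrative, not part of the package):

```python
from pathlib import Path

def extra_cli_args(permission_prompt_tool_name: str | None, mcp_config: str | Path | None) -> list[str]:
    # Mirrors the flag-appending logic added in the hunk above.
    args: list[str] = []
    if permission_prompt_tool_name:
        args += ["--permission-prompt-tool", permission_prompt_tool_name]
    if mcp_config:
        args += ["--mcp-config", str(mcp_config)]
    return args

print(extra_cli_args(None, Path(".mcp.json")))  # ['--mcp-config', '.mcp.json']
```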
lionagi/service/connections/providers/_claude_code/stream_cli.py CHANGED
@@ -348,3 +348,12 @@ async def stream_claude_code_cli( # noqa: C901 (complexity from branching is f
      _pp_final(session, theme)

      yield session
+
+
+ __all__ = (
+     "CLAUDE",
+     "stream_claude_code_cli",
+     "ndjson_from_cli",
+     "ClaudeChunk",
+     "ClaudeSession",
+ )
lionagi/service/types.py CHANGED
@@ -3,7 +3,7 @@
  # SPDX-License-Identifier: Apache-2.0

  from .connections.api_calling import APICalling
- from .connections.endpoint import Endpoint
+ from .connections.endpoint import Endpoint, EndpointConfig
  from .imodel import iModel
  from .manager import iModelManager
  from .rate_limited_processor import RateLimitedAPIExecutor
@@ -12,6 +12,7 @@ from .token_calculator import TokenCalculator
  __all__ = (
      "APICalling",
      "Endpoint",
+     "EndpointConfig",
      "RateLimitedAPIExecutor",
      "TokenCalculator",
      "iModel",
lionagi/version.py CHANGED
@@ -1 +1 @@
- __version__ = "0.14.6"
+ __version__ = "0.14.8"
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: lionagi
- Version: 0.14.6
+ Version: 0.14.8
  Summary: An Intelligence Operating System.
  Author-email: HaiyangLi <quantocean.li@gmail.com>, Liangbingyan Luo <llby_luo@outlook.com>
  License: Apache License
@@ -234,9 +234,9 @@ Requires-Dist: tiktoken>=0.8.0
  Requires-Dist: toml>=0.9.0
  Provides-Extra: all
  Requires-Dist: aiosqlite>=0.21.0; extra == 'all'
- Requires-Dist: claude-code-sdk>=0.0.14; extra == 'all'
+ Requires-Dist: claude-code-sdk>=0.0.15; extra == 'all'
  Requires-Dist: datamodel-code-generator>=0.31.2; extra == 'all'
- Requires-Dist: docling>=2.15.1; extra == 'all'
+ Requires-Dist: docling>=2.15.0; extra == 'all'
  Requires-Dist: fastmcp>=2.10.5; extra == 'all'
  Requires-Dist: matplotlib>=3.7.0; extra == 'all'
  Requires-Dist: networkx>=3.0.0; extra == 'all'
@@ -244,36 +244,26 @@ Requires-Dist: ollama>=0.4.0; extra == 'all'
  Requires-Dist: pydapter[postgres]; extra == 'all'
  Requires-Dist: rich>=13.0.0; extra == 'all'
  Provides-Extra: claude-code
- Requires-Dist: claude-code-sdk>=0.0.14; extra == 'claude-code'
- Provides-Extra: docs
- Requires-Dist: furo>=2024.8.6; extra == 'docs'
- Requires-Dist: sphinx-autobuild>=2024.10.3; extra == 'docs'
- Requires-Dist: sphinx>=8.1.3; extra == 'docs'
+ Requires-Dist: claude-code-sdk>=0.0.15; extra == 'claude-code'
  Provides-Extra: graph
  Requires-Dist: matplotlib>=3.7.0; extra == 'graph'
  Requires-Dist: networkx>=3.0.0; extra == 'graph'
- Provides-Extra: lint
- Requires-Dist: black[jupyter]>=24.10.0; extra == 'lint'
- Requires-Dist: isort>=5.13.2; extra == 'lint'
- Requires-Dist: pre-commit>=4.0.1; extra == 'lint'
  Provides-Extra: mcp
  Requires-Dist: fastmcp>=2.10.5; extra == 'mcp'
  Provides-Extra: ollama
  Requires-Dist: ollama>=0.4.0; extra == 'ollama'
  Provides-Extra: postgres
- Requires-Dist: aiosqlite>=0.21.0; extra == 'postgres'
  Requires-Dist: pydapter[postgres]; extra == 'postgres'
  Provides-Extra: reader
- Requires-Dist: docling>=2.15.1; extra == 'reader'
+ Requires-Dist: docling>=2.15.0; extra == 'reader'
  Provides-Extra: rich
  Requires-Dist: rich>=13.0.0; extra == 'rich'
  Provides-Extra: schema
  Requires-Dist: datamodel-code-generator>=0.31.2; extra == 'schema'
- Provides-Extra: test
- Requires-Dist: pytest-asyncio>=1.0.0; extra == 'test'
- Requires-Dist: pytest>=8.3.4; extra == 'test'
+ Provides-Extra: sqlite
+ Requires-Dist: aiosqlite>=0.21.0; extra == 'sqlite'
  Provides-Extra: tools
- Requires-Dist: docling>=2.15.1; extra == 'tools'
+ Requires-Dist: docling>=2.15.0; extra == 'tools'
  Description-Content-Type: text/markdown

  ![PyPI - Version](https://img.shields.io/pypi/v/lionagi?labelColor=233476aa&color=231fc935)
@@ -305,7 +295,9 @@ integrations, and custom validations in a single coherent pipeline.
  ## Installation

  ```
- pip install lionagi
+ uv add lionagi # recommended to use pyproject and uv for dependency management
+
+ pip install lionagi # or install directly
  ```

  ## Quick Start
@@ -314,12 +306,12 @@ pip install lionagi
  from lionagi import Branch, iModel

  # Pick a model
- gpt4o = iModel(provider="openai", model="gpt-4o")
+ gpt41 = iModel(provider="openai", model="gpt-4.1-mini")

  # Create a Branch (conversation context)
  hunter = Branch(
      system="you are a hilarious dragon hunter who responds in 10 words rhymes.",
-     chat_model=gpt4o,
+     chat_model=gpt41,
  )

  # Communicate asynchronously
@@ -400,41 +392,75 @@ print(df.tail())
  ```python
  from lionagi import Branch, iModel

- gpt4o = iModel(provider="openai", model="gpt-4o")
  sonnet = iModel(
      provider="anthropic",
      model="claude-3-5-sonnet-20241022",
      max_tokens=1000, # max_tokens is required for anthropic models
  )

- branch = Branch(chat_model=gpt4o)
- # Switch mid-flow
- analysis = await branch.communicate("Analyze these stats", imodel=sonnet)
+ branch = Branch(chat_model=gpt41)
+ analysis = await branch.communicate("Analyze these stats", chat_model=sonnet) # Switch mid-flow
  ```

  Seamlessly route to different models in the same workflow.

  ### Claude Code Integration

- LionAGI now supports Anthropic's [Claude Code SDK](https://github.com/anthropics/claude-code-sdk), enabling autonomous coding capabilities with persistent session management:
+ LionAGI now supports Anthropic's Claude Code [Python SDK](https://github.com/anthropics/claude-code-sdk-python) and [CLI SDK](https://docs.anthropic.com/en/docs/claude-code/sdk), enabling autonomous coding capabilities with persistent session management. The CLI endpoint
+ connects directly to Claude Code and is recommended; use it either through a [proxy server](https://github.com/khive-ai/lionagi/tree/main/cookbooks/claude_proxy) or directly with the `query_cli` endpoint, provided you have already logged into the Claude Code CLI in your terminal.

  ```python
  from lionagi import iModel, Branch

- # Create a Claude Code model
- model = iModel(
-     provider="claude_code",
-     endpoint="query_cli",
-     model="sonnet",
-     allowed_tools=["Write", "Read", "Edit"], # Control which tools Claude can use
-     permission_mode = "bypassPermissions", # Bypass tool permission checks (use with caution!),
-     verbose_output=True, # Enable detailed output for debugging
- )
+ def create_cc_model():
+     return iModel(
+         provider="claude_code",
+         endpoint="query_cli",
+         model="sonnet",
+         verbose_output=True, # Enable detailed output for debugging
+     )

  # Start a coding session
- branch = Branch(chat_model=model)
- response = await branch.communicate("Explain the architecture of protocols, operations, and branch")
- response2 = await branch.communicate("how do these parts form lionagi system")
+ orchestrator = Branch(chat_model=create_cc_model())
+ response = await orchestrator.communicate("Explain the architecture of protocols, operations, and branch")
+
+ # continue the session with more queries
+ response2 = await orchestrator.communicate("how do these parts form lionagi system")
+ ```
+
+ ### Fan-out / fan-in orchestration with Claude Code
+
+ ```python
+ # use structured outputs with claude code
+ from lionagi.fields import LIST_INSTRUCT_FIELD_MODEL, Instruct
+
+ response3 = await orchestrator.operate(
+     instruct=Instruct(
+         instruction="create 4 research questions for parallel discovery",
+         guidance="put into `instruct_models` field as part of your structured result message",
+         context="I'd like to create an orchestration system for AI agents using lionagi"
+     ),
+     field_models=[LIST_INSTRUCT_FIELD_MODEL],
+ )
+
+ len(response3.instruct_models) # should be 4
+
+ async def handle_instruct(instruct):
+     sub_branch = Branch(
+         system="You are a diligent research expert.",
+         chat_model=create_cc_model(),
+     )
+     return await sub_branch.operate(instruct=instruct)
+
+ # run in parallel across all instruct models
+ from lionagi.utils import alcall
+ responses = await alcall(response3.instruct_models, handle_instruct)
+
+ # now hand these reports back to the orchestrator
+ final_response = await orchestrator.communicate(
+     "please synthesize these research findings into a final report",
+     context=responses,
+ )
  ```

  Key features:
@@ -446,9 +472,14 @@ Key features:
  ### optional dependencies

  ```
- pip install "lionagi[reader]"
- pip install "lionagi[ollama]"
- pip install "lionagi[claude-code]"
+ "lionagi[reader]" - Reader tool for any unstructured data and web pages
+ "lionagi[ollama]" - Ollama model support for local inference
+ "lionagi[claude-code]" - Claude Code Python SDK integration (the CLI endpoint does not require this)
+ "lionagi[rich]" - Rich output formatting for better console display
+ "lionagi[schema]" - Convert pydantic schema to make the Model class persistent
+ "lionagi[postgres]" - Postgres database support for storing and retrieving structured data
+ "lionagi[graph]" - Graph display for visualizing complex workflows
+ "lionagi[sqlite]" - SQLite database support for lightweight data storage (also needs the `postgres` option)
  ```

  ## Community & Contributing
@@ -6,7 +6,7 @@ lionagi/config.py,sha256=Dxs5FA9UCv1YX5H54qOJcPsDrIF9wFokWEPZ212eH-k,3715
  lionagi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lionagi/settings.py,sha256=HDuKCEJCpc4HudKodBnhoQUGuTGhRHdlIFhbtf3VBtY,1633
  lionagi/utils.py,sha256=n2aUMSnLLgy7HWFlfzDV1OqMDbatLNX0QYc7jIjXwQA,75023
- lionagi/version.py,sha256=8YscPb5efWnULR4pGFhtuY0RRhqCGPpeGi69mc5MYv4,23
+ lionagi/version.py,sha256=FrbFRQ_ImRf8C9xWDXvZmcPev0r6romK0PIGZ4NmgIg,23
  lionagi/adapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lionagi/adapters/async_postgres_adapter.py,sha256=Kf2YCzwRqKpEHY3GQCXEiMl201CCIkDvXcvddwZNkkE,12723
  lionagi/adapters/postgres_model_adapter.py,sha256=e_wfJNyihbpLCXuAs_W9tbLoMXAXbAXtkQDaHfqWz3o,4555
@@ -15,7 +15,7 @@ lionagi/fields/action.py,sha256=OziEpbaUeEVo34KdtbzDxXJBgkf3QLxlcKIQAfHe4O0,5791
  lionagi/fields/base.py,sha256=mvgqxLonCROszMjnG8QWt00l-MvIr_mnGvCtaH-SQ_k,3814
  lionagi/fields/code.py,sha256=TFym51obzaSfCmeRoHZJyBtjfDI4tvl9F-1sjFc9rMw,7713
  lionagi/fields/file.py,sha256=DhQ_HE0RvTNzkvBGQHRgbMYSokDkzE8GEu814i6jw5Q,7297
- lionagi/fields/instruct.py,sha256=cpAfKAhQek3Tu6nja5l04zpoBlTZSn10SxEPD7YFahA,4367
+ lionagi/fields/instruct.py,sha256=2koYdY7XyJh5lrd7tD_BA9bqCbmpsdTDaASEv_dX4i8,4334
  lionagi/fields/reason.py,sha256=eTGI9jDaaZJInUjCR9lEpYvw2_1UUF-xzCVCFP3-JRI,1437
  lionagi/fields/research.py,sha256=eEPKocx8eQy2E9FExRWVIo6MK_xvmwBAoRZciBY3RG0,1421
  lionagi/libs/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
@@ -35,6 +35,9 @@ lionagi/libs/file/file_ops.py,sha256=HBiIh1EljIJ5VTIXuyvJM0ppSs0YYOPUWmgDMJT634U
  lionagi/libs/file/params.py,sha256=SZ5DkoffWfxWudOAYCfCxpL8UIm-1UjeyTtploo-Lqs,5824
  lionagi/libs/file/process.py,sha256=EsnEJcQUm4ReP7qkCeMvL4Qe6fLRcENVWZndh9TSUsc,8692
  lionagi/libs/file/save.py,sha256=5PvX1o1danZEq6S7V-GHIbZ6n-m-1IDdWhXPRbUXSVE,2865
+ lionagi/libs/hash/__init__.py,sha256=z3H4Wu7VBiyZe54vMIA_Kpcbexl-GY0KGEJkIcP1S6M,57
+ lionagi/libs/hash/hash_dict.py,sha256=g20yJfuVhAsfsBOWlkO889DHte6cbUCl6vV5QMT8nUo,3499
+ lionagi/libs/hash/manager.py,sha256=DhFHC_yh5p9_yCgc8QNgsUDFOJAQY2qNxjw8rhh69G8,1089
  lionagi/libs/nested/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
  lionagi/libs/nested/flatten.py,sha256=sB4jxZRoaUbjak9RbIWVWNKz2hzkhQJPFffV_Ws1GA0,5479
  lionagi/libs/nested/nfilter.py,sha256=kF7AWjLFHr22SOjRBSTx-7iRPaR7gs0FY5Y4XF2sWJ8,1768
@@ -87,7 +90,7 @@ lionagi/libs/validate/string_similarity.py,sha256=ymuqTKd95_h8ywD9VTzf24gIPyMz1M
  lionagi/libs/validate/validate_boolean.py,sha256=bjiX_WZ3Bg8XcqoWLzE1G9BpO0AisrlZUxrpye_mlGk,3614
  lionagi/models/__init__.py,sha256=R7DEGWuhH-izP7eN6SOw24-I4Mr2IVPXF4gNysmF2zQ,457
  lionagi/models/field_model.py,sha256=pRFUYs72BRB6jo4NkgeqslCp0xwqzvh00ibh8uaMAws,23674
- lionagi/models/hashable_model.py,sha256=oOqR3OJCU9cJfWHiG0WaEw0GMqfE2WTt4cy7WsAsiRg,829
+ lionagi/models/hashable_model.py,sha256=fpBv4_BcD5F5Laxa_uQ13EPMnVUBP-J0IMxzV9-35Cc,868
  lionagi/models/model_params.py,sha256=zVU-PHp3skjK5ZVjpsPs1k_VJiS8X0hgct0xs6Z6W_Y,10955
  lionagi/models/note.py,sha256=okWJL4mGqt0bUVxHRyqsfJr7tEh6wwgYhF1CegxudIA,12202
  lionagi/models/operable_model.py,sha256=fXbcpBjO-SoaeF8fn-F1_KIcYw9_L73VIUM1BDg5hj4,19905
@@ -163,7 +166,7 @@ lionagi/protocols/messages/assistant_response.py,sha256=jrzRPVHHDnPw86Xp0IHnPy0t
  lionagi/protocols/messages/base.py,sha256=Ng1Q8yIIIFauUv53LnwDeyOrM-cSCfsHM1GwkxChf2o,2317
  lionagi/protocols/messages/instruction.py,sha256=0dUsUYd6xYsbOHU7GafvqBkpDQQoFOXJXT-UoJArYWU,21146
  lionagi/protocols/messages/manager.py,sha256=e1jW5JH_3jZpZbFCvWZX0TG0DCgzANRUejW_6NqbtDc,17182
- lionagi/protocols/messages/message.py,sha256=GJLDgluTc_dBikTjGM3Im73kQQaCGsvSoJcaxAJwtFo,7723
+ lionagi/protocols/messages/message.py,sha256=RPP-zLs1Ly_-bBua9FIzSnDyyK9s4X0_WaQVyZJqToM,7757
  lionagi/protocols/messages/system.py,sha256=x0F1C57SFHaO2-Z9cy1QshYlxv8wjl7VppooaGKbMIg,4658
  lionagi/protocols/messages/templates/README.md,sha256=Ch4JrKSjd85fLitAYO1OhZjNOGKHoEwaKQlcV16jiUI,1286
  lionagi/protocols/messages/templates/action_request.jinja2,sha256=d6OmxHKyvvNDSK4bnBM3TGSUk_HeE_Q2EtLAQ0ZBEJg,120
@@ -181,7 +184,7 @@ lionagi/service/manager.py,sha256=tN3p0kM7pg_CEs6wXK62_B_h49Q3nrU-9qniFhw2ABE,11
  lionagi/service/rate_limited_processor.py,sha256=JhkuzJMHUCdndkRbAUf9wUQI9zOw-dutRy_nHf8EE5I,6101
  lionagi/service/resilience.py,sha256=uYJYZQ9M-tje8ME3vJmYabXwKHF1c3Ij4-WrdCwogcs,18742
  lionagi/service/token_calculator.py,sha256=piTidArzUkIMCtOLC_HBLoZNYZcENQywgeKM31bxezM,6457
- lionagi/service/types.py,sha256=6zavqBxK1Fj0nB9eZgJn3JICxmdT-n0nn8YWZFzM5LU,508
+ lionagi/service/types.py,sha256=zZQL9tTTuqAPbUTi2U4wl5tKBNWl51qbPKwDVGhTeqs,546
  lionagi/service/connections/__init__.py,sha256=yHQZ7OJpCftd6CStYR8inbxjJydYdmv9kCvbUBhJ2zU,362
  lionagi/service/connections/api_calling.py,sha256=XetCrjMhOHNKGGv-NzHhBhVS7XjKPalrS_iExzU-4S4,8005
  lionagi/service/connections/endpoint.py,sha256=yNIjq9wETMnytynGbq3qY_dkyaMlaHrcfiZjS-tnmLg,14756
@@ -197,8 +200,8 @@ lionagi/service/connections/providers/oai_.py,sha256=FmQMEmOY7H7dZd4og-_cdd1Unzy
  lionagi/service/connections/providers/ollama_.py,sha256=jdx6dGeChwVk5TFfFRbpnrpKzj8YQZw6D5iWJ6zYmfk,4096
  lionagi/service/connections/providers/perplexity_.py,sha256=9MH9YmMy9Jg7JDMJHQxxMYHyjJ4NP0OlN7sCuhla85I,917
  lionagi/service/connections/providers/_claude_code/__init__.py,sha256=3lzOakDoBWmMaNnT2g-YwktPKa_Wme4lnPRSmOQfayY,105
- lionagi/service/connections/providers/_claude_code/models.py,sha256=NfKvD9ccjSFHn0iqcI5KnNbMLbqw1dnXqHn2a5y9vds,8032
- lionagi/service/connections/providers/_claude_code/stream_cli.py,sha256=lNXln8_o-vRAgZMPGF0j_7j-GS6mDA3CJsdkCiHYSmM,12265
+ lionagi/service/connections/providers/_claude_code/models.py,sha256=Iyx8YPv56n_GZN8e3mcZ6e7sYllhlVxX2Mp4zWd0BzM,8320
+ lionagi/service/connections/providers/_claude_code/stream_cli.py,sha256=0brD9mWRXPIiRSmha5n77JudW5tmhLNZmsjGgUWM060,12388
  lionagi/service/third_party/README.md,sha256=qFjWnI8rmLivIyr6Tc-hRZh-rQwntROp76af4MBNJJc,2214
  lionagi/service/third_party/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  lionagi/service/third_party/anthropic_models.py,sha256=oqSPSlcayYG-fS5BLiLeTtkrpaxgkPhEK_YgneumrOo,4004
@@ -215,7 +218,7 @@ lionagi/tools/types.py,sha256=XtJLY0m-Yi_ZLWhm0KycayvqMCZd--HxfQ0x9vFUYDE,230
  lionagi/tools/file/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
  lionagi/tools/file/reader.py,sha256=0TdnfVGVCKuM58MmGM-NyVjhU9BFoitkNYEepdc0z_Y,9529
  lionagi/tools/memory/tools.py,sha256=zTGBenVsF8Wuh303kWntmQSGlAFKonHNdh5ePuQ26KE,15948
- lionagi-0.14.6.dist-info/METADATA,sha256=uUjYz80ekrDdAq2blmqNu-K2AR0geC4oykPbMC61cbw,21236
- lionagi-0.14.6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- lionagi-0.14.6.dist-info/licenses/LICENSE,sha256=VXFWsdoN5AAknBCgFqQNgPWYx7OPp-PFEP961zGdOjc,11288
- lionagi-0.14.6.dist-info/RECORD,,
+ lionagi-0.14.8.dist-info/METADATA,sha256=_XF7GKTpMRcN_jpow1LJhSTdESVCqcN4el14Sc5-Lxc,22794
+ lionagi-0.14.8.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ lionagi-0.14.8.dist-info/licenses/LICENSE,sha256=VXFWsdoN5AAknBCgFqQNgPWYx7OPp-PFEP961zGdOjc,11288
+ lionagi-0.14.8.dist-info/RECORD,,