lionagi 0.14.7__py3-none-any.whl → 0.14.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lionagi/fields/instruct.py +0 -1
- lionagi/service/connections/providers/_claude_code/models.py +11 -1
- lionagi/version.py +1 -1
- {lionagi-0.14.7.dist-info → lionagi-0.14.8.dist-info}/METADATA +65 -24
- {lionagi-0.14.7.dist-info → lionagi-0.14.8.dist-info}/RECORD +7 -7
- {lionagi-0.14.7.dist-info → lionagi-0.14.8.dist-info}/WHEEL +0 -0
- {lionagi-0.14.7.dist-info → lionagi-0.14.8.dist-info}/licenses/LICENSE +0 -0
lionagi/service/connections/providers/_claude_code/models.py
CHANGED
@@ -61,6 +61,7 @@ class ClaudeCodeRequest(BaseModel):
|
|
61
61
|
max_thinking_tokens: int | None = None
|
62
62
|
mcp_tools: list[str] = Field(default_factory=list)
|
63
63
|
mcp_servers: dict[str, Any] = Field(default_factory=dict)
|
64
|
+
mcp_config: str | Path | None = Field(None, exclude=True)
|
64
65
|
permission_mode: ClaudePermission | None = None
|
65
66
|
permission_prompt_tool_name: str | None = None
|
66
67
|
disallowed_tools: list[str] = Field(default_factory=list)
|
@@ -71,7 +72,7 @@ class ClaudeCodeRequest(BaseModel):
|
|
71
72
|
description="Automatically finish the conversation after the first response",
|
72
73
|
)
|
73
74
|
verbose_output: bool = Field(default=False)
|
74
|
-
cli_display_theme: Literal["light", "dark"] = "
|
75
|
+
cli_display_theme: Literal["light", "dark"] = "dark"
|
75
76
|
|
76
77
|
# ------------------------ validators & helpers --------------------------
|
77
78
|
@field_validator("permission_mode", mode="before")
|
@@ -163,6 +164,15 @@ class ClaudeCodeRequest(BaseModel):
|
|
163
164
|
if self.add_dir:
|
164
165
|
args += ["--add-dir", self.add_dir]
|
165
166
|
|
167
|
+
if self.permission_prompt_tool_name:
|
168
|
+
args += [
|
169
|
+
"--permission-prompt-tool",
|
170
|
+
self.permission_prompt_tool_name,
|
171
|
+
]
|
172
|
+
|
173
|
+
if self.mcp_config:
|
174
|
+
args += ["--mcp-config", str(self.mcp_config)]
|
175
|
+
|
166
176
|
args += ["--model", self.model or "sonnet", "--verbose"]
|
167
177
|
return args
|
168
178
|
|
lionagi/version.py
CHANGED
@@ -1 +1 @@
|
|
1
|
-
__version__ = "0.14.7"
|
1
|
+
__version__ = "0.14.8"
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.4
|
2
2
|
Name: lionagi
|
3
|
-
Version: 0.14.7
|
3
|
+
Version: 0.14.8
|
4
4
|
Summary: An Intelligence Operating System.
|
5
5
|
Author-email: HaiyangLi <quantocean.li@gmail.com>, Liangbingyan Luo <llby_luo@outlook.com>
|
6
6
|
License: Apache License
|
@@ -295,7 +295,9 @@ integrations, and custom validations in a single coherent pipeline.
|
|
295
295
|
## Installation
|
296
296
|
|
297
297
|
```
|
298
|
-
|
298
|
+
uv add lionagi # recommended to use pyproject and uv for dependency management
|
299
|
+
|
300
|
+
pip install lionagi # or install directly
|
299
301
|
```
|
300
302
|
|
301
303
|
## Quick Start
|
@@ -304,12 +306,12 @@ pip install lionagi
|
|
304
306
|
from lionagi import Branch, iModel
|
305
307
|
|
306
308
|
# Pick a model
|
307
|
-
|
309
|
+
gpt41 = iModel(provider="openai", model="gpt-4.1-mini")
|
308
310
|
|
309
311
|
# Create a Branch (conversation context)
|
310
312
|
hunter = Branch(
|
311
313
|
system="you are a hilarious dragon hunter who responds in 10 words rhymes.",
|
312
|
-
chat_model=
|
314
|
+
chat_model=gpt41,
|
313
315
|
)
|
314
316
|
|
315
317
|
# Communicate asynchronously
|
@@ -390,41 +392,75 @@ print(df.tail())
|
|
390
392
|
```python
|
391
393
|
from lionagi import Branch, iModel
|
392
394
|
|
393
|
-
gpt4o = iModel(provider="openai", model="gpt-4o")
|
394
395
|
sonnet = iModel(
|
395
396
|
provider="anthropic",
|
396
397
|
model="claude-3-5-sonnet-20241022",
|
397
398
|
max_tokens=1000, # max_tokens is required for anthropic models
|
398
399
|
)
|
399
400
|
|
400
|
-
branch = Branch(chat_model=
|
401
|
-
# Switch mid-flow
|
402
|
-
analysis = await branch.communicate("Analyze these stats", imodel=sonnet)
|
401
|
+
branch = Branch(chat_model=gpt41)
|
402
|
+
analysis = await branch.communicate("Analyze these stats", chat_model=sonnet) # Switch mid-flow
|
403
403
|
```
|
404
404
|
|
405
405
|
Seamlessly route to different models in the same workflow.
|
406
406
|
|
407
407
|
### Claude Code Integration
|
408
408
|
|
409
|
-
LionAGI now supports Anthropic's
|
409
|
+
LionAGI now supports Anthropic's Claude Code [Python SDK](https://github.com/anthropics/claude-code-sdk-python), and [CLI SDK](https://docs.anthropic.com/en/docs/claude-code/sdk) enabling autonomous coding capabilities with persistent session management. The CLI endpoint
|
410
|
+
directly connects to claude code, and is recommended, you can either use it via a [proxy server](https://github.com/khive-ai/lionagi/tree/main/cookbooks/claude_proxy) or directly with `query_cli` endpoint, provided you have already logged onto claude code cli in your terminal.
|
410
411
|
|
411
412
|
```python
|
412
413
|
from lionagi import iModel, Branch
|
413
414
|
|
414
|
-
|
415
|
-
|
416
|
-
|
417
|
-
|
418
|
-
|
419
|
-
|
420
|
-
|
421
|
-
verbose_output=True, # Enable detailed output for debugging
|
422
|
-
)
|
415
|
+
def create_cc_model():
|
416
|
+
return iModel(
|
417
|
+
provider="claude_code",
|
418
|
+
endpoint="query_cli",
|
419
|
+
model="sonnet",
|
420
|
+
verbose_output=True, # Enable detailed output for debugging
|
421
|
+
)
|
423
422
|
|
424
423
|
# Start a coding session
|
425
|
-
|
426
|
-
response = await
|
427
|
-
|
424
|
+
orchestrator = Branch(chat_model=create_cc_model())
|
425
|
+
response = await orchestrator.communicate("Explain the architecture of protocols, operations, and branch")
|
426
|
+
|
427
|
+
# continue the session with more queries
|
428
|
+
response2 = await orchestrator.communicate("how do these parts form lionagi system")
|
429
|
+
```
|
430
|
+
|
431
|
+
### Fan out fan in pattern orchestration with claude code
|
432
|
+
|
433
|
+
```python
|
434
|
+
# use structured outputs with claude code
|
435
|
+
from lionagi.fields import LIST_INSTRUCT_FIELD_MODEL, Instruct
|
436
|
+
|
437
|
+
response3 = await orchestrator.operate(
|
438
|
+
instruct=Instruct(
|
439
|
+
instruction="create 4 research questions for parallel discovery",
|
440
|
+
guidance="put into `instruct_models` field as part of your structured result message",
|
441
|
+
context="I'd like to create an orchestration system for AI agents using lionagi"
|
442
|
+
),
|
443
|
+
field_models=[LIST_INSTRUCT_FIELD_MODEL],
|
444
|
+
)
|
445
|
+
|
446
|
+
len(response3.instruct_models) # should be 4
|
447
|
+
|
448
|
+
async def handle_instruct(instruct):
|
449
|
+
sub_branch = Branch(
|
450
|
+
system="You are an diligent research expert.",
|
451
|
+
chat_model=create_cc_model(),
|
452
|
+
)
|
453
|
+
return await sub_branch.operate(instruct=instruct)
|
454
|
+
|
455
|
+
# run in parallel across all instruct models
|
456
|
+
from lionagi.utils import alcall
|
457
|
+
responses = await alcall(response3.instruct_models, handle_instruct)
|
458
|
+
|
459
|
+
# now hand these reports back to the orchestrator
|
460
|
+
final_response = await orchestrator.communicate(
|
461
|
+
"please synthesize these research findings into a final report",
|
462
|
+
context=responses,
|
463
|
+
)
|
428
464
|
```
|
429
465
|
|
430
466
|
Key features:
|
@@ -436,9 +472,14 @@ Key features:
|
|
436
472
|
### optional dependencies
|
437
473
|
|
438
474
|
```
|
439
|
-
|
440
|
-
|
441
|
-
|
475
|
+
"lionagi[reader]" - Reader tool for any unstructured data and web pages
|
476
|
+
"lionagi[ollama]" - Ollama model support for local inference
|
477
|
+
"lionagi[claude-code]" - Claude code python SDK integration (cli endpoint does not require this)
|
478
|
+
"lionagi[rich]" - Rich output formatting for better console display
|
479
|
+
"lionagi[schema]" - Convert pydantic schema to make the Model class persistent
|
480
|
+
"lionagi[postgres]" - Postgres database support for storing and retrieving structured data
|
481
|
+
"lionagi[graph]" - Graph display for visualizing complex workflows
|
482
|
+
"lionagi[sqlite]" - SQLite database support for lightweight data storage (also need `postgres` option)
|
442
483
|
```
|
443
484
|
|
444
485
|
## Community & Contributing
|
@@ -6,7 +6,7 @@ lionagi/config.py,sha256=Dxs5FA9UCv1YX5H54qOJcPsDrIF9wFokWEPZ212eH-k,3715
|
|
6
6
|
lionagi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
7
7
|
lionagi/settings.py,sha256=HDuKCEJCpc4HudKodBnhoQUGuTGhRHdlIFhbtf3VBtY,1633
|
8
8
|
lionagi/utils.py,sha256=n2aUMSnLLgy7HWFlfzDV1OqMDbatLNX0QYc7jIjXwQA,75023
|
9
|
-
lionagi/version.py,sha256=
|
9
|
+
lionagi/version.py,sha256=FrbFRQ_ImRf8C9xWDXvZmcPev0r6romK0PIGZ4NmgIg,23
|
10
10
|
lionagi/adapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
11
11
|
lionagi/adapters/async_postgres_adapter.py,sha256=Kf2YCzwRqKpEHY3GQCXEiMl201CCIkDvXcvddwZNkkE,12723
|
12
12
|
lionagi/adapters/postgres_model_adapter.py,sha256=e_wfJNyihbpLCXuAs_W9tbLoMXAXbAXtkQDaHfqWz3o,4555
|
@@ -15,7 +15,7 @@ lionagi/fields/action.py,sha256=OziEpbaUeEVo34KdtbzDxXJBgkf3QLxlcKIQAfHe4O0,5791
|
|
15
15
|
lionagi/fields/base.py,sha256=mvgqxLonCROszMjnG8QWt00l-MvIr_mnGvCtaH-SQ_k,3814
|
16
16
|
lionagi/fields/code.py,sha256=TFym51obzaSfCmeRoHZJyBtjfDI4tvl9F-1sjFc9rMw,7713
|
17
17
|
lionagi/fields/file.py,sha256=DhQ_HE0RvTNzkvBGQHRgbMYSokDkzE8GEu814i6jw5Q,7297
|
18
|
-
lionagi/fields/instruct.py,sha256=
|
18
|
+
lionagi/fields/instruct.py,sha256=2koYdY7XyJh5lrd7tD_BA9bqCbmpsdTDaASEv_dX4i8,4334
|
19
19
|
lionagi/fields/reason.py,sha256=eTGI9jDaaZJInUjCR9lEpYvw2_1UUF-xzCVCFP3-JRI,1437
|
20
20
|
lionagi/fields/research.py,sha256=eEPKocx8eQy2E9FExRWVIo6MK_xvmwBAoRZciBY3RG0,1421
|
21
21
|
lionagi/libs/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
|
@@ -200,7 +200,7 @@ lionagi/service/connections/providers/oai_.py,sha256=FmQMEmOY7H7dZd4og-_cdd1Unzy
|
|
200
200
|
lionagi/service/connections/providers/ollama_.py,sha256=jdx6dGeChwVk5TFfFRbpnrpKzj8YQZw6D5iWJ6zYmfk,4096
|
201
201
|
lionagi/service/connections/providers/perplexity_.py,sha256=9MH9YmMy9Jg7JDMJHQxxMYHyjJ4NP0OlN7sCuhla85I,917
|
202
202
|
lionagi/service/connections/providers/_claude_code/__init__.py,sha256=3lzOakDoBWmMaNnT2g-YwktPKa_Wme4lnPRSmOQfayY,105
|
203
|
-
lionagi/service/connections/providers/_claude_code/models.py,sha256=
|
203
|
+
lionagi/service/connections/providers/_claude_code/models.py,sha256=Iyx8YPv56n_GZN8e3mcZ6e7sYllhlVxX2Mp4zWd0BzM,8320
|
204
204
|
lionagi/service/connections/providers/_claude_code/stream_cli.py,sha256=0brD9mWRXPIiRSmha5n77JudW5tmhLNZmsjGgUWM060,12388
|
205
205
|
lionagi/service/third_party/README.md,sha256=qFjWnI8rmLivIyr6Tc-hRZh-rQwntROp76af4MBNJJc,2214
|
206
206
|
lionagi/service/third_party/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
@@ -218,7 +218,7 @@ lionagi/tools/types.py,sha256=XtJLY0m-Yi_ZLWhm0KycayvqMCZd--HxfQ0x9vFUYDE,230
|
|
218
218
|
lionagi/tools/file/__init__.py,sha256=5y5joOZzfFWERl75auAcNcKC3lImVJ5ZZGvvHZUFCJM,112
|
219
219
|
lionagi/tools/file/reader.py,sha256=0TdnfVGVCKuM58MmGM-NyVjhU9BFoitkNYEepdc0z_Y,9529
|
220
220
|
lionagi/tools/memory/tools.py,sha256=zTGBenVsF8Wuh303kWntmQSGlAFKonHNdh5ePuQ26KE,15948
|
221
|
-
lionagi-0.14.
|
222
|
-
lionagi-0.14.
|
223
|
-
lionagi-0.14.
|
224
|
-
lionagi-0.14.
|
221
|
+
lionagi-0.14.8.dist-info/METADATA,sha256=_XF7GKTpMRcN_jpow1LJhSTdESVCqcN4el14Sc5-Lxc,22794
|
222
|
+
lionagi-0.14.8.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
|
223
|
+
lionagi-0.14.8.dist-info/licenses/LICENSE,sha256=VXFWsdoN5AAknBCgFqQNgPWYx7OPp-PFEP961zGdOjc,11288
|
224
|
+
lionagi-0.14.8.dist-info/RECORD,,
|
File without changes
|
File without changes
|