lollms-client 0.19.0__py3-none-any.whl → 0.19.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of lollms-client might be problematic.
- lollms_client/__init__.py +1 -1
- {lollms_client-0.19.0.dist-info → lollms_client-0.19.1.dist-info}/METADATA +71 -16
- {lollms_client-0.19.0.dist-info → lollms_client-0.19.1.dist-info}/RECORD +6 -6
- {lollms_client-0.19.0.dist-info → lollms_client-0.19.1.dist-info}/WHEEL +0 -0
- {lollms_client-0.19.0.dist-info → lollms_client-0.19.1.dist-info}/licenses/LICENSE +0 -0
- {lollms_client-0.19.0.dist-info → lollms_client-0.19.1.dist-info}/top_level.txt +0 -0
lollms_client/__init__.py
CHANGED
````diff
@@ -7,7 +7,7 @@ from lollms_client.lollms_utilities import PromptReshaper # Keep general utiliti
 from lollms_client.lollms_mcp_binding import LollmsMCPBinding, LollmsMCPBindingManager
 
 
-__version__ = "0.19.0"
+__version__ = "0.19.1" # Updated version
 
 # Optionally, you could define __all__ if you want to be explicit about exports
 __all__ = [
````
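The only code change in this release is the version bump above. As a quick sanity check after upgrading, the installed version can be read back at runtime (a minimal sketch; it assumes only the `__version__` attribute shown in the diff):

```python
# Verify the installed lollms-client matches the wheel you expect.
import lollms_client

print(lollms_client.__version__)  # "0.19.1" for this release
```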
{lollms_client-0.19.0.dist-info → lollms_client-0.19.1.dist-info}/METADATA
CHANGED

````diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lollms_client
-Version: 0.19.0
+Version: 0.19.1
 Summary: A client library for LoLLMs generate endpoint
 Author-email: ParisNeo <parisneoai@gmail.com>
 License: Apache Software License
@@ -39,7 +39,7 @@ Dynamic: license-file
 [](https://github.com/ParisNeo/lollms_client/stargazers/)
 [](https://github.com/ParisNeo/lollms_client/issues)
 
-**`lollms_client`** is a powerful and flexible Python library designed to simplify interactions with the **LoLLMs (Lord of Large Language Models)** ecosystem and various other Large Language Model (LLM) backends. It provides a unified API for text generation, multimodal operations (text-to-image, text-to-speech, etc.), function calling
+**`lollms_client`** is a powerful and flexible Python library designed to simplify interactions with the **LoLLMs (Lord of Large Language Models)** ecosystem and various other Large Language Model (LLM) backends. It provides a unified API for text generation, multimodal operations (text-to-image, text-to-speech, etc.), and robust function calling through the Model Context Protocol (MCP).
 
 Whether you're connecting to a remote LoLLMs server, an Ollama instance, the OpenAI API, or running models locally using GGUF (via `llama-cpp-python` or a managed `llama.cpp` server), Hugging Face Transformers, or vLLM, `lollms-client` offers a consistent and developer-friendly experience.
 
@@ -47,12 +47,12 @@ Whether you're connecting to a remote LoLLMs server, an Ollama instance, the Ope
 
 * 🔌 **Versatile Binding System:** Seamlessly switch between different LLM backends (LoLLMs, Ollama, OpenAI, Llama.cpp, Transformers, vLLM, OpenLLM) without major code changes.
 * 🗣️ **Multimodal Support:** Interact with models capable of processing images and generate various outputs like speech (TTS) and images (TTI).
-* 
-* 
-* 📞 **Function Calling:** Enable LLMs to invoke your custom Python functions, bridging the gap between language models and external tools or data sources.
+* 🤖 **Function Calling with MCP:** Empowers LLMs to use external tools and functions through the Model Context Protocol (MCP), with built-in support for local Python tool execution via `local_mcp` binding and its default tools (file I/O, internet search, Python interpreter, image generation).
+* 🚀 **Streaming & Callbacks:** Efficiently handle real-time text generation with customizable callback functions, including during MCP interactions.
 * 💬 **Discussion Management:** Utilities to easily manage and format conversation histories for chat applications.
 * ⚙️ **Configuration Management:** Flexible ways to configure bindings and generation parameters.
-* 🧩 **Extensible:** Designed to easily incorporate new LLM backends and modality services.
+* 🧩 **Extensible:** Designed to easily incorporate new LLM backends and modality services, including custom MCP toolsets.
+* 📝 **High-Level Operations:** Includes convenience methods for complex tasks like sequential summarization and deep text analysis directly within `LollmsClient`.
 
 ## Installation
 
````
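The "Versatile Binding System" bullet above is the central design point of this README rewrite. A minimal sketch of what switching backends looks like, using only constructor arguments and binding names that appear elsewhere in this diff (`binding_name`, `model_name`; the model names are placeholders):

```python
from lollms_client import LollmsClient

# Same client API, different backends: only the binding selection changes.
lc_ollama = LollmsClient(binding_name="ollama", model_name="mistral")
lc_remote = LollmsClient(binding_name="lollms")  # remote LoLLMs server; assumes a server-side default model
# lc_local = LollmsClient(binding_name="pythonllamacpp", model_name="path/to/model.gguf")  # hypothetical local GGUF path
```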
````diff
@@ -119,12 +119,61 @@ except Exception as e:
 
 ```
 
+### Function Calling with MCP
+
+`lollms-client` supports robust function calling via the Model Context Protocol (MCP), allowing LLMs to interact with your custom Python tools or pre-defined utilities.
+
+```python
+from lollms_client import LollmsClient, MSG_TYPE
+from ascii_colors import ASCIIColors
+import json # For pretty printing results
+
+# Example callback for MCP streaming
+def mcp_stream_callback(chunk: str, msg_type: MSG_TYPE, metadata: dict = None, turn_history: list = None) -> bool:
+    if msg_type == MSG_TYPE.MSG_TYPE_CHUNK: ASCIIColors.success(chunk, end="", flush=True) # LLM's final answer or thought process
+    elif msg_type == MSG_TYPE.MSG_TYPE_STEP_START: ASCIIColors.info(f"\n>> MCP Step Start: {metadata.get('tool_name', chunk)}", flush=True)
+    elif msg_type == MSG_TYPE.MSG_TYPE_STEP_END: ASCIIColors.success(f"\n<< MCP Step End: {metadata.get('tool_name', chunk)} -> Result: {json.dumps(metadata.get('result', ''))}", flush=True)
+    elif msg_type == MSG_TYPE.MSG_TYPE_INFO and metadata and metadata.get("type") == "tool_call_request": ASCIIColors.info(f"\nAI requests: {metadata.get('name')}({metadata.get('params')})", flush=True)
+    return True
+
+try:
+    # Initialize LollmsClient with an LLM binding and the local_mcp binding
+    lc = LollmsClient(
+        binding_name="ollama", model_name="mistral", # Example LLM
+        mcp_binding_name="local_mcp" # Enables default tools (file_writer, internet_search, etc.)
+                                     # or custom tools if mcp_binding_config.tools_folder_path is set.
+    )
+
+    user_query = "What were the main AI headlines last week and write a summary to 'ai_news.txt'?"
+    ASCIIColors.blue(f"User Query: {user_query}")
+    ASCIIColors.yellow("AI Processing with MCP (streaming):")
+
+    mcp_result = lc.generate_with_mcp(
+        prompt=user_query,
+        streaming_callback=mcp_stream_callback
+    )
+    print("\n--- End of MCP Interaction ---")
+
+    if mcp_result.get("error"):
+        ASCIIColors.error(f"MCP Error: {mcp_result['error']}")
+    else:
+        ASCIIColors.cyan(f"\nFinal Answer from AI: {mcp_result.get('final_answer', 'N/A')}")
+        ASCIIColors.magenta("\nTool Calls Made:")
+        for tc in mcp_result.get("tool_calls", []):
+            print(f"  - Tool: {tc.get('name')}, Params: {tc.get('params')}, Result (first 50 chars): {str(tc.get('result'))[:50]}...")
+
+except Exception as e:
+    ASCIIColors.error(f"An error occurred in MCP example: {e}")
+    trace_exception(e) # Assuming you have trace_exception utility
+```
+
+For a comprehensive guide on function calling and setting up tools, please refer to the [Usage Guide (DOC_USE.md)](DOC_USE.md).
+
 ## Documentation
 
 For more in-depth information, please refer to:
 
-* **[Usage Guide (DOC_USE.md)](DOC_USE.md):** Learn how to use `LollmsClient`, different bindings, modality features,
-* **[Developer Guide (DOC_DEV.md)](DOC_DEV.md):** Understand the architecture, how to create new bindings, and contribute to the library.
+* **[Usage Guide (DOC_USE.md)](DOC_USE.md):** Learn how to use `LollmsClient`, different bindings, modality features, function calling with MCP, and high-level operations.
+* **[Developer Guide (DOC_DEV.md)](DOC_DEV.md):** Understand the architecture, how to create new bindings (LLM, modality, MCP), and contribute to the library.
 
 ## Core Concepts
 
````
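The result dictionary used in the new README example above relies on three keys: `error`, `final_answer`, and `tool_calls` (each tool call carrying `name`, `params`, and `result`). A compact debugging helper built only on those documented keys might look like this (a sketch; the result may carry additional keys this diff does not show):

```python
import json

def dump_mcp_result(mcp_result: dict) -> None:
    """Pretty-print a generate_with_mcp result using the keys shown in the README."""
    if mcp_result.get("error"):
        print(f"error: {mcp_result['error']}")
        return
    print(f"final_answer: {mcp_result.get('final_answer', 'N/A')}")
    for i, tc in enumerate(mcp_result.get("tool_calls", []), start=1):
        print(f"tool_call[{i}]: {tc.get('name')}")
        print(f"  params: {json.dumps(tc.get('params'), default=str)}")
        print(f"  result: {str(tc.get('result'))[:200]}")
```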
````diff
@@ -134,8 +183,9 @@ graph LR
 
     subgraph LollmsClient_Core
         LC -- Manages --> LLB[LLM Binding];
-        LC --
-        LC --
+        LC -- Manages --> MCPB[MCP Binding];
+        LC -- Orchestrates --> MCP_Interaction[generate_with_mcp];
+        LC -- Provides --> HighLevelOps[High-Level Ops<br>(summarize, deep_analyze etc.)];
         LC -- Provides Access To --> DM[DiscussionManager];
         LC -- Provides Access To --> ModalityBindings[TTS, TTI, STT etc.];
     end
@@ -148,14 +198,19 @@ graph LR
         LLB --> LocalHF[Local HuggingFace<br>(transformers / vLLM)];
     end
 
-
+    MCP_Interaction --> MCPB;
+    MCPB --> LocalTools[Local Python Tools<br>(via local_mcp)];
+    MCPB --> RemoteTools[Remote MCP Tool Servers<br>(Future Potential)];
+
+
+    ModalityBindings --> ModalityServices[Modality Services<br>(e.g., LoLLMs Server TTS/TTI, local Bark/XTTS)];
 ```
 
-* **`LollmsClient`**: The central class for all interactions. It holds the currently active LLM binding and provides access to modality bindings and
+* **`LollmsClient`**: The central class for all interactions. It holds the currently active LLM binding, an optional MCP binding, and provides access to modality bindings and high-level operations.
 * **LLM Bindings**: These are plugins that allow `LollmsClient` to communicate with different LLM backends. You choose a binding (e.g., `"ollama"`, `"lollms"`, `"pythonllamacpp"`) when you initialize `LollmsClient`.
+* **🔧 MCP Bindings**: Enable tool use and function calling. `lollms-client` includes `local_mcp` for executing Python tools. It discovers tools from a specified folder (or uses its default set), each defined by a `.py` script and a `.mcp.json` metadata file.
 * **Modality Bindings**: Similar to LLM bindings, but for services like Text-to-Speech (`tts`), Text-to-Image (`tti`), etc.
-* 
-* **`FunctionCalling_Library`**: Enables you to define Python functions that the LLM can request to execute, allowing for tool usage.
+* **High-Level Operations**: Methods directly on `LollmsClient` (e.g., `sequential_summarize`, `deep_analyze`, `generate_code`, `yes_no`) for performing complex, multi-step AI tasks.
 * **`LollmsDiscussion`**: Helps manage and format conversation histories for chat applications.
 
 ## Examples
````
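The new "MCP Bindings" bullet says each `local_mcp` tool is a `.py` script paired with a `.mcp.json` metadata file in a tools folder. This diff does not show the schema, so the following is only an illustrative sketch of what such a pair might look like (the file layout, function name, and metadata contents are all guesses; DOC_USE.md is the authority):

```python
# my_tools/word_count.py -- hypothetical custom tool for the local_mcp binding.
# A companion my_tools/word_count.mcp.json would describe the tool to the
# binding (name, description, input schema -- exact keys per DOC_USE.md).

def word_count(text: str) -> dict:
    """Count the words in a piece of text; returns a JSON-serializable result."""
    return {"word_count": len(text.split())}
```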
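The "High-Level Operations" bullet names four convenience methods; their signatures are not shown in this diff, so treat the following as a hedged sketch of plausible calls (only the method names come from the README; every argument shape is an assumption):

```python
from lollms_client import LollmsClient

lc = LollmsClient(binding_name="ollama", model_name="mistral")
long_text = open("report.txt").read()  # illustrative input file

# Method names per the README bullet; parameters are illustrative guesses.
summary = lc.sequential_summarize(long_text)
analysis = lc.deep_analyze("What risks does the report identify?", long_text)
snippet = lc.generate_code("a Python function that reverses a string")
is_positive = lc.yes_no("Is the overall tone of this report positive?", long_text)
```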
````diff
@@ -164,8 +219,8 @@ The `examples/` directory in this repository contains a rich set of scripts demo
 * Basic text generation with different bindings.
 * Streaming and non-streaming examples.
 * Multimodal generation (text with images).
-* Using
-* Implementing and using function calls.
+* Using built-in methods for summarization and Q&A.
+* Implementing and using function calls with **`generate_with_mcp`** and the `local_mcp` binding (see `examples/function_calling_with_local_custom_mcp.py` and `examples/local_mcp.py`).
 * Text-to-Speech and Text-to-Image generation.
 
 Explore these examples to see `lollms-client` in action!
````
{lollms_client-0.19.0.dist-info → lollms_client-0.19.1.dist-info}/RECORD
CHANGED

````diff
@@ -17,7 +17,7 @@ examples/personality_test/chat_test.py,sha256=o2jlpoddFc-T592iqAiA29xk3x27KsdK5D
 examples/personality_test/chat_with_aristotle.py,sha256=4X_fwubMpd0Eq2rCReS2bgVlUoAqJprjkLXk2Jz6pXU,1774
 examples/personality_test/tesks_test.py,sha256=7LIiwrEbva9WWZOLi34fsmCBN__RZbPpxoUOKA_AtYk,1924
 examples/test_local_models/local_chat.py,sha256=slakja2zaHOEAUsn2tn_VmI4kLx6luLBrPqAeaNsix8,456
-lollms_client/__init__.py,sha256=
+lollms_client/__init__.py,sha256=rbZUoiSGIFLwCBjEUsAC68azS9mIfZ9EXmyasnBDbkY,910
 lollms_client/lollms_config.py,sha256=goEseDwDxYJf3WkYJ4IrLXwg3Tfw73CXV2Avg45M_hE,21876
 lollms_client/lollms_core.py,sha256=psVTrEtHYhjy9h014rHLotBC4Aj72PvG2OV0UAjLcvw,102496
 lollms_client/lollms_discussion.py,sha256=9b83m0D894jwpgssWYTQHbVxp1gJoI-J947Ui_dRXII,2073
@@ -67,8 +67,8 @@ lollms_client/tts_bindings/piper_tts/__init__.py,sha256=0IEWG4zH3_sOkSb9WbZzkeV5
 lollms_client/tts_bindings/xtts/__init__.py,sha256=FgcdUH06X6ZR806WQe5ixaYx0QoxtAcOgYo87a2qxYc,18266
 lollms_client/ttv_bindings/__init__.py,sha256=UZ8o2izQOJLQgtZ1D1cXoNST7rzqW22rL2Vufc7ddRc,3141
 lollms_client/ttv_bindings/lollms/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-lollms_client-0.19.
-lollms_client-0.19.
-lollms_client-0.19.
-lollms_client-0.19.
-lollms_client-0.19.
+lollms_client-0.19.1.dist-info/licenses/LICENSE,sha256=HrhfyXIkWY2tGFK11kg7vPCqhgh5DcxleloqdhrpyMY,11558
+lollms_client-0.19.1.dist-info/METADATA,sha256=MKuTL8GsNdArHgSQ_xxhqViSdpEVs0cAK4akk5tNGVM,13374
+lollms_client-0.19.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+lollms_client-0.19.1.dist-info/top_level.txt,sha256=NI_W8S4OYZvJjb0QWMZMSIpOrYzpqwPGYaklhyWKH2w,23
+lollms_client-0.19.1.dist-info/RECORD,,
````
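Each RECORD row above is `path,sha256=<digest>,<size>`, where the digest is an unpadded URL-safe base64 encoding of the file's SHA-256 hash, per the wheel spec. A minimal sketch for checking one installed file against its RECORD entry (the file path here is illustrative):

```python
import base64
import hashlib
from pathlib import Path

def record_digest(path: str) -> str:
    """Compute a file's sha256 digest in the exact format RECORD uses."""
    raw = hashlib.sha256(Path(path).read_bytes()).digest()
    return base64.urlsafe_b64encode(raw).rstrip(b"=").decode("ascii")

# Should print rbZUoiSGIFLwCBjEUsAC68azS9mIfZ9EXmyasnBDbkY for the 0.19.1 file above.
print(record_digest("lollms_client/__init__.py"))
```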
{lollms_client-0.19.0.dist-info → lollms_client-0.19.1.dist-info}/WHEEL
File without changes

{lollms_client-0.19.0.dist-info → lollms_client-0.19.1.dist-info}/licenses/LICENSE
File without changes

{lollms_client-0.19.0.dist-info → lollms_client-0.19.1.dist-info}/top_level.txt
File without changes