lm-deluge 0.0.8__py3-none-any.whl → 0.0.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of lm-deluge has been flagged as potentially problematic; see the package registry's advisory page for more details.

lm_deluge/tool.py CHANGED
@@ -1,40 +1,222 @@
1
- from typing import Any, Dict, Literal, Callable
1
+ from typing import Any, Literal, Callable, Coroutine, get_type_hints
2
+ import inspect
3
+ import asyncio
4
+
5
+ from fastmcp import Client # pip install fastmcp >= 2.0
6
+ from mcp.types import Tool as MCPTool
2
7
  from pydantic import BaseModel, Field
3
8
 
4
9
 
5
- class ToolSpec(BaseModel):
10
+ async def _load_all_mcp_tools(client: Client) -> list["Tool"]:
11
+ metas: list[MCPTool] = await client.list_tools()
12
+
13
+ def make_runner(name: str):
14
+ async def _async_call(**kw):
15
+ async with client:
16
+ # maybe should be call_tool_mcp if don't want to raise error
17
+ return await client.call_tool(name, kw)
18
+
19
+ return _async_call
20
+
21
+ tools: list[Tool] = []
22
+ for m in metas:
23
+ tools.append(
24
+ Tool(
25
+ name=m.name,
26
+ description=m.description,
27
+ parameters=m.inputSchema.get("properties", {}),
28
+ required=m.inputSchema.get("required", []),
29
+ additionalProperties=m.inputSchema.get("additionalProperties"),
30
+ run=make_runner(m.name),
31
+ )
32
+ )
33
+ return tools
34
+
35
+
36
+ class Tool(BaseModel):
6
37
  """
7
38
  Provider‑agnostic tool definition with no extra nesting.
8
39
  """
9
40
 
10
41
  name: str
11
- description: str
12
- parameters: Dict[str, Any]
42
+ description: str | None
43
+ parameters: dict[str, Any]
13
44
  required: list[str] = Field(default_factory=list)
14
45
  additionalProperties: bool | None = None # only
15
46
  # if desired, can provide a callable to run the tool
16
47
  run: Callable | None = None
17
48
 
49
+ def _is_async(self) -> bool:
50
+ return inspect.iscoroutinefunction(self.run)
51
+
18
52
  def call(self, **kwargs):
19
53
  if self.run is None:
20
54
  raise ValueError("No run function provided")
21
- return self.run(**kwargs)
22
55
 
23
- def _json_schema(self, include_additional_properties=False) -> Dict[str, Any]:
24
- return {
56
+ if self._is_async():
57
+ coro: Coroutine = self.run(**kwargs) # type: ignore[arg-type]
58
+ try:
59
+ loop = asyncio.get_running_loop()
60
+ except RuntimeError:
61
+ # no loop → safe to block
62
+ return asyncio.run(coro)
63
+ else:
64
+ # already inside a loop → schedule
65
+ return loop.create_task(coro)
66
+ else:
67
+ # plain function
68
+ return self.run(**kwargs)
69
+
70
+ async def acall(self, **kwargs):
71
+ if self.run is None:
72
+ raise ValueError("No run function provided")
73
+
74
+ if self._is_async():
75
+ return await self.run(**kwargs) # type: ignore[func-returns-value]
76
+ else:
77
+ loop = asyncio.get_running_loop()
78
+ assert self.run is not None, "can't run None"
79
+ return await loop.run_in_executor(None, lambda: self.run(**kwargs)) # type: ignore
80
+
81
+ @classmethod
82
+ def from_function(cls, func: Callable) -> "Tool":
83
+ """Create a Tool from a function using introspection."""
84
+ # Get function name
85
+ name = func.__name__
86
+
87
+ # Get docstring for description
88
+ description = func.__doc__ or f"Call the {name} function"
89
+ description = description.strip()
90
+
91
+ # Get function signature and type hints
92
+ sig = inspect.signature(func)
93
+ type_hints = get_type_hints(func)
94
+
95
+ # Build parameters and required list
96
+ parameters = {}
97
+ required = []
98
+
99
+ for param_name, param in sig.parameters.items():
100
+ # Skip *args and **kwargs
101
+ if param.kind in (param.VAR_POSITIONAL, param.VAR_KEYWORD):
102
+ continue
103
+
104
+ # Get type hint
105
+ param_type = type_hints.get(param_name, str)
106
+
107
+ # Convert Python types to JSON Schema types
108
+ json_type = cls._python_type_to_json_schema(param_type)
109
+
110
+ parameters[param_name] = json_type
111
+
112
+ # Add to required if no default value
113
+ if param.default is param.empty:
114
+ required.append(param_name)
115
+
116
+ return cls(
117
+ name=name,
118
+ description=description,
119
+ parameters=parameters,
120
+ required=required,
121
+ run=func,
122
+ )
123
+
124
+ @classmethod
125
+ async def from_mcp_config(
126
+ cls,
127
+ config: dict[str, Any],
128
+ *,
129
+ timeout: float | None = None,
130
+ ) -> list["Tool"]:
131
+ """
132
+ config: full Claude-Desktop-style dict *or* just its "mcpServers" block
133
+ Returns {server_key: [Tool, …], …}
134
+ """
135
+ # allow caller to pass either the whole desktop file or just the sub-dict
136
+ servers_block = config.get("mcpServers", config)
137
+
138
+ # FastMCP understands the whole config dict directly
139
+ client = Client({"mcpServers": servers_block}, timeout=timeout)
140
+ async with client:
141
+ all_tools = await _load_all_mcp_tools(client)
142
+
143
+ # bucket by prefix that FastMCP added (serverkey_toolname)
144
+ return all_tools
145
+
146
+ @classmethod
147
+ async def from_mcp(
148
+ cls,
149
+ server_name: str,
150
+ *,
151
+ tool_name: str,
152
+ timeout: float | None = None,
153
+ **server_spec, # url="…" OR command="…" args=[…]
154
+ ) -> Any: # Tool | list[Tool]
155
+ """
156
+ Thin wrapper for one server. Example uses:
157
+
158
+ Tool.from_mcp(url="https://weather.example.com/mcp")
159
+ Tool.from_mcp(command="python", args=["./assistant.py"], tool_name="answer_question")
160
+ """
161
+ # ensure at least one of command or url is defined
162
+ if not (server_spec.get("url") or server_spec.get("command")):
163
+ raise ValueError("most provide url or command")
164
+ # build a one-server desktop-style dict
165
+ cfg = {server_name: server_spec}
166
+ tools = await cls.from_mcp_config(cfg, timeout=timeout)
167
+ if tool_name is None:
168
+ return tools
169
+ for t in tools:
170
+ if t.name.endswith(f"{tool_name}"): # prefixed by FastMCP
171
+ return t
172
+ raise ValueError(f"Tool '{tool_name}' not found on that server")
173
+
174
+ @staticmethod
175
+ def _tool_from_meta(meta: dict[str, Any], runner) -> "Tool":
176
+ props = meta["inputSchema"].get("properties", {})
177
+ req = meta["inputSchema"].get("required", [])
178
+ addl = meta["inputSchema"].get("additionalProperties")
179
+ return Tool(
180
+ name=meta["name"],
181
+ description=meta.get("description", ""),
182
+ parameters=props,
183
+ required=req,
184
+ additionalProperties=addl,
185
+ run=runner,
186
+ )
187
+
188
+ @staticmethod
189
+ def _python_type_to_json_schema(python_type: type) -> dict[str, Any]:
190
+ """Convert Python type to JSON Schema type definition."""
191
+ if python_type is int:
192
+ return {"type": "integer"}
193
+ elif python_type is float:
194
+ return {"type": "number"}
195
+ elif python_type is str:
196
+ return {"type": "string"}
197
+ elif python_type is bool:
198
+ return {"type": "boolean"}
199
+ elif python_type is list:
200
+ return {"type": "array"}
201
+ elif python_type is dict:
202
+ return {"type": "object"}
203
+ else:
204
+ # Default to string for unknown types
205
+ return {"type": "string"}
206
+
207
+ def _json_schema(self, include_additional_properties=False) -> dict[str, Any]:
208
+ res = {
25
209
  "type": "object",
26
210
  "properties": self.parameters,
27
- "required": self.required or [],
28
- **(
29
- {"additionalProperties": self.additionalProperties}
30
- if self.additionalProperties is not None
31
- and include_additional_properties
32
- else {}
33
- ),
211
+ "required": self.required, # Use the tool's actual required list
34
212
  }
213
+ if include_additional_properties:
214
+ res["additionalProperties"] = False
215
+
216
+ return res
35
217
 
36
218
  # ---------- dumpers ----------
37
- def for_openai_responses(self) -> Dict[str, Any]:
219
+ def for_openai_responses(self) -> dict[str, Any]:
38
220
  return {
39
221
  "type": "function",
40
222
  "name": self.name,
@@ -42,25 +224,35 @@ class ToolSpec(BaseModel):
42
224
  "parameters": self._json_schema(include_additional_properties=True),
43
225
  }
44
226
 
45
- def for_openai_completions(self, *, strict: bool = True) -> Dict[str, Any]:
227
+ def for_openai_completions(self, *, strict: bool = True) -> dict[str, Any]:
228
+ if strict:
229
+ # For strict mode, all parameters must be required and additionalProperties must be false
230
+ schema = self._json_schema(include_additional_properties=True)
231
+ schema["required"] = list(
232
+ self.parameters.keys()
233
+ ) # All parameters required in strict mode
234
+ else:
235
+ # For non-strict mode, use the original required list
236
+ schema = self._json_schema(include_additional_properties=True)
237
+
46
238
  return {
47
239
  "type": "function",
48
240
  "function": {
49
241
  "name": self.name,
50
242
  "description": self.description,
51
- "parameters": self._json_schema(),
243
+ "parameters": schema,
52
244
  "strict": strict,
53
245
  },
54
246
  }
55
247
 
56
- def for_anthropic(self) -> Dict[str, Any]:
248
+ def for_anthropic(self) -> dict[str, Any]:
57
249
  return {
58
250
  "name": self.name,
59
251
  "description": self.description,
60
252
  "input_schema": self._json_schema(),
61
253
  }
62
254
 
63
- def for_google(self) -> Dict[str, Any]:
255
+ def for_google(self) -> dict[str, Any]:
64
256
  """
65
257
  Shape used by google.genai docs.
66
258
  """
@@ -76,7 +268,7 @@ class ToolSpec(BaseModel):
76
268
  "openai-responses", "openai-completions", "anthropic", "google"
77
269
  ],
78
270
  **kw,
79
- ) -> Dict[str, Any]:
271
+ ) -> dict[str, Any]:
80
272
  if provider == "openai-responses":
81
273
  return self.for_openai_responses()
82
274
  if provider == "openai-completions":
@@ -86,21 +278,3 @@ class ToolSpec(BaseModel):
86
278
  if provider == "google":
87
279
  return self.for_google()
88
280
  raise ValueError(provider)
89
-
90
-
91
- # ---- computer tools (for non-CUA models) ----
92
- _BUTTONS = ["left", "right", "wheel", "back", "forward"]
93
-
94
- # --- helpers ----
95
- _COORD_OBJECT = {
96
- "type": "object",
97
- "properties": {
98
- "x": {"type": "integer", "description": "X-coordinate in pixels"},
99
- "y": {"type": "integer", "description": "Y-coordinate in pixels"},
100
- },
101
- "required": ["x", "y"],
102
- }
103
-
104
-
105
- def _coord_field(desc: str):
106
- return {"type": "integer", "description": desc}
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: lm_deluge
3
- Version: 0.0.8
3
+ Version: 0.0.10
4
4
  Summary: Python utility for using LLM API models.
5
5
  Author-email: Benjamin Anderson <ben@trytaylor.ai>
6
6
  Requires-Python: >=3.10
@@ -21,6 +21,7 @@ Requires-Dist: bs4
21
21
  Requires-Dist: lxml
22
22
  Requires-Dist: pdf2image
23
23
  Requires-Dist: pillow
24
+ Requires-Dist: fastmcp>=2.4
24
25
  Requires-Dist: fasttext-wheel
25
26
  Requires-Dist: fasttext-langdetect
26
27
  Dynamic: license-file
@@ -32,6 +33,8 @@ Dynamic: license-file
32
33
  - **Unified client** – Send prompts to all relevant models with a single client.
33
34
  - **Massive concurrency with throttling** – Set `max_tokens_per_minute` and `max_requests_per_minute` and let it fly. The client will process as many requests as possible while respecting rate limits and retrying failures.
34
35
  - **Spray across models/providers** – Configure a client with multiple models from any provider(s), and sampling weights. The client samples a model for each request.
36
+ - **Tool Use** – Unified API for defining tools for all providers, and creating tools automatically from python functions.
37
+ - **MCP Support** – Instantiate a `Tool` from a local or remote MCP server so that any LLM can use it, whether or not that provider natively supports MCP.
35
38
  - **Caching** – Save completions in a local or distributed cache to avoid repeated LLM calls to process the same input.
36
39
  - **Convenient message constructor** – No more looking up how to build an Anthropic messages list with images. Our `Conversation` and `Message` classes work great with our client or with the `openai` and `anthropic` packages.
37
40
  - **Sync and async APIs** – Use the client from sync or async code.
@@ -1,22 +1,23 @@
1
1
  lm_deluge/__init__.py,sha256=rndOr4Rcfnpttz-onWU3vVEm-MM0WDFgz6KexKPAx0k,222
2
2
  lm_deluge/cache.py,sha256=VB1kv8rM2t5XWPR60uhszFcxLDnVKOe1oA5hYjVDjIo,4375
3
- lm_deluge/client.py,sha256=ERH0SkNvdM1zc8HYS5dxRGxVxUb4CXpUhveG3mz-w2I,28533
3
+ lm_deluge/client.py,sha256=lGD4rqT7qHkTKddjRvKK_1bh7s8GNIzXzQ52GCZhfCg,28932
4
4
  lm_deluge/embed.py,sha256=m-X8UK4gV9KKD7Wv3yarAceMQaj7gR1JwzD_sB0MOQY,13183
5
5
  lm_deluge/errors.py,sha256=oHjt7YnxWbh-eXMScIzov4NvpJMo0-2r5J6Wh5DQ1tk,209
6
6
  lm_deluge/gemini_limits.py,sha256=V9mpS9JtXYz7AY6OuKyQp5TuIMRH1BVv9YrSNmGmHNA,1569
7
7
  lm_deluge/image.py,sha256=hFbRajqEVQbkirAfOxsTPkeq-27Zl-so4AWBFeUbpBI,7161
8
- lm_deluge/models.py,sha256=w_OqA4Jxcy8LCCcdPRsGzg8iLFv4S9fPS5b4oj82Bgs,42778
9
- lm_deluge/prompt.py,sha256=bhDAlfUQ_Fq6Wh-L9jOfoiMbDGyVKGkjGicnwKJWpcI,12680
8
+ lm_deluge/models.py,sha256=6c_UZ3KlygpHpF0nq1_MRLtgOBdB1Q6FffLgm4ye_t0,44999
9
+ lm_deluge/prompt.py,sha256=_pJYwgjL39lDzMNmae8pPIBoORm_ekSM_9qU2iGGpOc,25445
10
10
  lm_deluge/rerank.py,sha256=tW1c3gQCAqaF8Ez-r-4qxYAcdKqxnLMxwHApKOUKwk4,11289
11
11
  lm_deluge/sampling_params.py,sha256=E2kewh1vz-1Qcy5xNBCzihfGgT_GcHYMfzaWb3FLiXs,739
12
- lm_deluge/tool.py,sha256=RVUW3E3FW11jCM-R7pIL1GpRs1YKCOjvTkL1D5xPetk,3196
12
+ lm_deluge/tool.py,sha256=3hlOTdm-RJMGHOU2tI_quJa2UNIrXPT8hxGb3mnheAg,9462
13
13
  lm_deluge/tracker.py,sha256=Dk99scN_NeDEO0gkLO5efXiZq11Ga-k6cerUHWN7IWY,1292
14
14
  lm_deluge/api_requests/__init__.py,sha256=_aSpD6CJL9g6OpLPoChXiHjl4MH_OlGcKgfZaW8cgLM,71
15
- lm_deluge/api_requests/anthropic.py,sha256=URbiD-ANn_P3StFJVP2JoDWuoloZVsAUly8CGSyV2Kw,6618
16
- lm_deluge/api_requests/base.py,sha256=Yt5Bxd5C5mZrbAMQYDghk0KRhUChSbTEsVI8DoThZBs,14805
17
- lm_deluge/api_requests/common.py,sha256=EjwTnKrvgBx-HnRVt0kSJZ9RM7CM-QyhlIQkr1jxP-4,220
18
- lm_deluge/api_requests/mistral.py,sha256=ThlV1jBfhpAwkaqPKhdUq-lIq2OienRbhEaSK4cctvI,5370
19
- lm_deluge/api_requests/openai.py,sha256=YgJMUio23ks6VLv6BDBZAW6Bnfd2fpidSidaHXzyXFY,6135
15
+ lm_deluge/api_requests/anthropic.py,sha256=MMI_w9hVbevQpcqP3NVVindpTmLb2KHqjJQpIzCi5RM,7240
16
+ lm_deluge/api_requests/base.py,sha256=w0MEOCIccxxy2c67Y2Y-QBox9rinIxQ7MLnp8953sjQ,15954
17
+ lm_deluge/api_requests/bedrock.py,sha256=cvB85BFvL9HKTUsP9qFUCLQzJh83IQNAcLXuW6ReZK8,10520
18
+ lm_deluge/api_requests/common.py,sha256=U0mX_wC3Tzg2-1u9nYUCTQqYzuYJqvLrICCNW_dbbJM,287
19
+ lm_deluge/api_requests/mistral.py,sha256=gCi4R61oh759ZX6TKrT-fnQwIQaOGcPXhWrDsjJwPOY,5388
20
+ lm_deluge/api_requests/openai.py,sha256=BuMiM_2zJQXfnUjTT94JxJi3ZX5V-KQQueRG-R0SGuc,7361
20
21
  lm_deluge/api_requests/deprecated/bedrock.py,sha256=WrcIShCoO8JCUSlFOCHxg6KQCNTZfw3TpYTvSpYk4mA,11320
21
22
  lm_deluge/api_requests/deprecated/cohere.py,sha256=KgDScD6_bWhAzOY5BHZQKSA3kurt4KGENqC4wLsGmcU,5142
22
23
  lm_deluge/api_requests/deprecated/deepseek.py,sha256=FEApI93VAWDwuaqTooIyKMgONYqRhdUmiAPBRme-IYs,4582
@@ -30,8 +31,8 @@ lm_deluge/util/json.py,sha256=dCeG9j1D17rXmQJbKJH79X0CGof4Wlqd55TDg4D6ky8,5388
30
31
  lm_deluge/util/logprobs.py,sha256=UkBZakOxWluaLqHrjARu7xnJ0uCHVfLGHJdnYlEcutk,11768
31
32
  lm_deluge/util/validation.py,sha256=hz5dDb3ebvZrZhnaWxOxbNSVMI6nmaOODBkk0htAUhs,1575
32
33
  lm_deluge/util/xml.py,sha256=Ft4zajoYBJR3HHCt2oHwGfymGLdvp_gegVmJ-Wqk4Ck,10547
33
- lm_deluge-0.0.8.dist-info/licenses/LICENSE,sha256=uNNXGXPCw2TC7CUs7SEBkA-Mz6QBQFWUUEWDMgEs1dU,1058
34
- lm_deluge-0.0.8.dist-info/METADATA,sha256=sRRWcI9rQ0BlCENlRF6EdY-eJY-p9CTAFmak8tstGOM,8076
35
- lm_deluge-0.0.8.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
36
- lm_deluge-0.0.8.dist-info/top_level.txt,sha256=hqU-TJX93yBwpgkDtYcXyLr3t7TLSCCZ_reytJjwBaE,10
37
- lm_deluge-0.0.8.dist-info/RECORD,,
34
+ lm_deluge-0.0.10.dist-info/licenses/LICENSE,sha256=uNNXGXPCw2TC7CUs7SEBkA-Mz6QBQFWUUEWDMgEs1dU,1058
35
+ lm_deluge-0.0.10.dist-info/METADATA,sha256=hn8Arn1L8N9PDaPJzZtnDB9WMVZsm1Ur7suIq3jYvZs,8387
36
+ lm_deluge-0.0.10.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
37
+ lm_deluge-0.0.10.dist-info/top_level.txt,sha256=hqU-TJX93yBwpgkDtYcXyLr3t7TLSCCZ_reytJjwBaE,10
38
+ lm_deluge-0.0.10.dist-info/RECORD,,