lm-deluge 0.0.65__tar.gz → 0.0.66__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of lm-deluge has been flagged as potentially problematic; details are available on the registry's advisory page for this release.

Files changed (81)
  1. {lm_deluge-0.0.65/src/lm_deluge.egg-info → lm_deluge-0.0.66}/PKG-INFO +1 -1
  2. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/pyproject.toml +1 -1
  3. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/api_requests/anthropic.py +3 -0
  4. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/api_requests/openai.py +3 -2
  5. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/prompt.py +12 -0
  6. {lm_deluge-0.0.65 → lm_deluge-0.0.66/src/lm_deluge.egg-info}/PKG-INFO +1 -1
  7. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/LICENSE +0 -0
  8. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/README.md +0 -0
  9. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/setup.cfg +0 -0
  10. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/__init__.py +0 -0
  11. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/api_requests/__init__.py +0 -0
  12. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/api_requests/base.py +0 -0
  13. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/api_requests/bedrock.py +0 -0
  14. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/api_requests/common.py +0 -0
  15. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/api_requests/deprecated/bedrock.py +0 -0
  16. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/api_requests/deprecated/cohere.py +0 -0
  17. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/api_requests/deprecated/deepseek.py +0 -0
  18. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/api_requests/deprecated/mistral.py +0 -0
  19. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/api_requests/deprecated/vertex.py +0 -0
  20. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/api_requests/gemini.py +0 -0
  21. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/api_requests/mistral.py +0 -0
  22. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/api_requests/response.py +0 -0
  23. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/batches.py +0 -0
  24. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/built_in_tools/anthropic/__init__.py +0 -0
  25. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/built_in_tools/anthropic/bash.py +0 -0
  26. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/built_in_tools/anthropic/computer_use.py +0 -0
  27. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/built_in_tools/anthropic/editor.py +0 -0
  28. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/built_in_tools/base.py +0 -0
  29. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/built_in_tools/openai.py +0 -0
  30. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/cache.py +0 -0
  31. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/cli.py +0 -0
  32. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/client.py +0 -0
  33. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/config.py +0 -0
  34. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/embed.py +0 -0
  35. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/errors.py +0 -0
  36. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/file.py +0 -0
  37. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/image.py +0 -0
  38. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/llm_tools/__init__.py +0 -0
  39. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/llm_tools/classify.py +0 -0
  40. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/llm_tools/extract.py +0 -0
  41. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/llm_tools/locate.py +0 -0
  42. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/llm_tools/ocr.py +0 -0
  43. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/llm_tools/score.py +0 -0
  44. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/llm_tools/translate.py +0 -0
  45. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/models/__init__.py +0 -0
  46. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/models/anthropic.py +0 -0
  47. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/models/bedrock.py +0 -0
  48. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/models/cerebras.py +0 -0
  49. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/models/cohere.py +0 -0
  50. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/models/deepseek.py +0 -0
  51. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/models/fireworks.py +0 -0
  52. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/models/google.py +0 -0
  53. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/models/grok.py +0 -0
  54. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/models/groq.py +0 -0
  55. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/models/meta.py +0 -0
  56. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/models/mistral.py +0 -0
  57. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/models/openai.py +0 -0
  58. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/models/openrouter.py +0 -0
  59. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/models/together.py +0 -0
  60. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/presets/cerebras.py +0 -0
  61. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/presets/meta.py +0 -0
  62. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/request_context.py +0 -0
  63. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/rerank.py +0 -0
  64. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/tool.py +0 -0
  65. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/tracker.py +0 -0
  66. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/usage.py +0 -0
  67. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/util/harmony.py +0 -0
  68. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/util/json.py +0 -0
  69. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/util/logprobs.py +0 -0
  70. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/util/spatial.py +0 -0
  71. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/util/validation.py +0 -0
  72. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/util/xml.py +0 -0
  73. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge/warnings.py +0 -0
  74. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge.egg-info/SOURCES.txt +0 -0
  75. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge.egg-info/dependency_links.txt +0 -0
  76. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge.egg-info/requires.txt +0 -0
  77. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/src/lm_deluge.egg-info/top_level.txt +0 -0
  78. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/tests/test_builtin_tools.py +0 -0
  79. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/tests/test_file_upload.py +0 -0
  80. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/tests/test_native_mcp_server.py +0 -0
  81. {lm_deluge-0.0.65 → lm_deluge-0.0.66}/tests/test_openrouter_generic.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: lm_deluge
3
- Version: 0.0.65
3
+ Version: 0.0.66
4
4
  Summary: Python utility for using LLM API models.
5
5
  Author-email: Benjamin Anderson <ben@trytaylor.ai>
6
6
  Requires-Python: >=3.10
@@ -3,7 +3,7 @@ requires = ["setuptools", "wheel"]
3
3
 
4
4
  [project]
5
5
  name = "lm_deluge"
6
- version = "0.0.65"
6
+ version = "0.0.66"
7
7
  authors = [{ name = "Benjamin Anderson", email = "ben@trytaylor.ai" }]
8
8
  description = "Python utility for using LLM API models."
9
9
  readme = "README.md"
@@ -90,6 +90,9 @@ def _build_anthropic_request(
90
90
  for tool in tools:
91
91
  if isinstance(tool, Tool):
92
92
  tool_definitions.append(tool.dump_for("anthropic"))
93
+ elif isinstance(tool, dict) and "url" in tool:
94
+ _add_beta(base_headers, "mcp-client-2025-04-04")
95
+ mcp_servers.append(tool)
93
96
  elif isinstance(tool, dict):
94
97
  tool_definitions.append(tool)
95
98
  # add betas if needed
@@ -184,7 +184,8 @@ class OpenAIRequest(APIRequestBase):
184
184
 
185
185
  content = Message("assistant", parts)
186
186
 
187
- usage = Usage.from_openai_usage(data["usage"])
187
+ if "usage" in data and data["usage"] is not None:
188
+ usage = Usage.from_openai_usage(data["usage"])
188
189
  if (
189
190
  self.context.sampling_params.logprobs
190
191
  and "logprobs" in data["choices"][0]
@@ -472,7 +473,7 @@ class OpenAIResponsesRequest(APIRequestBase):
472
473
  content = Message("assistant", parts)
473
474
 
474
475
  # Extract usage information
475
- if "usage" in data:
476
+ if "usage" in data and data["usage"] is not None:
476
477
  usage = Usage.from_openai_usage(data["usage"])
477
478
 
478
479
  except Exception as e:
@@ -329,6 +329,18 @@ class Message:
329
329
  """Get all thinking parts with proper typing."""
330
330
  return [part for part in self.parts if part.type == "thinking"] # type: ignore
331
331
 
332
+ # @staticmethod
333
+ # def dump_part(part: Part):
334
+ # if isinstance(value, Text):
335
+ # return {"type": "text", "text": value.text}
336
+ # if isinstance(value, Image):
337
+ # w, h = value.size
338
+ # return {"type": "image", "tag": f"<Image ({w}×{h})>"}
339
+ # if isinstance(value, File):
340
+ # size = value.size
341
+ # return {"type": "file", "tag": f"<File ({size} bytes)>"}
342
+ # return repr(value)
343
+
332
344
  def to_log(self) -> dict:
333
345
  """
334
346
  Return a JSON-serialisable dict that fully captures the message.
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: lm_deluge
3
- Version: 0.0.65
3
+ Version: 0.0.66
4
4
  Summary: Python utility for using LLM API models.
5
5
  Author-email: Benjamin Anderson <ben@trytaylor.ai>
6
6
  Requires-Python: >=3.10
File without changes
File without changes
File without changes