lm-deluge 0.0.30__tar.gz → 0.0.31__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (62)
  1. {lm_deluge-0.0.30/src/lm_deluge.egg-info → lm_deluge-0.0.31}/PKG-INFO +1 -1
  2. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/pyproject.toml +1 -1
  3. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/api_requests/bedrock.py +4 -1
  4. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/file.py +4 -1
  5. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/models.py +1 -1
  6. {lm_deluge-0.0.30 → lm_deluge-0.0.31/src/lm_deluge.egg-info}/PKG-INFO +1 -1
  7. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/LICENSE +0 -0
  8. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/README.md +0 -0
  9. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/setup.cfg +0 -0
  10. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/__init__.py +0 -0
  11. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/agent.py +0 -0
  12. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/api_requests/__init__.py +0 -0
  13. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/api_requests/anthropic.py +0 -0
  14. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/api_requests/base.py +0 -0
  15. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/api_requests/common.py +0 -0
  16. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/api_requests/deprecated/bedrock.py +0 -0
  17. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/api_requests/deprecated/cohere.py +0 -0
  18. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/api_requests/deprecated/deepseek.py +0 -0
  19. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/api_requests/deprecated/mistral.py +0 -0
  20. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/api_requests/deprecated/vertex.py +0 -0
  21. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/api_requests/gemini.py +0 -0
  22. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/api_requests/mistral.py +0 -0
  23. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/api_requests/openai.py +0 -0
  24. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/api_requests/response.py +0 -0
  25. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/batches.py +0 -0
  26. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/built_in_tools/anthropic/__init__.py +0 -0
  27. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/built_in_tools/anthropic/bash.py +0 -0
  28. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/built_in_tools/anthropic/computer_use.py +0 -0
  29. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/built_in_tools/anthropic/editor.py +0 -0
  30. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/built_in_tools/base.py +0 -0
  31. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/built_in_tools/openai.py +0 -0
  32. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/cache.py +0 -0
  33. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/client.py +0 -0
  34. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/config.py +0 -0
  35. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/embed.py +0 -0
  36. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/errors.py +0 -0
  37. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/gemini_limits.py +0 -0
  38. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/image.py +0 -0
  39. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/llm_tools/__init__.py +0 -0
  40. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/llm_tools/classify.py +0 -0
  41. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/llm_tools/extract.py +0 -0
  42. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/llm_tools/locate.py +0 -0
  43. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/llm_tools/ocr.py +0 -0
  44. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/llm_tools/score.py +0 -0
  45. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/llm_tools/translate.py +0 -0
  46. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/prompt.py +0 -0
  47. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/request_context.py +0 -0
  48. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/rerank.py +0 -0
  49. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/tool.py +0 -0
  50. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/tracker.py +0 -0
  51. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/usage.py +0 -0
  52. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/util/json.py +0 -0
  53. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/util/logprobs.py +0 -0
  54. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/util/spatial.py +0 -0
  55. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/util/validation.py +0 -0
  56. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge/util/xml.py +0 -0
  57. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge.egg-info/SOURCES.txt +0 -0
  58. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge.egg-info/dependency_links.txt +0 -0
  59. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge.egg-info/requires.txt +0 -0
  60. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/src/lm_deluge.egg-info/top_level.txt +0 -0
  61. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/tests/test_builtin_tools.py +0 -0
  62. {lm_deluge-0.0.30 → lm_deluge-0.0.31}/tests/test_native_mcp_server.py +0 -0

PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lm_deluge
-Version: 0.0.30
+Version: 0.0.31
 Summary: Python utility for using LLM API models.
 Author-email: Benjamin Anderson <ben@trytaylor.ai>
 Requires-Python: >=3.10

pyproject.toml
@@ -3,7 +3,7 @@ requires = ["setuptools", "wheel"]
 
 [project]
 name = "lm_deluge"
-version = "0.0.30"
+version = "0.0.31"
 authors = [{ name = "Benjamin Anderson", email = "ben@trytaylor.ai" }]
 description = "Python utility for using LLM API models."
 readme = "README.md"

src/lm_deluge/api_requests/bedrock.py
@@ -132,7 +132,7 @@ async def _build_anthropic_bedrock_request(
     if len(mcp_servers) > 0:
         request_json["mcp_servers"] = mcp_servers
 
-    return request_json, base_headers, auth, url
+    return request_json, base_headers, auth, url, region
 
 
 class BedrockRequest(APIRequestBase):
@@ -140,6 +140,7 @@ class BedrockRequest(APIRequestBase):
         super().__init__(context=context)
 
         self.model = APIModel.from_registry(self.context.model_name)
+        self.region = None  # Will be set during build_request
 
     async def build_request(self):
         self.url = f"{self.model.api_base}/messages"
@@ -153,6 +154,7 @@ class BedrockRequest(APIRequestBase):
             base_headers,
             self.auth,
             self.url,
+            self.region,
         ) = await _build_anthropic_bedrock_request(self.model, self.context)
         self.request_header = self.merge_headers(
             base_headers, exclude_patterns=["anthropic", "openai", "gemini", "mistral"]
@@ -160,6 +162,7 @@ class BedrockRequest(APIRequestBase):
 
     async def execute_once(self) -> APIResponse:
         """Override execute_once to handle AWS4Auth signing."""
+        await self.build_request()
         import aiohttp
 
         assert self.context.status_tracker
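
Note: the bedrock.py change threads the resolved AWS region from the request builder back onto the request object, and execute_once now rebuilds the request before signing. Presumably this matters because SigV4 signatures are scoped to a region. The following is a hedged sketch of a region-aware signer using requests-aws4auth, not lm-deluge's actual code; the helper name and env-var handling are assumptions.

# Hypothetical sketch, not lm-deluge's implementation: SigV4 credentials are
# scoped to a region, so the signer needs the region that build_request resolved.
# Assumes requests-aws4auth is installed and AWS credentials are in the env.
import os
from requests_aws4auth import AWS4Auth

def make_bedrock_signer(region: str) -> AWS4Auth:
    return AWS4Auth(
        os.environ["AWS_ACCESS_KEY_ID"],      # access key id
        os.environ["AWS_SECRET_ACCESS_KEY"],  # secret key
        region,                               # e.g. "us-east-1"
        "bedrock",                            # service name in the credential scope
    )
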
src/lm_deluge/file.py
@@ -37,7 +37,10 @@ class File:
             header, encoded = self.data.split(",", 1)
             return base64.b64decode(encoded)
         else:
-            raise ValueError("unreadable file format")
+            err = f"unreadable file. self.data type: {type(self.data)}"
+            if isinstance(self.data, str) and len(self.data) < 1_000:
+                err += f". self.data: {len(self.data)}"
+            raise ValueError(err)
 
     def _mime(self) -> str:
         if self.media_type:
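
Note: the branch above handles data-URL inputs, and the new error message reports the type (and, for short strings, the length) of whatever could not be decoded. A minimal standalone illustration of the data-URL path follows; it is an assumption about the surrounding code, not the File class itself.

# Standalone illustration of the data-URL branch shown above (assumed context,
# not lm-deluge's File class): split off the "data:<mime>;base64," header and
# decode the remainder.
import base64

data_url = "data:application/pdf;base64," + base64.b64encode(b"%PDF-1.4").decode()
header, encoded = data_url.split(",", 1)
assert base64.b64decode(encoded) == b"%PDF-1.4"
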
src/lm_deluge/models.py
@@ -28,7 +28,7 @@ BUILTIN_MODELS = {
         "reasoning_model": False,
     },
     "llama-4-maverick": {
-        "id": "llama-4-scout",
+        "id": "llama-4-maverick",
         "name": "Llama-4-Maverick-17B-128E-Instruct-FP8",
         "api_base": "https://api.llama.com/compat/v1",
         "api_key_env_var": "META_API_KEY",
src/lm_deluge.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lm_deluge
-Version: 0.0.30
+Version: 0.0.31
 Summary: Python utility for using LLM API models.
 Author-email: Benjamin Anderson <ben@trytaylor.ai>
 Requires-Python: >=3.10