lm-deluge 0.0.52__tar.gz → 0.0.53__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of lm-deluge might be problematic. Click here for more details.

Files changed (80) hide show
  1. {lm_deluge-0.0.52/src/lm_deluge.egg-info → lm_deluge-0.0.53}/PKG-INFO +1 -1
  2. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/pyproject.toml +1 -1
  3. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/api_requests/anthropic.py +10 -1
  4. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/client.py +5 -4
  5. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/models/anthropic.py +12 -0
  6. {lm_deluge-0.0.52 → lm_deluge-0.0.53/src/lm_deluge.egg-info}/PKG-INFO +1 -1
  7. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/LICENSE +0 -0
  8. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/README.md +0 -0
  9. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/setup.cfg +0 -0
  10. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/__init__.py +0 -0
  11. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/agent.py +0 -0
  12. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/api_requests/__init__.py +0 -0
  13. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/api_requests/base.py +0 -0
  14. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/api_requests/bedrock.py +0 -0
  15. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/api_requests/common.py +0 -0
  16. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/api_requests/deprecated/bedrock.py +0 -0
  17. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/api_requests/deprecated/cohere.py +0 -0
  18. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/api_requests/deprecated/deepseek.py +0 -0
  19. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/api_requests/deprecated/mistral.py +0 -0
  20. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/api_requests/deprecated/vertex.py +0 -0
  21. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/api_requests/gemini.py +0 -0
  22. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/api_requests/mistral.py +0 -0
  23. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/api_requests/openai.py +0 -0
  24. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/api_requests/response.py +0 -0
  25. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/batches.py +0 -0
  26. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/built_in_tools/anthropic/__init__.py +0 -0
  27. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/built_in_tools/anthropic/bash.py +0 -0
  28. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/built_in_tools/anthropic/computer_use.py +0 -0
  29. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/built_in_tools/anthropic/editor.py +0 -0
  30. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/built_in_tools/base.py +0 -0
  31. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/built_in_tools/openai.py +0 -0
  32. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/cache.py +0 -0
  33. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/cli.py +0 -0
  34. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/config.py +0 -0
  35. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/embed.py +0 -0
  36. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/errors.py +0 -0
  37. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/file.py +0 -0
  38. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/gemini_limits.py +0 -0
  39. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/image.py +0 -0
  40. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/llm_tools/__init__.py +0 -0
  41. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/llm_tools/classify.py +0 -0
  42. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/llm_tools/extract.py +0 -0
  43. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/llm_tools/locate.py +0 -0
  44. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/llm_tools/ocr.py +0 -0
  45. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/llm_tools/score.py +0 -0
  46. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/llm_tools/translate.py +0 -0
  47. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/models/__init__.py +0 -0
  48. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/models/bedrock.py +0 -0
  49. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/models/cerebras.py +0 -0
  50. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/models/cohere.py +0 -0
  51. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/models/deepseek.py +0 -0
  52. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/models/fireworks.py +0 -0
  53. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/models/google.py +0 -0
  54. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/models/grok.py +0 -0
  55. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/models/groq.py +0 -0
  56. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/models/meta.py +0 -0
  57. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/models/mistral.py +0 -0
  58. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/models/openai.py +0 -0
  59. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/models/openrouter.py +0 -0
  60. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/models/together.py +0 -0
  61. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/presets/cerebras.py +0 -0
  62. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/presets/meta.py +0 -0
  63. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/prompt.py +0 -0
  64. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/request_context.py +0 -0
  65. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/rerank.py +0 -0
  66. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/tool.py +0 -0
  67. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/tracker.py +0 -0
  68. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/usage.py +0 -0
  69. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/util/harmony.py +0 -0
  70. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/util/json.py +0 -0
  71. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/util/logprobs.py +0 -0
  72. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/util/spatial.py +0 -0
  73. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/util/validation.py +0 -0
  74. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge/util/xml.py +0 -0
  75. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge.egg-info/SOURCES.txt +0 -0
  76. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge.egg-info/dependency_links.txt +0 -0
  77. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge.egg-info/requires.txt +0 -0
  78. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/src/lm_deluge.egg-info/top_level.txt +0 -0
  79. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/tests/test_builtin_tools.py +0 -0
  80. {lm_deluge-0.0.52 → lm_deluge-0.0.53}/tests/test_native_mcp_server.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lm_deluge
-Version: 0.0.52
+Version: 0.0.53
 Summary: Python utility for using LLM API models.
 Author-email: Benjamin Anderson <ben@trytaylor.ai>
 Requires-Python: >=3.10
@@ -3,7 +3,7 @@ requires = ["setuptools", "wheel"]
 
 [project]
 name = "lm_deluge"
-version = "0.0.52"
+version = "0.0.53"
 authors = [{ name = "Benjamin Anderson", email = "ben@trytaylor.ai" }]
 description = "Python utility for using LLM API models."
 readme = "README.md"
@@ -60,7 +60,8 @@ def _build_anthropic_request(
     "type": "enabled",
     "budget_tokens": budget,
 }
-request_json.pop("top_p")
+if "top_p" in request_json:
+    request_json["top_p"] = max(request_json["top_p"], 0.95)
 request_json["temperature"] = 1.0
 request_json["max_tokens"] += budget
 else:
@@ -70,6 +71,11 @@ def _build_anthropic_request(
 if system_message is not None:
     request_json["system"] = system_message
 
+# handle temp + top_p for opus 4.1/sonnet 4.5
+if model.name in ["claude-sonnet-4-5-20250929", "claude-opus-4-1-20250805"]:
+    if "temperature" in request_json and "top_p" in request_json:
+        request_json.pop("top_p")
+
 if tools:
     mcp_servers = []
     tool_definitions = []
@@ -89,6 +95,9 @@ def _build_anthropic_request(
     _add_beta(base_headers, "computer-use-2025-01-24")
 elif tool["type"] == "code_execution_20250522":
     _add_beta(base_headers, "code-execution-2025-05-22")
+elif tool["type"] in ["memory_20250818", "clear_tool_uses_20250919"]:
+    _add_beta(base_headers, "context-management-2025-06-27")
+
 elif isinstance(tool, MCPServer):
     _add_beta(base_headers, "mcp-client-2025-04-04")
     mcp_servers.append(tool.for_anthropic())
@@ -427,8 +427,8 @@ class _LLMClient(BaseModel):
     try:
         response = await self.process_single_request(ctx, retry_queue)
         results[ctx.task_id] = response
-    except Exception as e:
-        # Create an error response for validation errors and other exceptions
+    except BaseException as exc:
+        # Capture cancellations and other BaseExceptions before fallback response fires.
         error_response = APIResponse(
             id=ctx.task_id,
             model_internal=ctx.model_name,
@@ -436,12 +436,13 @@ class _LLMClient(BaseModel):
             sampling_params=ctx.sampling_params,
             status_code=None,
             is_error=True,
-            error_message=str(e),
+            error_message=f"{type(exc).__name__}: {exc}",
+            raw_response={"exception_repr": repr(exc)},
         )
         results[ctx.task_id] = error_response
-        # Mark task as completed so the main loop can finish
         if ctx.status_tracker:
             ctx.status_tracker.task_failed(ctx.task_id)
+        raise
 
 task = asyncio.create_task(process_and_store(next_context))
 inflight_tasks.add(task)
@@ -10,6 +10,18 @@ ANTHROPIC_MODELS = {
     # ░███
     # █████
     #
+    "claude-4.5-sonnet": {
+        "id": "claude-4.5-sonnet",
+        "name": "claude-sonnet-4-5-20250929",
+        "api_base": "https://api.anthropic.com/v1",
+        "api_key_env_var": "ANTHROPIC_API_KEY",
+        "supports_json": False,
+        "api_spec": "anthropic",
+        "input_cost": 3.0,
+        "output_cost": 15.0,
+        "requests_per_minute": 4_000,
+        "tokens_per_minute": 400_000,
+    },
     "claude-4.1-opus": {
         "id": "claude-4.1-opus",
         "name": "claude-opus-4-1-20250805",
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lm_deluge
-Version: 0.0.52
+Version: 0.0.53
 Summary: Python utility for using LLM API models.
 Author-email: Benjamin Anderson <ben@trytaylor.ai>
 Requires-Python: >=3.10
File without changes
File without changes
File without changes