lm-deluge 0.0.71__tar.gz → 0.0.72__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lm_deluge-0.0.71/src/lm_deluge.egg-info → lm_deluge-0.0.72}/PKG-INFO +1 -1
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/pyproject.toml +1 -1
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/base.py +34 -11
- {lm_deluge-0.0.71 → lm_deluge-0.0.72/src/lm_deluge.egg-info}/PKG-INFO +1 -1
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/LICENSE +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/README.md +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/setup.cfg +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/__init__.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/__init__.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/anthropic.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/bedrock.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/chat_reasoning.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/common.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/deprecated/bedrock.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/deprecated/cohere.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/deprecated/deepseek.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/deprecated/mistral.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/deprecated/vertex.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/gemini.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/mistral.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/openai.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/response.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/batches.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/built_in_tools/anthropic/__init__.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/built_in_tools/anthropic/bash.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/built_in_tools/anthropic/computer_use.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/built_in_tools/anthropic/editor.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/built_in_tools/base.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/built_in_tools/openai.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/cache.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/cli.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/client.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/config.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/embed.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/errors.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/file.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/image.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/llm_tools/__init__.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/llm_tools/classify.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/llm_tools/extract.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/llm_tools/locate.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/llm_tools/ocr.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/llm_tools/score.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/llm_tools/translate.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/mock_openai.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/__init__.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/anthropic.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/bedrock.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/cerebras.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/cohere.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/deepseek.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/fireworks.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/google.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/grok.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/groq.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/kimi.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/meta.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/minimax.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/mistral.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/openai.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/openrouter.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/models/together.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/presets/cerebras.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/presets/meta.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/prompt.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/request_context.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/rerank.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/tool.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/tracker.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/usage.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/util/harmony.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/util/json.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/util/logprobs.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/util/spatial.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/util/validation.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/util/xml.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/warnings.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge.egg-info/SOURCES.txt +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge.egg-info/dependency_links.txt +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge.egg-info/requires.txt +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge.egg-info/top_level.txt +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/tests/test_builtin_tools.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/tests/test_file_upload.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/tests/test_mock_openai.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/tests/test_native_mcp_server.py +0 -0
- {lm_deluge-0.0.71 → lm_deluge-0.0.72}/tests/test_openrouter_generic.py +0 -0
{lm_deluge-0.0.71 → lm_deluge-0.0.72}/src/lm_deluge/api_requests/base.py (+34 -11)

@@ -90,9 +90,32 @@ class APIRequestBase(ABC):
         start -> poll -> result style of request.
         """
         assert self.context.status_tracker, "no status tracker"
-
+        poll_interval = 5.0
+        attempt_start = time.monotonic()
+        deadline = attempt_start + self.context.request_timeout
+        response_id: str | None = None
+        last_status: str | None = None
+
         async with aiohttp.ClientSession() as session:
-
+
+            async def cancel_response(reason: str) -> None:
+                nonlocal response_id
+                if not response_id:
+                    return
+                cancel_url = f"{self.url}/{response_id}/cancel"
+                try:
+                    async with session.post(
+                        url=cancel_url,
+                        headers=self.request_header,
+                    ) as cancel_response:
+                        cancel_response.raise_for_status()
+                        print(f"Background req {response_id} cancelled: {reason}")
+                except (
+                    Exception
+                ) as cancel_err:  # pragma: no cover - best effort logging
+                    print(
+                        f"Failed to cancel background req {response_id}: {cancel_err}"
+                    )
 
             try:
                 self.context.status_tracker.total_requests += 1
@@ -109,14 +132,11 @@ class APIRequestBase(ABC):
                 last_status = data["status"]
 
                 while True:
-
-
-
-
-
-                    ) as http_response:
-                        http_response.raise_for_status()
-
+                    now = time.monotonic()
+                    remaining = deadline - now
+                    if remaining <= 0:
+                        elapsed = now - attempt_start
+                        await cancel_response(f"timed out after {elapsed:.1f}s")
                         return APIResponse(
                             id=self.context.task_id,
                             model_internal=self.context.model_name,
@@ -128,8 +148,9 @@ class APIRequestBase(ABC):
                             content=None,
                             usage=None,
                         )
+
                     # poll for the response
-                    await asyncio.sleep(
+                    await asyncio.sleep(min(poll_interval, max(remaining, 0)))
                     async with session.get(
                         url=f"{self.url}/{response_id}",
                         headers=self.request_header,
@@ -146,6 +167,8 @@ class APIRequestBase(ABC):
                         return await self.handle_response(http_response)
 
             except Exception as e:
+                if response_id:
+                    await cancel_response(f"errored: {type(e).__name__}")
                 raise_if_modal_exception(e)
                 tb = traceback.format_exc()
                 print(tb)
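The substantive change in 0.0.72 is to the background ("start -> poll -> result") request path in APIRequestBase: each attempt now derives a deadline from context.request_timeout, sleeps between polls in increments clamped to the remaining time, and makes a best-effort POST to a /cancel sub-endpoint when the attempt times out or errors. The sketch below illustrates the same pattern in isolation, under stated assumptions: the base URL, headers, timing constants, and terminal status names are placeholders, not lm_deluge's actual configuration or API.

```python
# Minimal sketch of the poll-with-deadline-and-cancel pattern from base.py.
# BASE_URL, HEADERS, the timing constants, and the terminal status names are
# illustrative assumptions, not values taken from lm_deluge.
import asyncio
import time

import aiohttp

BASE_URL = "https://api.example.com/v1/responses"  # hypothetical endpoint
HEADERS = {"Authorization": "Bearer <token>"}      # hypothetical auth header

POLL_INTERVAL = 5.0     # seconds between status polls
REQUEST_TIMEOUT = 120.0 # overall deadline for one attempt


async def poll_background_request(response_id: str) -> dict | None:
    """Poll a background request until it finishes or the deadline passes.

    Returns the final JSON payload, or None on timeout (after a best-effort
    cancel of the server-side request).
    """
    attempt_start = time.monotonic()
    deadline = attempt_start + REQUEST_TIMEOUT

    async with aiohttp.ClientSession() as session:

        async def cancel(reason: str) -> None:
            # Best-effort cancellation; failures are logged, never raised.
            try:
                async with session.post(
                    f"{BASE_URL}/{response_id}/cancel", headers=HEADERS
                ) as resp:
                    resp.raise_for_status()
                    print(f"Background req {response_id} cancelled: {reason}")
            except Exception as err:
                print(f"Failed to cancel background req {response_id}: {err}")

        try:
            while True:
                remaining = deadline - time.monotonic()
                if remaining <= 0:
                    await cancel(f"timed out after {REQUEST_TIMEOUT:.1f}s")
                    return None
                # Sleep for the poll interval, but never past the deadline.
                await asyncio.sleep(min(POLL_INTERVAL, max(remaining, 0)))
                async with session.get(
                    f"{BASE_URL}/{response_id}", headers=HEADERS
                ) as resp:
                    resp.raise_for_status()
                    data = await resp.json()
                if data.get("status") in ("completed", "failed", "cancelled"):
                    return data
        except Exception as exc:
            # On any error, try to stop the server-side work before re-raising.
            await cancel(f"errored: {type(exc).__name__}")
            raise
```

Clamping the sleep with min(poll_interval, max(remaining, 0)) keeps the final poll from overshooting the deadline, and running the cancel call inside the same ClientSession reuses the already-open connection pool for cleanup.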