lm-deluge 0.0.60__py3-none-any.whl → 0.0.62__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

@@ -42,6 +42,14 @@ def _build_anthropic_request(
         "content-type": "application/json",
     }
 
+    # Check if any messages contain uploaded files (file_id)
+    # If so, add the files-api beta header
+    for msg in prompt.messages:
+        for file in msg.files:
+            if file.is_remote and file.remote_provider == "anthropic":
+                _add_beta(base_headers, "files-api-2025-04-14")
+                break
+
     request_json = {
         "model": model.name,
         "messages": messages,
lm_deluge/client.py CHANGED
@@ -3,6 +3,7 @@ from typing import (
     Any,
     AsyncGenerator,
     Callable,
+    ClassVar,
     Literal,
     Self,
     Sequence,
@@ -31,7 +32,7 @@ from lm_deluge.tool import MCPServer, Tool
 
 from .api_requests.base import APIResponse
 from .config import SamplingParams
-from .models import APIModel, registry
+from .models import APIModel, register_model, registry
 from .request_context import RequestContext
 from .tracker import StatusTracker
 
@@ -43,6 +44,12 @@ class _LLMClient(BaseModel):
     Keeps all validation, serialization, and existing functionality.
     """
 
+    _REASONING_SUFFIXES: ClassVar[dict[str, Literal["low", "medium", "high"]]] = {
+        "-low": "low",
+        "-medium": "medium",
+        "-high": "high",
+    }
+
     model_names: str | list[str] = ["gpt-4.1-mini"]
     name: str | None = None
     max_requests_per_minute: int = 1_000
@@ -143,23 +150,15 @@ class _LLMClient(BaseModel):
     def _normalize_model_names(
         self, models: list[str]
     ) -> tuple[list[str], list[Literal["low", "medium", "high"] | None]]:
-        reasoning_effort_suffixes: dict[str, Literal["low", "medium", "high"]] = {
-            "-low": "low",
-            "-medium": "medium",
-            "-high": "high",
-        }
        normalized: list[str] = []
        efforts: list[Literal["low", "medium", "high"] | None] = []
 
        for name in models:
-            base_name = name
-            effort: Literal["low", "medium", "high"] | None = None
-            for suffix, candidate in reasoning_effort_suffixes.items():
-                if name.endswith(suffix) and len(name) > len(suffix):
-                    base_name = name[: -len(suffix)]
-                    effort = candidate
-                    break
-            normalized.append(base_name)
+            base_name = self._preprocess_openrouter_model(name)
+            trimmed_name, effort = self.__class__._strip_reasoning_suffix_if_registered(
+                base_name
+            )
+            normalized.append(trimmed_name)
             efforts.append(effort)
 
         return normalized, efforts
@@ -254,31 +253,63 @@ class _LLMClient(BaseModel):
     def models(self):
         return self.model_names  # why? idk
 
+    @staticmethod
+    def _preprocess_openrouter_model(model_name: str) -> str:
+        """Process openrouter: prefix and register model if needed."""
+        if model_name.startswith("openrouter:"):
+            slug = model_name.split(":", 1)[1]  # Everything after "openrouter:"
+            # Create a unique id by replacing slashes with hyphens
+            model_id = f"openrouter-{slug.replace('/', '-')}"
+
+            # Register the model if not already in registry
+            if model_id not in registry:
+                register_model(
+                    id=model_id,
+                    name=slug,  # The full slug sent to OpenRouter API (e.g., "openrouter/andromeda-alpha")
+                    api_base="https://openrouter.ai/api/v1",
+                    api_key_env_var="OPENROUTER_API_KEY",
+                    api_spec="openai",
+                    supports_json=True,
+                    supports_logprobs=False,
+                    supports_responses=False,
+                    input_cost=0,  # Unknown costs for generic models
+                    cached_input_cost=0,
+                    cache_write_cost=0,
+                    output_cost=0,
+                )
+
+            return model_id
+        return model_name
+
     @model_validator(mode="before")
     @classmethod
     def fix_lists(cls, data) -> "_LLMClient":
-        # Parse reasoning effort from model name suffixes (e.g., "gpt-5-high")
-        # Only applies when a single model string is provided
-        if isinstance(data.get("model_names"), str):
-            model_name = data["model_names"]
-            reasoning_effort_suffixes = {
-                "-low": "low",
-                "-medium": "medium",
-                "-high": "high",
-            }
-
-            for suffix, effort in reasoning_effort_suffixes.items():
-                if model_name.endswith(suffix):
-                    # Extract base model name by removing suffix
-                    base_model = model_name[: -len(suffix)]
-                    data["model_names"] = base_model
-
-                    # Set reasoning_effort if not already explicitly set
-                    if data.get("reasoning_effort") is None:
-                        data["reasoning_effort"] = effort
-                    break
-
-            data["model_names"] = [data["model_names"]]
+        # Process model_names - handle both strings and lists
+        model_names = data.get("model_names")
+
+        if isinstance(model_names, str):
+            # Single model as string
+            # First, handle OpenRouter prefix
+            model_name = cls._preprocess_openrouter_model(model_names)
+
+            # Then handle reasoning effort suffix (e.g., "gpt-5-high")
+            model_name, effort = cls._strip_reasoning_suffix_if_registered(model_name)
+            if effort and data.get("reasoning_effort") is None:
+                data["reasoning_effort"] = effort
+
+            data["model_names"] = [model_name]
+
+        elif isinstance(model_names, list):
+            # List of models - process each one
+            processed_models = []
+            for model_name in model_names:
+                # Handle OpenRouter prefix for each model
+                processed_model = cls._preprocess_openrouter_model(model_name)
+                processed_model, _ = cls._strip_reasoning_suffix_if_registered(
+                    processed_model
+                )
+                processed_models.append(processed_model)
+            data["model_names"] = processed_models
 
         if not isinstance(data.get("sampling_params", []), list):
             data["sampling_params"] = [data["sampling_params"]]
@@ -298,6 +329,18 @@ class _LLMClient(BaseModel):
             data["sampling_params"] = data["sampling_params"] * len(data["model_names"])
         return data
 
+    @classmethod
+    def _strip_reasoning_suffix_if_registered(
+        cls, model_name: str
+    ) -> tuple[str, Literal["low", "medium", "high"] | None]:
+        """Remove reasoning suffix only when the trimmed model already exists."""
+        for suffix, effort in cls._REASONING_SUFFIXES.items():
+            if model_name.endswith(suffix) and len(model_name) > len(suffix):
+                candidate = model_name[: -len(suffix)]
+                if candidate in registry:
+                    return candidate, effort
+        return model_name, None
+
     @model_validator(mode="after")
     def validate_client(self) -> Self:
         if isinstance(self.model_names, str):
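
Taken together, the client changes mean a model name may now carry an "openrouter:" prefix (the slug is registered on the fly against https://openrouter.ai/api/v1 using OPENROUTER_API_KEY, with costs recorded as 0) and a "-low"/"-medium"/"-high" suffix that is stripped only when the trimmed name already exists in the registry. A usage sketch, assuming the public LLMClient wrapper exposes this _LLMClient behavior (model slugs illustrative):

    from lm_deluge import LLMClient

    # arbitrary OpenRouter slug, auto-registered on first use
    client = LLMClient(model_names="openrouter:meta-llama/llama-3.3-70b-instruct")

    # "-high" is stripped only if "gpt-5" is a registered model;
    # reasoning_effort then becomes "high" unless set explicitly
    client = LLMClient(model_names="gpt-5-high")
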
lm_deluge/file.py CHANGED
@@ -1,22 +1,35 @@
-from functools import cached_property
-import os
-import io
-import requests
 import base64
+import io
 import mimetypes
-import xxhash
+import os
 from dataclasses import dataclass, field
+from functools import cached_property
 from pathlib import Path
+from typing import Literal
+
+import requests
+import xxhash
 
 
 @dataclass
 class File:
     # raw bytes, pathlike, http url, base64 data url, or file_id
-    data: bytes | io.BytesIO | Path | str
+    data: bytes | io.BytesIO | Path | str | None
     media_type: str | None = None  # inferred if None
+    type: str = field(init=False, default="file")
+    is_remote: bool = False
+    remote_provider: Literal["openai", "anthropic", "google"] | None = None
     filename: str | None = None  # optional filename for uploads
     file_id: str | None = None  # for OpenAI file uploads or Anthropic file API
-    type: str = field(init=False, default="file")
+
+    def __post_init__(self):
+        if self.is_remote:
+            if self.remote_provider is None:
+                raise ValueError("remote_provider must be specified")
+            if self.file_id is None:
+                raise ValueError("file_id must be specified for remote files")
+        if self.file_id and not self.is_remote:
+            print("Warning: File ID specified by file not labeled as remote.")
 
     # helpers -----------------------------------------------------------------
     def _bytes(self) -> bytes:
@@ -75,17 +88,342 @@ class File:
     @cached_property
     def fingerprint(self) -> str:
         # Hash the file contents for fingerprinting
+        if self.is_remote:
+            # For remote files, use provider:file_id for interpretability
+            return f"{self.remote_provider}:{self.file_id}"
         file_bytes = self._bytes()
         return xxhash.xxh64(file_bytes).hexdigest()
 
     @cached_property
     def size(self) -> int:
         """Return file size in bytes."""
+        if self.is_remote:
+            # For remote files, we don't have the bytes available
+            return 0
         return len(self._bytes())
 
+    async def as_remote(
+        self, provider: Literal["openai", "anthropic", "google"]
+    ) -> "File":
+        """Upload file to provider's file API and return new File with file_id.
+
+        Args:
+            provider: The provider to upload to ("openai", "anthropic", or "google")
+
+        Returns:
+            A new File object with file_id set and is_remote=True
+
+        Raises:
+            ValueError: If provider is unsupported or API key is missing
+            RuntimeError: If upload fails
+        """
+        if self.is_remote:
+            # If already remote with same provider, return self
+            if self.remote_provider == provider:
+                return self
+            # Otherwise raise error about cross-provider incompatibility
+            raise ValueError(
+                f"File is already uploaded to {self.remote_provider}. "
+                f"Cannot re-upload to {provider}."
+            )
+
+        if provider == "openai":
+            return await self._upload_to_openai()
+        elif provider == "anthropic":
+            return await self._upload_to_anthropic()
+        elif provider == "google":
+            return await self._upload_to_google()
+        else:
+            raise ValueError(f"Unsupported provider: {provider}")
+
+    async def _upload_to_openai(self) -> "File":
+        """Upload file to OpenAI's Files API."""
+        import aiohttp
+
+        api_key = os.environ.get("OPENAI_API_KEY")
+        if not api_key:
+            raise ValueError("OPENAI_API_KEY environment variable must be set")
+
+        url = "https://api.openai.com/v1/files"
+        headers = {"Authorization": f"Bearer {api_key}"}
+
+        # Get file bytes and metadata
+        file_bytes = self._bytes()
+        filename = self._filename()
+
+        # Create multipart form data
+        data = aiohttp.FormData()
+        data.add_field("purpose", "assistants")
+        data.add_field(
+            "file",
+            file_bytes,
+            filename=filename,
+            content_type=self._mime(),
+        )
+
+        try:
+            async with aiohttp.ClientSession() as session:
+                async with session.post(url, headers=headers, data=data) as response:
+                    if response.status != 200:
+                        text = await response.text()
+                        raise RuntimeError(f"Failed to upload file to OpenAI: {text}")
+
+                    response_data = await response.json()
+                    file_id = response_data["id"]
+
+                    # Return new File object with file_id
+                    return File(
+                        data=None,
+                        media_type=self.media_type,
+                        is_remote=True,
+                        remote_provider="openai",
+                        filename=filename,
+                        file_id=file_id,
+                    )
+        except aiohttp.ClientError as e:
+            raise RuntimeError(f"Failed to upload file to OpenAI: {e}")
+
+    async def _upload_to_anthropic(self) -> "File":
+        """Upload file to Anthropic's Files API."""
+        import aiohttp
+
+        api_key = os.environ.get("ANTHROPIC_API_KEY")
+        if not api_key:
+            raise ValueError("ANTHROPIC_API_KEY environment variable must be set")
+
+        url = "https://api.anthropic.com/v1/files"
+        headers = {
+            "x-api-key": api_key,
+            "anthropic-version": "2023-06-01",
+            "anthropic-beta": "files-api-2025-04-14",
+        }
+
+        # Get file bytes and metadata
+        file_bytes = self._bytes()
+        filename = self._filename()
+
+        # Create multipart form data
+        data = aiohttp.FormData()
+        data.add_field(
+            "file",
+            file_bytes,
+            filename=filename,
+            content_type=self._mime(),
+        )
+
+        try:
+            async with aiohttp.ClientSession() as session:
+                async with session.post(url, headers=headers, data=data) as response:
+                    if response.status != 200:
+                        text = await response.text()
+                        raise RuntimeError(
+                            f"Failed to upload file to Anthropic: {text}"
+                        )
+
+                    response_data = await response.json()
+                    file_id = response_data["id"]
+
+                    # Return new File object with file_id
+                    return File(
+                        data=None,
+                        media_type=self.media_type,
+                        is_remote=True,
+                        remote_provider="anthropic",
+                        filename=filename,
+                        file_id=file_id,
+                    )
+        except aiohttp.ClientError as e:
+            raise RuntimeError(f"Failed to upload file to Anthropic: {e}")
+
+    async def _upload_to_google(self) -> "File":
+        """Upload file to Google Gemini Files API."""
+        import json
+
+        import aiohttp
+
+        api_key = os.environ.get("GEMINI_API_KEY")
+        if not api_key:
+            raise ValueError("GEMINI_API_KEY environment variable must be set")
+
+        # Google uses a different URL structure with the API key as a parameter
+        url = f"https://generativelanguage.googleapis.com/upload/v1beta/files?key={api_key}"
+
+        # Get file bytes and metadata
+        file_bytes = self._bytes()
+        filename = self._filename()
+        mime_type = self._mime()
+
+        # Google expects a multipart request with metadata and file data
+        # Using the resumable upload protocol
+        headers = {
+            "X-Goog-Upload-Protocol": "multipart",
+        }
+
+        # Create multipart form data with metadata and file
+        data = aiohttp.FormData()
+
+        # Add metadata part as JSON
+        metadata = {"file": {"display_name": filename}}
+        data.add_field(
+            "metadata",
+            json.dumps(metadata),
+            content_type="application/json",
+        )
+
+        # Add file data part
+        data.add_field(
+            "file",
+            file_bytes,
+            filename=filename,
+            content_type=mime_type,
+        )
+
+        try:
+            async with aiohttp.ClientSession() as session:
+                async with session.post(url, headers=headers, data=data) as response:
+                    if response.status not in [200, 201]:
+                        text = await response.text()
+                        raise RuntimeError(f"Failed to upload file to Google: {text}")
+
+                    response_data = await response.json()
+                    # Google returns a file object with a 'name' field like 'files/abc123'
+                    file_uri = response_data.get("file", {}).get(
+                        "uri"
+                    ) or response_data.get("name")
+                    if not file_uri:
+                        raise RuntimeError(
+                            f"No file URI in Google response: {response_data}"
+                        )
+
+                    # Return new File object with file_id (using the file URI)
+                    return File(
+                        data=None,
+                        media_type=self.media_type,
+                        is_remote=True,
+                        remote_provider="google",
+                        filename=filename,
+                        file_id=file_uri,
+                    )
+        except aiohttp.ClientError as e:
+            raise RuntimeError(f"Failed to upload file to Google: {e}")
+
+    async def delete(self) -> bool:
+        """Delete the uploaded file from the remote provider.
+
+        Returns:
+            True if deletion was successful, False otherwise
+
+        Raises:
+            ValueError: If file is not a remote file or provider is unsupported
+            RuntimeError: If deletion fails
+        """
+        if not self.is_remote:
+            raise ValueError(
+                "Cannot delete a non-remote file. Only remote files can be deleted."
+            )
+
+        if not self.file_id:
+            raise ValueError("Cannot delete file without file_id")
+
+        if self.remote_provider == "openai":
+            return await self._delete_from_openai()
+        elif self.remote_provider == "anthropic":
+            return await self._delete_from_anthropic()
+        elif self.remote_provider == "google":
+            return await self._delete_from_google()
+        else:
+            raise ValueError(f"Unsupported provider: {self.remote_provider}")
+
+    async def _delete_from_openai(self) -> bool:
+        """Delete file from OpenAI's Files API."""
+        import aiohttp
+
+        api_key = os.environ.get("OPENAI_API_KEY")
+        if not api_key:
+            raise ValueError("OPENAI_API_KEY environment variable must be set")
+
+        url = f"https://api.openai.com/v1/files/{self.file_id}"
+        headers = {"Authorization": f"Bearer {api_key}"}
+
+        try:
+            async with aiohttp.ClientSession() as session:
+                async with session.delete(url, headers=headers) as response:
+                    if response.status == 200:
+                        return True
+                    else:
+                        text = await response.text()
+                        raise RuntimeError(f"Failed to delete file from OpenAI: {text}")
+        except aiohttp.ClientError as e:
+            raise RuntimeError(f"Failed to delete file from OpenAI: {e}")
+
+    async def _delete_from_anthropic(self) -> bool:
+        """Delete file from Anthropic's Files API."""
+        import aiohttp
+
+        api_key = os.environ.get("ANTHROPIC_API_KEY")
+        if not api_key:
+            raise ValueError("ANTHROPIC_API_KEY environment variable must be set")
+
+        url = f"https://api.anthropic.com/v1/files/{self.file_id}"
+        headers = {
+            "x-api-key": api_key,
+            "anthropic-version": "2023-06-01",
+            "anthropic-beta": "files-api-2025-04-14",
+        }
+
+        try:
+            async with aiohttp.ClientSession() as session:
+                async with session.delete(url, headers=headers) as response:
+                    if response.status == 200:
+                        return True
+                    else:
+                        text = await response.text()
+                        raise RuntimeError(
+                            f"Failed to delete file from Anthropic: {text}"
+                        )
+        except aiohttp.ClientError as e:
+            raise RuntimeError(f"Failed to delete file from Anthropic: {e}")
+
+    async def _delete_from_google(self) -> bool:
+        """Delete file from Google Gemini Files API."""
+        import aiohttp
+
+        api_key = os.environ.get("GEMINI_API_KEY")
+        if not api_key:
+            raise ValueError("GEMINI_API_KEY environment variable must be set")
+
+        # Google file_id is the full URI like "https://generativelanguage.googleapis.com/v1beta/files/abc123"
+        # We need to extract just the file name part for the delete endpoint
+        assert self.file_id, "can't delete file with no file id"
+        if self.file_id.startswith("https://"):
+            # Extract the path after the domain
+            file_name = self.file_id.split("/v1beta/")[-1]
+        else:
+            file_name = self.file_id
+
+        url = f"https://generativelanguage.googleapis.com/v1beta/{file_name}?key={api_key}"
+
+        try:
+            async with aiohttp.ClientSession() as session:
+                async with session.delete(url) as response:
+                    if response.status in [200, 204]:
+                        return True
+                    else:
+                        text = await response.text()
+                        raise RuntimeError(f"Failed to delete file from Google: {text}")
+        except aiohttp.ClientError as e:
+            raise RuntimeError(f"Failed to delete file from Google: {e}")
+
     # ── provider-specific emission ────────────────────────────────────────────
     def oa_chat(self) -> dict:
         """For OpenAI Chat Completions - file content as base64 or file_id."""
+        # Validate provider compatibility
+        if self.is_remote and self.remote_provider != "openai":
+            raise ValueError(
+                f"Cannot emit file uploaded to {self.remote_provider} as OpenAI format. "
+                f"File must be uploaded to OpenAI or provided as raw data."
+            )
+
         if self.file_id:
             return {
                 "type": "file",
@@ -104,6 +442,13 @@ class File:
 
     def oa_resp(self) -> dict:
         """For OpenAI Responses API - file content as base64 or file_id."""
+        # Validate provider compatibility
+        if self.is_remote and self.remote_provider != "openai":
+            raise ValueError(
+                f"Cannot emit file uploaded to {self.remote_provider} as OpenAI format. "
+                f"File must be uploaded to OpenAI or provided as raw data."
+            )
+
         if self.file_id:
             return {
                 "type": "input_file",
@@ -118,6 +463,13 @@ class File:
 
     def anthropic(self) -> dict:
         """For Anthropic Messages API - file content as base64 or file_id."""
+        # Validate provider compatibility
+        if self.is_remote and self.remote_provider != "anthropic":
+            raise ValueError(
+                f"Cannot emit file uploaded to {self.remote_provider} as Anthropic format. "
+                f"File must be uploaded to Anthropic or provided as raw data."
+            )
+
         if self.file_id:
             return {
                 "type": "document",
@@ -145,13 +497,30 @@ class File:
         return filename, content, media_type
 
     def gemini(self) -> dict:
-        """For Gemini API - files are provided as inline data."""
-        return {
-            "inlineData": {
-                "mimeType": self._mime(),
-                "data": self._base64(include_header=False),
+        """For Gemini API - files are provided as inline data or file URI."""
+        # Validate provider compatibility
+        if self.is_remote and self.remote_provider != "google":
+            raise ValueError(
+                f"Cannot emit file uploaded to {self.remote_provider} as Google format. "
+                f"File must be uploaded to Google or provided as raw data."
+            )
+
+        if self.file_id:
+            # Use file URI for uploaded files
+            return {
+                "fileData": {
+                    "mimeType": self._mime(),
+                    "fileUri": self.file_id,
+                }
+            }
+        else:
+            # Use inline data for non-uploaded files
+            return {
+                "inlineData": {
+                    "mimeType": self._mime(),
+                    "data": self._base64(include_header=False),
+                }
             }
-        }
 
     def mistral(self) -> dict:
         """For Mistral API - not yet supported."""
lm_deluge/prompt.py CHANGED
@@ -474,22 +474,28 @@ class Message:
 
     def with_file(
         self,
-        data: bytes | str | Path | io.BytesIO,
+        data: bytes | str | Path | io.BytesIO | File,
         *,
         media_type: str | None = None,
         filename: str | None = None,
+        # remote: bool = False,
+        # provider: Literal["openai", "anthropic", "google"] | None = None,
     ) -> "Message":
         """
         Append a file block and return self for chaining.
         """
-        file = File(data, media_type=media_type, filename=filename)
+        if not isinstance(data, File):
+            file = File(data, media_type=media_type, filename=filename)
+        else:
+            file = data
+
         self.parts.append(file)
         return self
 
     @deprecated("with_file")
     def add_file(
         self,
-        data: bytes | str | Path | io.BytesIO,
+        data: bytes | str | Path | io.BytesIO | File,
         *,
         media_type: str | None = None,
         filename: str | None = None,
@@ -499,6 +505,30 @@ class Message:
         """
         return self.with_file(data, media_type=media_type, filename=filename)
 
+    async def with_remote_file(
+        self,
+        data: bytes | str | Path | io.BytesIO | File,
+        *,
+        media_type: str | None = None,
+        filename: str | None = None,
+        provider: Literal["openai", "anthropic", "google"] = "openai",
+    ):
+        if not isinstance(data, File):
+            file = File(data, media_type=media_type, filename=filename)
+        else:
+            file = data
+
+        if not file.is_remote:
+            file = await file.as_remote(provider=provider)
+        else:
+            if file.remote_provider != provider:
+                raise ValueError(
+                    f"File is already remote with provider {file.remote_provider}, cannot change provider"
+                )
+
+        self.parts.append(file)
+        return self
+
     def with_tool_call(self, id: str, name: str, arguments: dict) -> "Message":
         """Append a tool call block and return self for chaining."""
         self.parts.append(ToolCall(id=id, name=name, arguments=arguments))
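
Unlike with_file, the new with_remote_file helper is async because it may upload first. A sketch (the user-message factory is assumed; it is not part of this diff):

    # attach a provider-hosted upload in one call
    msg = Message.user("Summarize the attached PDF.")  # assumed constructor
    msg = await msg.with_remote_file("report.pdf", provider="anthropic")
    # an already-remote File passes straight through; a provider
    # mismatch raises ValueError instead of silently re-uploading
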
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lm_deluge
-Version: 0.0.60
+Version: 0.0.62
 Summary: Python utility for using LLM API models.
 Author-email: Benjamin Anderson <ben@trytaylor.ai>
 Requires-Python: >=3.10
@@ -2,13 +2,13 @@ lm_deluge/__init__.py,sha256=LKKIcqQoQyDpTck6fnB7iAs75BnfNNa3Bj5Nz7KU4Hk,376
 lm_deluge/batches.py,sha256=Km6QM5_7BlF2qEyo4WPlhkaZkpzrLqf50AaveHXQOoY,25127
 lm_deluge/cache.py,sha256=xO2AIYvP3tUpTMKQjwQQYfGRJSRi6e7sMlRhLjsS-u4,4873
 lm_deluge/cli.py,sha256=Ilww5gOw3J5v0NReq_Ra4hhxU4BCIJBl1oTGxJZKedc,12065
-lm_deluge/client.py,sha256=nxVxN0oXYLvOiMgiF7b_qmqQk6Hohnf4ZTtSx1SI_PQ,38845
+lm_deluge/client.py,sha256=TKRN1KAMOgtQFLazh_iyj185GBHtP7r8KAU4lod-qfs,40693
 lm_deluge/config.py,sha256=H1tQyJDNHGFuwxqQNL5Z-CjWAC0luHSBA3iY_pxmACM,932
 lm_deluge/embed.py,sha256=CO-TOlC5kOTAM8lcnicoG4u4K664vCBwHF1vHa-nAGg,13382
 lm_deluge/errors.py,sha256=oHjt7YnxWbh-eXMScIzov4NvpJMo0-2r5J6Wh5DQ1tk,209
-lm_deluge/file.py,sha256=FGomcG8s2go_55Z2CChflHgmU-UqgFftgFY8c7f_G70,5631
+lm_deluge/file.py,sha256=PTmlJQ-IaYcYUFun9V0bJ1NPVP84edJrR0hvCMWFylY,19697
 lm_deluge/image.py,sha256=5AMXmn2x47yXeYNfMSMAOWcnlrOxxOel-4L8QCJwU70,8928
-lm_deluge/prompt.py,sha256=1hGLOIwdyGFokKv0dPiVpke3OPHD6vK5qO6q9E8H89Y,62020
+lm_deluge/prompt.py,sha256=RsKgvAbcG_-cHfgR9pyrl9tEIo7rIlVxlPpCgZbbj3E,63005
 lm_deluge/request_context.py,sha256=cBayMFWupWhde2OjRugW3JH-Gin-WFGc6DK2Mb4Prdc,2576
 lm_deluge/rerank.py,sha256=-NBAJdHz9OB-SWWJnHzkFmeVO4wR6lFV7Vw-SxG7aVo,11457
 lm_deluge/tool.py,sha256=eZpzgkSIlGD7KdZQwzLF-UdyRJpRnNNXpceGJrNhRrE,26421
@@ -16,7 +16,7 @@ lm_deluge/tracker.py,sha256=aeS9GUJpgOSQRVXAnGDvlMO8qYpSxpTNLYj2hrMg0m8,14757
 lm_deluge/usage.py,sha256=xz9tAw2hqaJvv9aAVhnQ6N1Arn7fS8Shb28VwCW26wI,5136
 lm_deluge/warnings.py,sha256=nlDJMCw30VhDEFxqLO2-bfXH_Tv5qmlglzUSbokCSw8,1498
 lm_deluge/api_requests/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
-lm_deluge/api_requests/anthropic.py,sha256=7tTb_NMPodDHrCzakrLd9LyXuLqeTQyAGU-FvMoV3gI,8437
+lm_deluge/api_requests/anthropic.py,sha256=i4coscYQcg3TKkTJPoIvVAP5rY4HQA7Dt4P-OYTgBNw,8762
 lm_deluge/api_requests/base.py,sha256=GCcydwBRx4_xAuYLvasXlyj-TgqvKAVhVvxRfJkvPbY,9471
 lm_deluge/api_requests/bedrock.py,sha256=Uppne03GcIEk1tVYzoGu7GXK2Sg94a_xvFTLDRN_phY,15412
 lm_deluge/api_requests/common.py,sha256=BZ3vRO5TB669_UsNKugkkuFSzoLHOYJIKt4nV4sf4vc,422
@@ -65,8 +65,8 @@ lm_deluge/util/logprobs.py,sha256=UkBZakOxWluaLqHrjARu7xnJ0uCHVfLGHJdnYlEcutk,11
 lm_deluge/util/spatial.py,sha256=BsF_UKhE-x0xBirc-bV1xSKZRTUhsOBdGqsMKme20C8,4099
 lm_deluge/util/validation.py,sha256=hz5dDb3ebvZrZhnaWxOxbNSVMI6nmaOODBkk0htAUhs,1575
 lm_deluge/util/xml.py,sha256=Ft4zajoYBJR3HHCt2oHwGfymGLdvp_gegVmJ-Wqk4Ck,10547
-lm_deluge-0.0.60.dist-info/licenses/LICENSE,sha256=uNNXGXPCw2TC7CUs7SEBkA-Mz6QBQFWUUEWDMgEs1dU,1058
-lm_deluge-0.0.60.dist-info/METADATA,sha256=uBr_1y__E5eT9sL6rOo3qf0MZ4rNKZe0hKVj4WMcqKE,13443
-lm_deluge-0.0.60.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-lm_deluge-0.0.60.dist-info/top_level.txt,sha256=hqU-TJX93yBwpgkDtYcXyLr3t7TLSCCZ_reytJjwBaE,10
-lm_deluge-0.0.60.dist-info/RECORD,,
+lm_deluge-0.0.62.dist-info/licenses/LICENSE,sha256=uNNXGXPCw2TC7CUs7SEBkA-Mz6QBQFWUUEWDMgEs1dU,1058
+lm_deluge-0.0.62.dist-info/METADATA,sha256=OxyZBmmED2qbq0Aizaj6Hatrzg57qs0amHHTManmBrI,13443
+lm_deluge-0.0.62.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+lm_deluge-0.0.62.dist-info/top_level.txt,sha256=hqU-TJX93yBwpgkDtYcXyLr3t7TLSCCZ_reytJjwBaE,10
+lm_deluge-0.0.62.dist-info/RECORD,,