lm-deluge 0.0.57__py3-none-any.whl → 0.0.59__py3-none-any.whl

This diff shows the changes between two package versions that were publicly released to one of the supported registries. It is provided for informational purposes only and reflects the differences between the versions as they appear in their respective public registries.

Potentially problematic release.


This version of lm-deluge might be problematic. See the linked advisory for more details.

@@ -20,8 +20,6 @@ TOGETHER_MODELS = {
20
20
  "api_spec": "openai",
21
21
  "input_cost": 3.0,
22
22
  "output_cost": 7.0,
23
- "requests_per_minute": None,
24
- "tokens_per_minute": None,
25
23
  },
26
24
  "deepseek-v3-together": {
27
25
  "id": "deepseek-v3-together",
@@ -32,8 +30,6 @@ TOGETHER_MODELS = {
32
30
  "api_spec": "openai",
33
31
  "input_cost": 1.25,
34
32
  "output_cost": 1.25,
35
- "requests_per_minute": None,
36
- "tokens_per_minute": None,
37
33
  },
38
34
  "qwen-3-235b-together": {
39
35
  "id": "qwen-3-235b-together",
@@ -44,8 +40,6 @@ TOGETHER_MODELS = {
44
40
  "api_spec": "openai",
45
41
  "input_cost": 0.2,
46
42
  "output_cost": 0.6,
47
- "requests_per_minute": None,
48
- "tokens_per_minute": None,
49
43
  },
50
44
  "qwen-2.5-vl-together": {
51
45
  "id": "qwen-2.5-vl-together",
@@ -56,8 +50,6 @@ TOGETHER_MODELS = {
56
50
  "api_spec": "openai",
57
51
  "input_cost": 1.95,
58
52
  "output_cost": 8.0,
59
- "requests_per_minute": None,
60
- "tokens_per_minute": None,
61
53
  },
62
54
  "llama-4-maverick-together": {
63
55
  "id": "llama-4-maverick-together",
@@ -68,8 +60,6 @@ TOGETHER_MODELS = {
68
60
  "api_spec": "openai",
69
61
  "input_cost": 0.27,
70
62
  "output_cost": 0.85,
71
- "requests_per_minute": None,
72
- "tokens_per_minute": None,
73
63
  },
74
64
  "llama-4-scout-together": {
75
65
  "id": "llama-4-scout-together",
@@ -80,8 +70,6 @@ TOGETHER_MODELS = {
80
70
  "api_spec": "openai",
81
71
  "input_cost": 0.18,
82
72
  "output_cost": 0.59,
83
- "requests_per_minute": None,
84
- "tokens_per_minute": None,
85
73
  },
86
74
  "gpt-oss-120b-together": {
87
75
  "id": "gpt-oss-120b-together",
@@ -92,8 +80,6 @@ TOGETHER_MODELS = {
92
80
  "api_spec": "openai",
93
81
  "input_cost": 0.18,
94
82
  "output_cost": 0.59,
95
- "requests_per_minute": None,
96
- "tokens_per_minute": None,
97
83
  "reasoning_model": True,
98
84
  },
99
85
  "gpt-oss-20b-together": {
@@ -105,8 +91,6 @@ TOGETHER_MODELS = {
105
91
  "api_spec": "openai",
106
92
  "input_cost": 0.18,
107
93
  "output_cost": 0.59,
108
- "requests_per_minute": None,
109
- "tokens_per_minute": None,
110
94
  "reasoning_model": True,
111
95
  },
112
96
  }
lm_deluge/prompt.py CHANGED
@@ -2,7 +2,7 @@ import io
2
2
  import json
3
3
  from dataclasses import dataclass, field
4
4
  from pathlib import Path
5
- from typing import Literal, Sequence
5
+ from typing import Literal, Sequence, TypeAlias
6
6
 
7
7
  import tiktoken
8
8
  import xxhash
@@ -1495,9 +1495,21 @@ class Conversation:
1495
1495
  return cls(msgs)
1496
1496
 
1497
1497
 
1498
- def prompts_to_conversations(prompts: Sequence[str | list[dict] | Conversation]):
1499
- if any(isinstance(x, list) for x in prompts):
1500
- raise ValueError("can't convert list[dict] to conversation yet")
1501
- return [ # type: ignore
1502
- Conversation.user(p) if isinstance(p, str) else p for p in prompts
1503
- ]
1498
+ Prompt: TypeAlias = str | list[dict] | Message | Conversation
1499
+
1500
+
1501
+ def prompts_to_conversations(prompts: Sequence[Prompt]) -> Sequence[Prompt]:
1502
+ converted = []
1503
+ for prompt in prompts:
1504
+ if isinstance(prompt, Conversation):
1505
+ converted.append(prompt)
1506
+ elif isinstance(prompt, Message):
1507
+ converted.append(Conversation([prompt]))
1508
+ elif isinstance(prompt, str):
1509
+ converted.append(Conversation.user(prompt))
1510
+ elif isinstance(prompt, list):
1511
+ conv, provider = Conversation.from_unknown(prompt)
1512
+ converted.append(conv)
1513
+ else:
1514
+ raise ValueError(f"Unknown prompt type {type(prompt)}")
1515
+ return converted
@@ -26,28 +26,22 @@ class RequestContext:
26
26
 
27
27
  # Infrastructure
28
28
  status_tracker: StatusTracker | None = None
29
- results_arr: list[Any] | None = (
30
- None # list["APIRequestBase"] but avoiding circular import
31
- )
29
+ # avoiding circular import
30
+ results_arr: list[Any] | None = None # list["APIRequestBase"]
32
31
  callback: Callable | None = None
33
32
 
34
33
  # Optional features
35
34
  tools: list | None = None
36
35
  cache: CachePattern | None = None
37
36
  use_responses_api: bool = False
37
+ background: bool = False
38
+ service_tier: str | None = None
38
39
  extra_headers: dict[str, str] | None = None
40
+ extra_body: dict[str, Any] | None = None
39
41
  force_local_mcp: bool = False
40
42
 
41
43
  # Computed properties
42
44
  cache_key: str = field(init=False)
43
- # num_tokens: int = field(init=False)
44
-
45
- # def __post_init__(self):
46
- # # Compute cache key from prompt fingerprint
47
- # # self.cache_key = self.prompt.fingerprint
48
-
49
- # # Compute token count
50
- # self.num_tokens =
51
45
 
52
46
  @cached_property
53
47
  def num_tokens(self):
@@ -74,6 +68,10 @@ class RequestContext:
74
68
  "tools": self.tools,
75
69
  "cache": self.cache,
76
70
  "use_responses_api": self.use_responses_api,
71
+ "background": self.background,
72
+ "service_tier": self.service_tier,
73
+ "extra_headers": self.extra_headers,
74
+ "extra_body": self.extra_body,
77
75
  "force_local_mcp": self.force_local_mcp,
78
76
  }
79
77
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: lm_deluge
3
- Version: 0.0.57
3
+ Version: 0.0.59
4
4
  Summary: Python utility for using LLM API models.
5
5
  Author-email: Benjamin Anderson <ben@trytaylor.ai>
6
6
  Requires-Python: >=3.10
@@ -1,27 +1,27 @@
1
1
  lm_deluge/__init__.py,sha256=LKKIcqQoQyDpTck6fnB7iAs75BnfNNa3Bj5Nz7KU4Hk,376
2
- lm_deluge/batches.py,sha256=rQocJLyIs3Ko_nRdAE9jT__5cKWYxiIRAH_Lw3L0E1k,24653
2
+ lm_deluge/batches.py,sha256=Km6QM5_7BlF2qEyo4WPlhkaZkpzrLqf50AaveHXQOoY,25127
3
3
  lm_deluge/cache.py,sha256=xO2AIYvP3tUpTMKQjwQQYfGRJSRi6e7sMlRhLjsS-u4,4873
4
4
  lm_deluge/cli.py,sha256=Ilww5gOw3J5v0NReq_Ra4hhxU4BCIJBl1oTGxJZKedc,12065
5
- lm_deluge/client.py,sha256=GaHS54c2_MFn3AcUQPZOYrwKpnYYud_OfQIfwx3BAWU,32423
5
+ lm_deluge/client.py,sha256=jDXGC032MmBfAFDHdWNm23gdDP9pCiNeU-wIi9RCG5g,33616
6
6
  lm_deluge/config.py,sha256=H1tQyJDNHGFuwxqQNL5Z-CjWAC0luHSBA3iY_pxmACM,932
7
7
  lm_deluge/embed.py,sha256=CO-TOlC5kOTAM8lcnicoG4u4K664vCBwHF1vHa-nAGg,13382
8
8
  lm_deluge/errors.py,sha256=oHjt7YnxWbh-eXMScIzov4NvpJMo0-2r5J6Wh5DQ1tk,209
9
9
  lm_deluge/file.py,sha256=FGomcG8s2go_55Z2CChflHgmU-UqgFftgFY8c7f_G70,5631
10
10
  lm_deluge/image.py,sha256=5AMXmn2x47yXeYNfMSMAOWcnlrOxxOel-4L8QCJwU70,8928
11
- lm_deluge/prompt.py,sha256=RgZBcCiAtThqjILkPa4X530sR53SUK03U-6TWWk07tc,59607
12
- lm_deluge/request_context.py,sha256=o33LSEwnK6YPhZeulUoSE_VrdKCXiCQa0tjjixK2K6M,2540
11
+ lm_deluge/prompt.py,sha256=fm-wUkf5YMz1NXwFTlzjckwxoWW7cXhN2Z01zrQPO5E,60001
12
+ lm_deluge/request_context.py,sha256=cBayMFWupWhde2OjRugW3JH-Gin-WFGc6DK2Mb4Prdc,2576
13
13
  lm_deluge/rerank.py,sha256=-NBAJdHz9OB-SWWJnHzkFmeVO4wR6lFV7Vw-SxG7aVo,11457
14
14
  lm_deluge/tool.py,sha256=eZpzgkSIlGD7KdZQwzLF-UdyRJpRnNNXpceGJrNhRrE,26421
15
15
  lm_deluge/tracker.py,sha256=aeS9GUJpgOSQRVXAnGDvlMO8qYpSxpTNLYj2hrMg0m8,14757
16
16
  lm_deluge/usage.py,sha256=xz9tAw2hqaJvv9aAVhnQ6N1Arn7fS8Shb28VwCW26wI,5136
17
17
  lm_deluge/api_requests/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
18
- lm_deluge/api_requests/anthropic.py,sha256=Iy-AMo1o7xliwWhamFIUc601PZ0YWLhwAgszgfws42I,8467
19
- lm_deluge/api_requests/base.py,sha256=1et-5SdRqfnvXZT3b9fBEx0vvbCwbVunHBWtQr7Wurg,5878
18
+ lm_deluge/api_requests/anthropic.py,sha256=7tTb_NMPodDHrCzakrLd9LyXuLqeTQyAGU-FvMoV3gI,8437
19
+ lm_deluge/api_requests/base.py,sha256=GCcydwBRx4_xAuYLvasXlyj-TgqvKAVhVvxRfJkvPbY,9471
20
20
  lm_deluge/api_requests/bedrock.py,sha256=GmVxXz3ERAeQ7e52Nlztt81O4H9eJOQeOnS6b65vjm4,15453
21
21
  lm_deluge/api_requests/common.py,sha256=BZ3vRO5TB669_UsNKugkkuFSzoLHOYJIKt4nV4sf4vc,422
22
22
  lm_deluge/api_requests/gemini.py,sha256=COHqPWmeaq9fpg0YwOZqQTUbijKnXNF4cvMLnW9kLl8,7857
23
23
  lm_deluge/api_requests/mistral.py,sha256=S_LpOfCGbCVEROH_od3P-tYeNYTKFMamMTL-c_wFCBI,4597
24
- lm_deluge/api_requests/openai.py,sha256=frxSdQn9ZAAweSO-HMKRZ6gKU3Wdl1PqTVPhwy-iNA8,23202
24
+ lm_deluge/api_requests/openai.py,sha256=_da5n2FECjzKFj0fD9BzSUm2E_E0tSgGAMBk9mHOBjc,24908
25
25
  lm_deluge/api_requests/response.py,sha256=vG194gAH5p7ulpNy4qy5Pryfb1p3ZV21-YGoj__ru3E,7436
26
26
  lm_deluge/api_requests/deprecated/bedrock.py,sha256=WrcIShCoO8JCUSlFOCHxg6KQCNTZfw3TpYTvSpYk4mA,11320
27
27
  lm_deluge/api_requests/deprecated/cohere.py,sha256=KgDScD6_bWhAzOY5BHZQKSA3kurt4KGENqC4wLsGmcU,5142
@@ -41,21 +41,21 @@ lm_deluge/llm_tools/locate.py,sha256=lYNbKTmy9dTvj0lEQkOQ7yrxyqsgYzjD0C_byJKI_4w
41
41
  lm_deluge/llm_tools/ocr.py,sha256=7fDlvs6uUOvbxMasvGGNJx5Fj6biM6z3lijKZaGN26k,23
42
42
  lm_deluge/llm_tools/score.py,sha256=9oGA3-k2U5buHQXkXaEI9M4Wb5yysNhTLsPbGeghAlQ,2580
43
43
  lm_deluge/llm_tools/translate.py,sha256=iXyYvQZ8bC44FWhBk4qpdqjKM1WFF7Shq-H2PxhPgg4,1452
44
- lm_deluge/models/__init__.py,sha256=qlpGDoTC89dKOFW3KxLUMiHCg_OzpRYHyrCt0OiSW7c,4525
45
- lm_deluge/models/anthropic.py,sha256=qAsykXPDz0dK8o4h9vP1QtO-3am3VDzhsYBl4YhEsds,6734
46
- lm_deluge/models/bedrock.py,sha256=PIaXvho2agCm1hSSAEy8zHCITjApXT2eUOGDKW425tE,5424
44
+ lm_deluge/models/__init__.py,sha256=a2xzQNG2axdMaSzoLbzdOKBM5EVOLztvlo8E1k-brqM,4516
45
+ lm_deluge/models/anthropic.py,sha256=5j75sB40yZzT1wwKC7Dh0f2Y2cXnp8yxHuXW63PCuns,6285
46
+ lm_deluge/models/bedrock.py,sha256=g1PbfceSRH2lWST3ja0mUlF3oTq4e4T-si6RMe7qXgg,4888
47
47
  lm_deluge/models/cerebras.py,sha256=u2FMXJF6xMr0euDRKLKMo_NVTOcvSrrEpehbHr8sSeE,2050
48
- lm_deluge/models/cohere.py,sha256=M_7cVA9QD4qe1X4sZXCpKEkKrKz2jibaspiTnzsZ1GU,3998
48
+ lm_deluge/models/cohere.py,sha256=iXjYtM6jy_YL73Op8OfNsrMNopwae9y-Sw-4vF9cEBw,3406
49
49
  lm_deluge/models/deepseek.py,sha256=6_jDEprNNYis5I5MDQNloRes9h1P6pMYHXxOd2UZMgg,941
50
50
  lm_deluge/models/fireworks.py,sha256=yvt2Ggzye4aUqCqY74ta67Vu7FrQaLFjdFtN4P7D-dc,638
51
- lm_deluge/models/google.py,sha256=_spZkMBuUkWTHhb_Z7_Nq75l_3QF7aUtlk-Wyh6pWEI,6117
52
- lm_deluge/models/grok.py,sha256=aInkUSclXE47Lm4PKiP3OebAP9V-GOZwK-Eiis4zVow,1199
53
- lm_deluge/models/groq.py,sha256=djBs9N8LpzE0BQSb4KiY6F06B4f8csn-fB_5wfQTpNU,2548
54
- lm_deluge/models/meta.py,sha256=m6HPR82TJONYTTWkQw5EKmITMxoWzrfYOuNgFnGaRX8,2195
51
+ lm_deluge/models/google.py,sha256=Hr2MolQoaeY85pKCGO7k7OH_1nQJdrwMgrJbfz5bI8w,5387
52
+ lm_deluge/models/grok.py,sha256=TDzr8yfTaHbdJhwMA-Du6L-efaKFJhjTQViuVElCCHI,2566
53
+ lm_deluge/models/groq.py,sha256=Mi5WE1xOBGoZlymD0UN6kzhH_NOmfJYU4N2l-TO0Z8Q,2552
54
+ lm_deluge/models/meta.py,sha256=BBgnscL1gMcIdPbRqrlDl_q9YAYGSrkw9JkAIabXtLs,1883
55
55
  lm_deluge/models/mistral.py,sha256=x67o5gckBGmPcIGdVbS26XZAYFKBYM4tsxEAahGp8bk,4323
56
- lm_deluge/models/openai.py,sha256=vp-VcTi21N7M-Lvohx4RFkvqCl-L-UwwWH0A8GwYoX8,11452
57
- lm_deluge/models/openrouter.py,sha256=aAgBT5_TZQtUPQyNn-Bob6NGyrlFOclnxIb0F53pgvA,23
58
- lm_deluge/models/together.py,sha256=RCZoYAb8OVxdH9uwXnv47TDTGzC30P-FZoDbiBE23_g,4957
56
+ lm_deluge/models/openai.py,sha256=HC_oNLmKkmShkcfeUgyhesACtXGg__I2WiIIDrN-X84,10176
57
+ lm_deluge/models/openrouter.py,sha256=O-Po4tmHjAqFIVU96TUL0QnK01R4e2yDN7Z4sYJ-CuE,2120
58
+ lm_deluge/models/together.py,sha256=AjKhPsazqBgqyLwHkNQW07COM1n_oSrYQRp2BFVvn9o,4381
59
59
  lm_deluge/presets/cerebras.py,sha256=MDkqj15qQRrj8wxSCDNNe_Cs7h1WN1UjV6lTmSY1olQ,479
60
60
  lm_deluge/presets/meta.py,sha256=QrreLAVgYS6VIC_NQth1vgGAYuxY38jFQQZSe6ot7C8,364
61
61
  lm_deluge/util/harmony.py,sha256=XBfJck6q-5HbOqMhEjdfy1i17i0QtpHG8ruXV4EsHl0,2731
@@ -64,8 +64,8 @@ lm_deluge/util/logprobs.py,sha256=UkBZakOxWluaLqHrjARu7xnJ0uCHVfLGHJdnYlEcutk,11
64
64
  lm_deluge/util/spatial.py,sha256=BsF_UKhE-x0xBirc-bV1xSKZRTUhsOBdGqsMKme20C8,4099
65
65
  lm_deluge/util/validation.py,sha256=hz5dDb3ebvZrZhnaWxOxbNSVMI6nmaOODBkk0htAUhs,1575
66
66
  lm_deluge/util/xml.py,sha256=Ft4zajoYBJR3HHCt2oHwGfymGLdvp_gegVmJ-Wqk4Ck,10547
67
- lm_deluge-0.0.57.dist-info/licenses/LICENSE,sha256=uNNXGXPCw2TC7CUs7SEBkA-Mz6QBQFWUUEWDMgEs1dU,1058
68
- lm_deluge-0.0.57.dist-info/METADATA,sha256=HqZgYHU7B-Qkab-tAFUMmuYG0hwhIHVGn6inJK7Poe8,13443
69
- lm_deluge-0.0.57.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
70
- lm_deluge-0.0.57.dist-info/top_level.txt,sha256=hqU-TJX93yBwpgkDtYcXyLr3t7TLSCCZ_reytJjwBaE,10
71
- lm_deluge-0.0.57.dist-info/RECORD,,
67
+ lm_deluge-0.0.59.dist-info/licenses/LICENSE,sha256=uNNXGXPCw2TC7CUs7SEBkA-Mz6QBQFWUUEWDMgEs1dU,1058
68
+ lm_deluge-0.0.59.dist-info/METADATA,sha256=WKLfnV3lKGr1gkfEEyDhbp4oGMsu30LRXs0zPLRsdsk,13443
69
+ lm_deluge-0.0.59.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
70
+ lm_deluge-0.0.59.dist-info/top_level.txt,sha256=hqU-TJX93yBwpgkDtYcXyLr3t7TLSCCZ_reytJjwBaE,10
71
+ lm_deluge-0.0.59.dist-info/RECORD,,