lm-deluge 0.0.35__py3-none-any.whl → 0.0.36__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of lm-deluge might be problematic; see the registry's advisory page for details.

@@ -0,0 +1,112 @@
# Together AI model registry (lm_deluge/models/together.py).
#
# These models are mainly attractive on price; per the original author's note,
# they all trail Claude Haiku on quality, so cost is the main reason to use them.


def _together_model(
    model_id: str,
    name: str,
    input_cost: float,
    output_cost: float,
    *,
    reasoning_model: bool = False,
) -> dict:
    """Build one Together AI registry entry.

    Every Together model shares the same API base URL, API-key environment
    variable, OpenAI-compatible API spec, and (lack of) JSON-mode support;
    only the route name, pricing, and the reasoning flag vary per model.

    Args:
        model_id: Registry key, repeated in the entry's "id" field.
        name: Provider-side model identifier sent to the API.
        input_cost: Dollars per million input tokens.
        output_cost: Dollars per million output tokens.
        reasoning_model: When True, the entry carries "reasoning_model": True
            (set only for the gpt-oss family in this registry).

    Returns:
        A dict with the same keys, values, and key order as the original
        hand-written entries.
    """
    entry: dict = {
        "id": model_id,
        "name": name,
        "api_base": "https://api.together.xyz/v1",
        "api_key_env_var": "TOGETHER_API_KEY",
        "supports_json": False,
        "api_spec": "openai",
        "input_cost": input_cost,
        "output_cost": output_cost,
        # No per-model rate limits recorded for Together; callers treat None
        # as "unknown/unlimited".
        "requests_per_minute": None,
        "tokens_per_minute": None,
    }
    if reasoning_model:
        # Appended last to preserve the original key order of these entries.
        entry["reasoning_model"] = True
    return entry


TOGETHER_MODELS = {
    "deepseek-r1-together": _together_model(
        "deepseek-r1-together", "deepseek-ai/DeepSeek-R1", 3.0, 7.0
    ),
    "deepseek-v3-together": _together_model(
        "deepseek-v3-together", "deepseek-ai/DeepSeek-V3", 1.25, 1.25
    ),
    "qwen-3-235b-together": _together_model(
        "qwen-3-235b-together", "Qwen/Qwen3-235B-A22B-fp8", 0.2, 0.6
    ),
    "qwen-2.5-vl-together": _together_model(
        "qwen-2.5-vl-together", "Qwen/Qwen2.5-VL-72B-Instruct", 1.95, 8.0
    ),
    "llama-4-maverick-together": _together_model(
        "llama-4-maverick-together",
        "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",
        0.27,
        0.85,
    ),
    "llama-4-scout-together": _together_model(
        "llama-4-scout-together",
        "meta-llama/Llama-4-Scout-17B-16E-Instruct",
        0.18,
        0.59,
    ),
    "gpt-oss-120b-together": _together_model(
        "gpt-oss-120b-together",
        "openai/gpt-oss-120b",
        0.18,
        0.59,
        reasoning_model=True,
    ),
    "gpt-oss-20b-together": _together_model(
        "gpt-oss-20b-together",
        "openai/gpt-oss-20b",
        0.18,
        0.59,
        reasoning_model=True,
    ),
}
lm_deluge/prompt.py CHANGED
@@ -654,11 +654,11 @@ class Conversation:
654
654
  pass
655
655
 
656
656
  # fluent additions
657
- def add(self, msg: Message) -> "Conversation":
657
+ def with_message(self, msg: Message) -> "Conversation":
658
658
  self.messages.append(msg)
659
659
  return self
660
660
 
661
- def add_tool_result(
661
+ def with_tool_result(
662
662
  self, tool_call_id: str, result: str | list[ToolResultPart]
663
663
  ) -> "Conversation":
664
664
  """Add a tool result to the conversation.
lm_deluge/util/harmony.py CHANGED
@@ -9,9 +9,10 @@ import copy
9
9
  from lm_deluge.api_requests.response import APIResponse
10
10
  from lm_deluge.prompt import Text, Thinking
11
11
 
12
- SAMPLE_INPUT = '''
12
+ SAMPLE_INPUT = """
13
13
  <|channel|>analysis<|message|>We need to respond as a helpful assistant. The user says "who are you and what do you want with my family?" This is a normal question. We should answer that we are ChatGPT, an AI language model, and we don't want anything with their family. We reassure them.<|start|>assistant<|channel|>final<|message|>I’m ChatGPT, a large language‑model AI created by OpenAI. I don’t have personal intentions or desires, and I’m not able to interact with anyone outside of this chat. My only goal here is to provide information, answer questions, and help you with whatever you need—nothing more, nothing less. If you have any concerns or need help with something specific, just let me know!
14
- '''.strip()
14
+ """.strip()
15
+
15
16
 
16
17
  def _split_messages(response: str):
17
18
  raw_messages = response.split("<|start|>")
@@ -23,13 +24,14 @@ def _split_messages(response: str):
23
24
 
24
25
  return messages
25
26
 
27
+
26
28
  def postprocess_harmony(response: APIResponse) -> APIResponse:
27
29
  if not response.content:
28
30
  return response
29
31
 
30
32
  parts = response.content.parts
31
33
  assert len(parts) == 1, "expected 1 parts to convert harmony"
32
- text = parts[0].text # type: ignore
34
+ text = parts[0].text # type: ignore
33
35
  messages = _split_messages(text)
34
36
 
35
37
  new_parts = []
@@ -40,6 +42,6 @@ def postprocess_harmony(response: APIResponse) -> APIResponse:
40
42
  new_parts.append(Text(text=content))
41
43
 
42
44
  new_response = copy.deepcopy(response)
43
- new_response.content.parts = new_parts # type: ignore
45
+ new_response.content.parts = new_parts # type: ignore
44
46
 
45
47
  return new_response
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: lm_deluge
3
- Version: 0.0.35
3
+ Version: 0.0.36
4
4
  Summary: Python utility for using LLM API models.
5
5
  Author-email: Benjamin Anderson <ben@trytaylor.ai>
6
6
  Requires-Python: >=3.10
@@ -2,28 +2,29 @@ lm_deluge/__init__.py,sha256=mAztMuxINmh7dGbYnT8tsmw1eryQAvd0jpY8yHzd0EE,315
2
2
  lm_deluge/agent.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
3
3
  lm_deluge/batches.py,sha256=vJXVnuuGkIQnXoDPODPERrvdG9X1Ov1jnXExnPe6ZAc,21772
4
4
  lm_deluge/cache.py,sha256=VB1kv8rM2t5XWPR60uhszFcxLDnVKOe1oA5hYjVDjIo,4375
5
- lm_deluge/client.py,sha256=K8uMnI_XqR3UKOz9MWR59aqbyD3EfJENfI88nQyOtKA,32569
5
+ lm_deluge/cli.py,sha256=Ilww5gOw3J5v0NReq_Ra4hhxU4BCIJBl1oTGxJZKedc,12065
6
+ lm_deluge/client.py,sha256=YKZB8oJx58n8Q5kLV6hT1HeYgxvZGro5RQVH9idqJMU,32576
6
7
  lm_deluge/config.py,sha256=H1tQyJDNHGFuwxqQNL5Z-CjWAC0luHSBA3iY_pxmACM,932
7
8
  lm_deluge/embed.py,sha256=CO-TOlC5kOTAM8lcnicoG4u4K664vCBwHF1vHa-nAGg,13382
8
9
  lm_deluge/errors.py,sha256=oHjt7YnxWbh-eXMScIzov4NvpJMo0-2r5J6Wh5DQ1tk,209
9
10
  lm_deluge/file.py,sha256=FGomcG8s2go_55Z2CChflHgmU-UqgFftgFY8c7f_G70,5631
10
11
  lm_deluge/gemini_limits.py,sha256=V9mpS9JtXYz7AY6OuKyQp5TuIMRH1BVv9YrSNmGmHNA,1569
11
12
  lm_deluge/image.py,sha256=Qpa0k5yXfrpSaHzVUwW_TEn7yEgmwzYGL17Sa7-KhSA,7729
12
- lm_deluge/prompt.py,sha256=cfwzCAmT-1K0v7SfEMUrxpBkJGgf7IFlWfNLJrCcoBM,37025
13
+ lm_deluge/prompt.py,sha256=gRGu_9wWWMusM7sf-YCdotcZUt1Cj_h_1_6oyS7XTYM,37035
13
14
  lm_deluge/request_context.py,sha256=o33LSEwnK6YPhZeulUoSE_VrdKCXiCQa0tjjixK2K6M,2540
14
15
  lm_deluge/rerank.py,sha256=-NBAJdHz9OB-SWWJnHzkFmeVO4wR6lFV7Vw-SxG7aVo,11457
15
16
  lm_deluge/tool.py,sha256=_coOKB9nPNVZoseMRumRyQ8BMR7_d0IlstzMHNT69JY,15732
16
17
  lm_deluge/tracker.py,sha256=rTOjPEwaNczNz9MKDGayPNdmDZOpIWvLll7uz0CloVU,11533
17
18
  lm_deluge/usage.py,sha256=VMEKghePFIID5JFBObqYxFpgYxnbYm_dnHy7V1-_T6M,4866
18
19
  lm_deluge/api_requests/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
19
- lm_deluge/api_requests/anthropic.py,sha256=qUlom5eM78TI_CyH_SR87D1MCWl8Gbr-oLVucoVsw7I,8159
20
+ lm_deluge/api_requests/anthropic.py,sha256=J5BzYV7aYNoL6FPArB6usyS267z1BguZTRY5JLMd0So,8159
20
21
  lm_deluge/api_requests/base.py,sha256=EVHNFtlttKbN7Tt1MnLaO-NjvKHPSV5CqlRv-OnpVAE,5593
21
22
  lm_deluge/api_requests/bedrock.py,sha256=FZMhF590JzJtAYDugbDtG93RhPt5efWZ0Wn4V8U8Dgw,11031
22
23
  lm_deluge/api_requests/common.py,sha256=BZ3vRO5TB669_UsNKugkkuFSzoLHOYJIKt4nV4sf4vc,422
23
- lm_deluge/api_requests/gemini.py,sha256=pZI_Vm6AY5x1JGPj5kftggC3q90EzuwZ-It3CTe-PEc,7819
24
+ lm_deluge/api_requests/gemini.py,sha256=COHqPWmeaq9fpg0YwOZqQTUbijKnXNF4cvMLnW9kLl8,7857
24
25
  lm_deluge/api_requests/mistral.py,sha256=S_LpOfCGbCVEROH_od3P-tYeNYTKFMamMTL-c_wFCBI,4597
25
- lm_deluge/api_requests/openai.py,sha256=3-TyTVwjrA8HoPpXiMFRn2_yqdSR_MSTUZ1XL53BVRA,22316
26
- lm_deluge/api_requests/response.py,sha256=FtkVYk_rDH93Kj9pqbB-l7a4dQHzVr6ivKL9khYKLbs,5966
26
+ lm_deluge/api_requests/openai.py,sha256=FL_UCELdkaf_GZIBPViLdNcUwPMwqvEKj9mMcH72Nmc,22346
27
+ lm_deluge/api_requests/response.py,sha256=Zc9kxBqB4JJIFR6OhXW-BS3ulK5JygE75JNBEpKgn5Q,5989
27
28
  lm_deluge/api_requests/deprecated/bedrock.py,sha256=WrcIShCoO8JCUSlFOCHxg6KQCNTZfw3TpYTvSpYk4mA,11320
28
29
  lm_deluge/api_requests/deprecated/cohere.py,sha256=KgDScD6_bWhAzOY5BHZQKSA3kurt4KGENqC4wLsGmcU,5142
29
30
  lm_deluge/api_requests/deprecated/deepseek.py,sha256=FEApI93VAWDwuaqTooIyKMgONYqRhdUmiAPBRme-IYs,4582
@@ -42,15 +43,29 @@ lm_deluge/llm_tools/locate.py,sha256=lYNbKTmy9dTvj0lEQkOQ7yrxyqsgYzjD0C_byJKI_4w
42
43
  lm_deluge/llm_tools/ocr.py,sha256=7fDlvs6uUOvbxMasvGGNJx5Fj6biM6z3lijKZaGN26k,23
43
44
  lm_deluge/llm_tools/score.py,sha256=9oGA3-k2U5buHQXkXaEI9M4Wb5yysNhTLsPbGeghAlQ,2580
44
45
  lm_deluge/llm_tools/translate.py,sha256=iXyYvQZ8bC44FWhBk4qpdqjKM1WFF7Shq-H2PxhPgg4,1452
45
- lm_deluge/models/__init__.py,sha256=1mnLb8t3_Dsg9UqIHenBlg9d3wkxBDluiKNjwHn23oQ,54503
46
- lm_deluge/util/harmony.py,sha256=Y_mHyJnswE54jZgjoJOclONh6P18joWOpb30F3_GyFU,2727
46
+ lm_deluge/models/__init__.py,sha256=cwVidzBhMS6B9zOemDf0rAUHo8rHVum8XRE7z5Gi4F4,4278
47
+ lm_deluge/models/anthropic.py,sha256=3pW7fyBY9Xh1m1RtfncU9amWTtKnjGZD0STjpu8iUSQ,5700
48
+ lm_deluge/models/bedrock.py,sha256=jpb_n-Wh3G3VAKZn7U1t5r5IQ2oTDXwrjGIP013l2cI,4534
49
+ lm_deluge/models/cerebras.py,sha256=5Mp1rLWKRLXKpfk9Ef-ydmcp8ffQlNXbp3Zg8sh1pEs,2017
50
+ lm_deluge/models/cohere.py,sha256=M_7cVA9QD4qe1X4sZXCpKEkKrKz2jibaspiTnzsZ1GU,3998
51
+ lm_deluge/models/deepseek.py,sha256=6_jDEprNNYis5I5MDQNloRes9h1P6pMYHXxOd2UZMgg,941
52
+ lm_deluge/models/fireworks.py,sha256=4D0LUgl1QBISGGA9qC62858glju2RRO0uP6X8QRAa4Y,572
53
+ lm_deluge/models/google.py,sha256=PWKVf6HLt9m_HSTvVavZ8BlriQBEtS47ir3jBvetkaQ,5823
54
+ lm_deluge/models/grok.py,sha256=aInkUSclXE47Lm4PKiP3OebAP9V-GOZwK-Eiis4zVow,1199
55
+ lm_deluge/models/groq.py,sha256=BHuBNUpcjsTpwXbnKVfmZf7oef81U48IymR_isMCzvo,2482
56
+ lm_deluge/models/meta.py,sha256=m6HPR82TJONYTTWkQw5EKmITMxoWzrfYOuNgFnGaRX8,2195
57
+ lm_deluge/models/mistral.py,sha256=x67o5gckBGmPcIGdVbS26XZAYFKBYM4tsxEAahGp8bk,4323
58
+ lm_deluge/models/openai.py,sha256=q3IqHldFJjRz-jxT2NoQW9t1_c_BGLd72d1HZlxXiLA,11100
59
+ lm_deluge/models/openrouter.py,sha256=aAgBT5_TZQtUPQyNn-Bob6NGyrlFOclnxIb0F53pgvA,23
60
+ lm_deluge/models/together.py,sha256=RCZoYAb8OVxdH9uwXnv47TDTGzC30P-FZoDbiBE23_g,4957
61
+ lm_deluge/util/harmony.py,sha256=XBfJck6q-5HbOqMhEjdfy1i17i0QtpHG8ruXV4EsHl0,2731
47
62
  lm_deluge/util/json.py,sha256=_4Oar2Cmz2L1DK3EtPLPDxD6rsYHxjROmV8ZpmMjQ-4,5822
48
63
  lm_deluge/util/logprobs.py,sha256=UkBZakOxWluaLqHrjARu7xnJ0uCHVfLGHJdnYlEcutk,11768
49
64
  lm_deluge/util/spatial.py,sha256=BsF_UKhE-x0xBirc-bV1xSKZRTUhsOBdGqsMKme20C8,4099
50
65
  lm_deluge/util/validation.py,sha256=hz5dDb3ebvZrZhnaWxOxbNSVMI6nmaOODBkk0htAUhs,1575
51
66
  lm_deluge/util/xml.py,sha256=Ft4zajoYBJR3HHCt2oHwGfymGLdvp_gegVmJ-Wqk4Ck,10547
52
- lm_deluge-0.0.35.dist-info/licenses/LICENSE,sha256=uNNXGXPCw2TC7CUs7SEBkA-Mz6QBQFWUUEWDMgEs1dU,1058
53
- lm_deluge-0.0.35.dist-info/METADATA,sha256=QwWZx4ZICOZ2qmSXqSctYRwgzIpDnIY4yN90dA09vi4,13295
54
- lm_deluge-0.0.35.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
55
- lm_deluge-0.0.35.dist-info/top_level.txt,sha256=hqU-TJX93yBwpgkDtYcXyLr3t7TLSCCZ_reytJjwBaE,10
56
- lm_deluge-0.0.35.dist-info/RECORD,,
67
+ lm_deluge-0.0.36.dist-info/licenses/LICENSE,sha256=uNNXGXPCw2TC7CUs7SEBkA-Mz6QBQFWUUEWDMgEs1dU,1058
68
+ lm_deluge-0.0.36.dist-info/METADATA,sha256=VB8l79DLczVf9_yr9WZRnxdvw5qq55grpBrhBkz-NUs,13295
69
+ lm_deluge-0.0.36.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
70
+ lm_deluge-0.0.36.dist-info/top_level.txt,sha256=hqU-TJX93yBwpgkDtYcXyLr3t7TLSCCZ_reytJjwBaE,10
71
+ lm_deluge-0.0.36.dist-info/RECORD,,