lm-deluge 0.0.62__py3-none-any.whl → 0.0.63__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in the supported public registries; it is provided for informational purposes only.

Potentially problematic release: this version of lm-deluge might be problematic (details are available on the registry page).

lm_deluge/client.py CHANGED
@@ -70,7 +70,7 @@ class _LLMClient(BaseModel):
     top_p: float = 1.0
     json_mode: bool = False
     max_new_tokens: int = 512
-    reasoning_effort: Literal["low", "medium", "high", None] = None
+    reasoning_effort: Literal["low", "medium", "high", "minimal", "none", None] = None
     logprobs: bool = False
     top_logprobs: int | None = None
     force_local_mcp: bool = False
@@ -957,7 +957,7 @@ def LLMClient(
     top_p: float = 1.0,
     json_mode: bool = False,
     max_new_tokens: int = 512,
-    reasoning_effort: Literal["low", "medium", "high", None] = None,
+    reasoning_effort: Literal["low", "medium", "high", "minimal", "none", None] = None,
     logprobs: bool = False,
     top_logprobs: int | None = None,
     force_local_mcp: bool = False,
@@ -986,7 +986,7 @@ def LLMClient(
     top_p: float = 1.0,
     json_mode: bool = False,
     max_new_tokens: int = 512,
-    reasoning_effort: Literal["low", "medium", "high", None] = None,
+    reasoning_effort: Literal["low", "medium", "high", "minimal", "none", None] = None,
     logprobs: bool = False,
     top_logprobs: int | None = None,
     force_local_mcp: bool = False,
@@ -1014,7 +1014,7 @@ def LLMClient(
     top_p: float = 1.0,
     json_mode: bool = False,
     max_new_tokens: int = 512,
-    reasoning_effort: Literal["low", "medium", "high", None] = None,
+    reasoning_effort: Literal["low", "medium", "high", "minimal", "none", None] = None,
     logprobs: bool = False,
     top_logprobs: int | None = None,
     force_local_mcp: bool = False,
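
The functional change in this release is the widening of the reasoning_effort annotation on _LLMClient and the LLMClient overloads: "minimal" and "none" are now accepted alongside "low", "medium", and "high". The sketch below is a self-contained illustration of what that widening means for pydantic validation; it mirrors only the reasoning_effort field from the diff, and the NewStyle/OldStyle model names are made up for the example, not part of lm_deluge.

from typing import Literal
from pydantic import BaseModel, ValidationError

# Mirrors the 0.0.63 annotation on _LLMClient.reasoning_effort.
class NewStyle(BaseModel):
    reasoning_effort: Literal["low", "medium", "high", "minimal", "none", None] = None

# Mirrors the 0.0.62 annotation, which lacked "minimal" and "none".
class OldStyle(BaseModel):
    reasoning_effort: Literal["low", "medium", "high", None] = None

NewStyle(reasoning_effort="minimal")      # accepted under the 0.0.63 annotation
try:
    OldStyle(reasoning_effort="minimal")  # rejected under the 0.0.62 annotation
except ValidationError as exc:
    print(exc)                            # pydantic reports the value is not a permitted literal
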
lm_deluge/config.py CHANGED
@@ -8,7 +8,7 @@ class SamplingParams(BaseModel):
     top_p: float = 1.0
     json_mode: bool = False
     max_new_tokens: int = 512
-    reasoning_effort: Literal["low", "medium", "high", "none", None] = None
+    reasoning_effort: Literal["low", "medium", "high", "minimal", "none", None] = None
     logprobs: bool = False
     top_logprobs: int | None = None
 
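
SamplingParams in config.py gets the same widening; note that its 0.0.62 annotation already included "none", so the only new member here is "minimal". A minimal usage sketch, assuming the SamplingParams fields not shown in this hunk (e.g. temperature) also have defaults so the model can be constructed from keyword arguments alone:

from lm_deluge.config import SamplingParams

# "minimal" is newly accepted by the reasoning_effort Literal in 0.0.63;
# all other sampling fields are left at their defaults.
params = SamplingParams(reasoning_effort="minimal")
print(params.reasoning_effort)
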
lm_deluge-0.0.62.dist-info/METADATA → lm_deluge-0.0.63.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lm_deluge
-Version: 0.0.62
+Version: 0.0.63
 Summary: Python utility for using LLM API models.
 Author-email: Benjamin Anderson <ben@trytaylor.ai>
 Requires-Python: >=3.10
lm_deluge-0.0.62.dist-info/RECORD → lm_deluge-0.0.63.dist-info/RECORD CHANGED
@@ -2,8 +2,8 @@ lm_deluge/__init__.py,sha256=LKKIcqQoQyDpTck6fnB7iAs75BnfNNa3Bj5Nz7KU4Hk,376
 lm_deluge/batches.py,sha256=Km6QM5_7BlF2qEyo4WPlhkaZkpzrLqf50AaveHXQOoY,25127
 lm_deluge/cache.py,sha256=xO2AIYvP3tUpTMKQjwQQYfGRJSRi6e7sMlRhLjsS-u4,4873
 lm_deluge/cli.py,sha256=Ilww5gOw3J5v0NReq_Ra4hhxU4BCIJBl1oTGxJZKedc,12065
-lm_deluge/client.py,sha256=TKRN1KAMOgtQFLazh_iyj185GBHtP7r8KAU4lod-qfs,40693
-lm_deluge/config.py,sha256=H1tQyJDNHGFuwxqQNL5Z-CjWAC0luHSBA3iY_pxmACM,932
+lm_deluge/client.py,sha256=PSwcbT1nAzyuxMiDXPGNZZzaJd_YJM1qXDTBYvXYDSc,40769
+lm_deluge/config.py,sha256=s3wFBRD6pi0wtXMJRmQDT2vdiqSvhjUPmLehbkv41i0,943
 lm_deluge/embed.py,sha256=CO-TOlC5kOTAM8lcnicoG4u4K664vCBwHF1vHa-nAGg,13382
 lm_deluge/errors.py,sha256=oHjt7YnxWbh-eXMScIzov4NvpJMo0-2r5J6Wh5DQ1tk,209
 lm_deluge/file.py,sha256=PTmlJQ-IaYcYUFun9V0bJ1NPVP84edJrR0hvCMWFylY,19697
@@ -65,8 +65,8 @@ lm_deluge/util/logprobs.py,sha256=UkBZakOxWluaLqHrjARu7xnJ0uCHVfLGHJdnYlEcutk,11
 lm_deluge/util/spatial.py,sha256=BsF_UKhE-x0xBirc-bV1xSKZRTUhsOBdGqsMKme20C8,4099
 lm_deluge/util/validation.py,sha256=hz5dDb3ebvZrZhnaWxOxbNSVMI6nmaOODBkk0htAUhs,1575
 lm_deluge/util/xml.py,sha256=Ft4zajoYBJR3HHCt2oHwGfymGLdvp_gegVmJ-Wqk4Ck,10547
-lm_deluge-0.0.62.dist-info/licenses/LICENSE,sha256=uNNXGXPCw2TC7CUs7SEBkA-Mz6QBQFWUUEWDMgEs1dU,1058
-lm_deluge-0.0.62.dist-info/METADATA,sha256=OxyZBmmED2qbq0Aizaj6Hatrzg57qs0amHHTManmBrI,13443
-lm_deluge-0.0.62.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-lm_deluge-0.0.62.dist-info/top_level.txt,sha256=hqU-TJX93yBwpgkDtYcXyLr3t7TLSCCZ_reytJjwBaE,10
-lm_deluge-0.0.62.dist-info/RECORD,,
+lm_deluge-0.0.63.dist-info/licenses/LICENSE,sha256=uNNXGXPCw2TC7CUs7SEBkA-Mz6QBQFWUUEWDMgEs1dU,1058
+lm_deluge-0.0.63.dist-info/METADATA,sha256=I-2GRdzCDkdMd_-e0XLer0vuVObXrRzL32o6Lhc2XDM,13443
+lm_deluge-0.0.63.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+lm_deluge-0.0.63.dist-info/top_level.txt,sha256=hqU-TJX93yBwpgkDtYcXyLr3t7TLSCCZ_reytJjwBaE,10
+lm_deluge-0.0.63.dist-info/RECORD,,