lm-deluge 0.0.85__py3-none-any.whl → 0.0.87__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
lm_deluge/pipelines/gepa/util.py ADDED
@@ -0,0 +1,165 @@
+ """
+ Utility functions for GEPA.
+
+ Includes conversation formatting and text extraction helpers.
+ """
+
+ from __future__ import annotations
+
+ import re
+ from typing import Any
+
+ from lm_deluge.prompt import Conversation
+
+
+ def format_conversation_compact(conversation: Conversation) -> str:
+     """
+     Format a Conversation for showing to the proposer LLM.
+
+     Goals:
+     - Show full user and assistant message content
+     - Show tool calls with their arguments
+     - Abbreviate tool results (just show placeholder, not full content)
+     - No decorative separators, keep it compact
+
+     Args:
+         conversation: The conversation to format
+
+     Returns:
+         A string representation suitable for including in a prompt
+     """
+     lines: list[str] = []
+
+     # Check for system message (first message with role="system")
+     for msg in conversation.messages:
+         if msg.role == "system":
+             lines.append(f"[system]\n{msg.completion}")
+             lines.append("")
+             break
+
+     for msg in conversation.messages:
+         role = msg.role
+
+         if role == "system":
+             # Already handled above
+             continue
+
+         if role == "user":
+             text_content = msg.completion or ""
+             lines.append(f"[user]\n{text_content}")
+
+         elif role == "assistant":
+             # Handle text content
+             text_content = msg.completion or ""
+             if text_content:
+                 lines.append(f"[assistant]\n{text_content}")
+
+             # Handle tool calls
+             if msg.tool_calls:
+                 for tc in msg.tool_calls:
+                     tool_name = tc.name
+                     # Format arguments compactly
+                     args_str = _format_tool_args(tc.arguments)
+                     lines.append(f"[tool_call: {tool_name}]\n{args_str}")
+
+         elif role == "tool":
+             # Just show placeholder for tool results - content can be huge
+             # Try to get tool names from tool_results
+             if msg.tool_results:
+                 for tr in msg.tool_results:
+                     tool_id = getattr(tr, "tool_call_id", "unknown")
+                     lines.append(f"[tool_result: {tool_id}] (content omitted)")
+             else:
+                 lines.append("[tool_result] (content omitted)")
+
+         lines.append("")
+
+     return "\n".join(lines).strip()
+
+
+ def _format_tool_args(arguments: dict[str, Any] | str | None) -> str:
+     """Format tool call arguments compactly."""
+     if arguments is None:
+         return "(no arguments)"
+
+     if isinstance(arguments, str):
+         # Already a string (might be JSON string)
+         return arguments[:500] + "..." if len(arguments) > 500 else arguments
+
+     if isinstance(arguments, dict):
+         # Format as key=value pairs
+         parts = []
+         for key, value in arguments.items():
+             value_str = str(value)
+             # Truncate long values
+             if len(value_str) > 200:
+                 value_str = value_str[:200] + "..."
+             parts.append(f" {key}: {value_str}")
+         return "\n".join(parts) if parts else "(no arguments)"
+
+     return str(arguments)
+
+
+ def extract_text_from_response(response: str) -> str:
+     """
+     Extract text from between ``` blocks in LLM response.
+
+     Handles various formats:
+     - ```text``` or ```language\ntext```
+     - Incomplete blocks
+     - No blocks (returns trimmed response)
+     """
+     # Find content between first and last ```
+     start = response.find("```")
+     if start == -1:
+         return response.strip()
+
+     start += 3
+     end = response.rfind("```")
+
+     if end <= start:
+         # Handle incomplete blocks
+         stripped = response.strip()
+         if stripped.startswith("```"):
+             match = re.match(r"^```\S*\n?", response)
+             if match:
+                 return response[match.end() :].strip()
+         elif stripped.endswith("```"):
+             return stripped[:-3].strip()
+         return stripped
+
+     # Skip language specifier (e.g., ```python\n)
+     content = response[start:end]
+     match = re.match(r"^\S*\n", content)
+     if match:
+         content = content[match.end() :]
+
+     return content.strip()
+
+
+ def format_components_for_prompt(
+     component_values: dict[str, str],
+     component_descriptions: dict[str, str],
+ ) -> str:
+     """
+     Format components for showing to the proposer.
+
+     Args:
+         component_values: Current text value for each component
+         component_descriptions: Description of what each component does
+
+     Returns:
+         Formatted string listing all components
+     """
+     lines = []
+     for name, value in component_values.items():
+         description = component_descriptions.get(name, "")
+         lines.append(f"### {name}")
+         if description:
+             lines.append(f"*{description}*")
+         lines.append("```")
+         lines.append(value)
+         lines.append("```")
+         lines.append("")
+
+     return "\n".join(lines)
lm_deluge/tool/prefab/web_search.py CHANGED
@@ -38,8 +38,8 @@ class AbstractWebSearchManager(abc.ABC):
              return self._tools
 
          self._tools = [
-             Tool.from_function(self._search),
-             Tool.from_function(self._fetch),
+             Tool.from_function(self._search, name=self.search_tool_name),
+             Tool.from_function(self._fetch, name=self.fetch_tool_name),
          ]
 
          return self._tools
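
The only functional change above is that the bound _search and _fetch methods are now registered under explicit names taken from search_tool_name and fetch_tool_name, rather than whatever name Tool.from_function presumably derived from the function before this change. A minimal sketch of the same idea with a standalone function; the lookup function, the "web_search" name, and the lm_deluge.tool import path are assumptions for illustration:

from lm_deluge.tool import Tool  # assumed import location for Tool

def lookup(query: str) -> str:
    """Toy search function used only for this example."""
    return f"results for {query!r}"

# Passing name= sets the tool's exposed name explicitly, so callers can
# rename the tool without renaming the underlying function or method.
search_tool = Tool.from_function(lookup, name="web_search")
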
lm_deluge/warnings.py CHANGED
@@ -9,6 +9,7 @@ WARNINGS: dict[str, str] = {
      "WARN_LOGPROBS_UNSUPPORTED": "Ignoring logprobs param for non-logprobs model: {model_name}",
      "WARN_MINIMAL_TO_LOW": "'minimal' reasoning effort only allowed for gpt-5 models. Setting to 'low' for {model_name}.",
      "WARN_MINIMAL_TO_NONE": "GPT-5.1 models don't support 'minimal' reasoning effort. Converting to 'none' for {model_name}.",
+     "WARN_XHIGH_TO_HIGH": "'xhigh' reasoning effort only supported for gpt-5.2 and gpt-5.1-codex-max. Using 'high' for {model_name}.",
      "WARN_MEDIA_RESOLUTION_UNSUPPORTED": "media_resolution parameter is only supported for Gemini 3 models, ignoring for {model_name}.",
      "WARN_GEMINI3_MISSING_SIGNATURE": "Gemini 3 thought signature missing in {part_type}, injecting dummy signature 'context_engineering_is_the_way_to_go' to avoid API error.",
      "WARN_GEMINI3_NO_REASONING": "Gemini 3 requires reasoning (thinkingConfig). Setting thinkingConfig to low.",
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: lm_deluge
- Version: 0.0.85
+ Version: 0.0.87
  Summary: Python utility for using LLM API models.
  Author-email: Benjamin Anderson <ben@trytaylor.ai>
  Requires-Python: >=3.10
@@ -2,8 +2,8 @@ lm_deluge/__init__.py,sha256=ye2mm-8r9bveEAMWyV13F6APAu2cNzHROU0LOULyPfY,792
  lm_deluge/batches.py,sha256=Km6QM5_7BlF2qEyo4WPlhkaZkpzrLqf50AaveHXQOoY,25127
  lm_deluge/cache.py,sha256=xO2AIYvP3tUpTMKQjwQQYfGRJSRi6e7sMlRhLjsS-u4,4873
  lm_deluge/cli.py,sha256=Ilww5gOw3J5v0NReq_Ra4hhxU4BCIJBl1oTGxJZKedc,12065
- lm_deluge/client.py,sha256=HZ_frrki94g65kMuy1RjRK_oBouCXoYvXLadTPXsQ-U,49216
- lm_deluge/config.py,sha256=C-_rVwAFL5sivLfKSkaa2ANMqqxKbyDCW86KfQB_Lck,1357
+ lm_deluge/client.py,sha256=gsmb3LlsiffAfohtHzMzY-5JvYdCFe6zUzUZaOJteMo,49440
+ lm_deluge/config.py,sha256=Fh7hL0A7HS3zIXd7pkv2Gewkjf1h31QZmCscL1q1yRc,1380
  lm_deluge/embed.py,sha256=CO-TOlC5kOTAM8lcnicoG4u4K664vCBwHF1vHa-nAGg,13382
  lm_deluge/errors.py,sha256=oHjt7YnxWbh-eXMScIzov4NvpJMo0-2r5J6Wh5DQ1tk,209
  lm_deluge/file.py,sha256=PTmlJQ-IaYcYUFun9V0bJ1NPVP84edJrR0hvCMWFylY,19697
@@ -14,23 +14,23 @@ lm_deluge/request_context.py,sha256=CX15dT4Jxz77C-w5EKNyJCfYEa69wNKHbfNi47iG8W4,
  lm_deluge/rerank.py,sha256=-NBAJdHz9OB-SWWJnHzkFmeVO4wR6lFV7Vw-SxG7aVo,11457
  lm_deluge/tracker.py,sha256=B53KIsrK10L9L73cYbVB2pNSC0-FdvJGpIfw735CvaA,14808
  lm_deluge/usage.py,sha256=xz9tAw2hqaJvv9aAVhnQ6N1Arn7fS8Shb28VwCW26wI,5136
- lm_deluge/warnings.py,sha256=3_lWpR20b5WEfchqvbWVIc-vo8afU0Jg6S9FkbA5pZQ,2479
+ lm_deluge/warnings.py,sha256=mLAZRbsRmT7XjgMRvztEWdtCGsgiAamTdHZ-toi8hUY,2614
  lm_deluge/api_requests/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
- lm_deluge/api_requests/anthropic.py,sha256=b9mr8oXJIX8AOYAOlOc0gTtmf2Ob_eVjWnuq4YvazUQ,11801
+ lm_deluge/api_requests/anthropic.py,sha256=U6sFSKT6Op6roIPGinNxlBHz7HFfRUELmv-vsS_1hiE,11975
  lm_deluge/api_requests/base.py,sha256=05j5nrZhgNon2YRFXT_L-yVXKlvdodwOJan6Z6WpSp8,10911
  lm_deluge/api_requests/bedrock.py,sha256=mY1xTvgfCLyqLlfFFmu_baKgkVq1Df1_MJXeN_G1jWQ,15597
  lm_deluge/api_requests/chat_reasoning.py,sha256=sJvstvKFqsSBUjYcwxzGt2_FH4cEp3Z6gKcBPyPjGwk,236
  lm_deluge/api_requests/common.py,sha256=BZ3vRO5TB669_UsNKugkkuFSzoLHOYJIKt4nV4sf4vc,422
- lm_deluge/api_requests/gemini.py,sha256=w42YdpBDcgPOCUwZT2voJbKLFV5GTT5skCX93RyYPLc,12388
+ lm_deluge/api_requests/gemini.py,sha256=_L1V1O7N70rvnp28kXfCaWXluQXEBfMbvSk3S7SrZcI,12730
  lm_deluge/api_requests/mistral.py,sha256=8JZP2CDf1XZfaPcTk0WS4q-VfYYj58ptpoH8LD3MQG4,4528
- lm_deluge/api_requests/openai.py,sha256=KjPu5z5rkinN0DHQs-_GczJZzHimLd7vADHKPYMeUzI,28891
+ lm_deluge/api_requests/openai.py,sha256=cEoMpMPKrVTz2Zjm5pdD8sQpOHDB9O8ndwf9TGqLPcA,29889
  lm_deluge/api_requests/response.py,sha256=vG194gAH5p7ulpNy4qy5Pryfb1p3ZV21-YGoj__ru3E,7436
  lm_deluge/api_requests/deprecated/bedrock.py,sha256=WrcIShCoO8JCUSlFOCHxg6KQCNTZfw3TpYTvSpYk4mA,11320
  lm_deluge/api_requests/deprecated/cohere.py,sha256=KgDScD6_bWhAzOY5BHZQKSA3kurt4KGENqC4wLsGmcU,5142
  lm_deluge/api_requests/deprecated/deepseek.py,sha256=FEApI93VAWDwuaqTooIyKMgONYqRhdUmiAPBRme-IYs,4582
  lm_deluge/api_requests/deprecated/mistral.py,sha256=pOfOZUM4U35I3Plch84SnAFpDAzouHcSNNMtgxRvjy4,4709
  lm_deluge/api_requests/deprecated/vertex.py,sha256=ygXz2RjdXErPCSBbiHLEWbf5_sSTIi31WoX0UaoYzRI,15275
- lm_deluge/models/__init__.py,sha256=_c-gxqAaNO4xy4dtsqIwG1odpwcCa2J02_YQnuXYtc0,4669
+ lm_deluge/models/__init__.py,sha256=_xygx3WwtWbLNmczz7Isl8xwfiVnl8D_4PJwYG-Ya10,4855
  lm_deluge/models/anthropic.py,sha256=X92EYIapos-8LXnIYiypPJcFhI0tqmXja_w8e9H4CF8,6781
  lm_deluge/models/arcee.py,sha256=4OI8eA8RoA-zYww4fWwhVZDFWB2Kd4-KQTTPl9r3Ay4,465
  lm_deluge/models/bedrock.py,sha256=g1PbfceSRH2lWST3ja0mUlF3oTq4e4T-si6RMe7qXgg,4888
@@ -39,14 +39,14 @@ lm_deluge/models/cohere.py,sha256=iXjYtM6jy_YL73Op8OfNsrMNopwae9y-Sw-4vF9cEBw,34
  lm_deluge/models/deepseek.py,sha256=b5t_ep6fE-2cKD2mmImBaLcJUbYrfizYnjG96sfKNTk,2072
  lm_deluge/models/fireworks.py,sha256=yvt2Ggzye4aUqCqY74ta67Vu7FrQaLFjdFtN4P7D-dc,638
  lm_deluge/models/google.py,sha256=IDePlNOvF0lvpv3UhkUD8g30TUJqoaJHQGzTglyGg80,6560
- lm_deluge/models/grok.py,sha256=TDzr8yfTaHbdJhwMA-Du6L-efaKFJhjTQViuVElCCHI,2566
+ lm_deluge/models/grok.py,sha256=rSvN3fKiO_WPNa5O_TzVTDj9-RqqjeXFBiC9OAcGZ4Q,3340
  lm_deluge/models/groq.py,sha256=Mi5WE1xOBGoZlymD0UN6kzhH_NOmfJYU4N2l-TO0Z8Q,2552
  lm_deluge/models/kimi.py,sha256=B_ZL4_0q6hS1VVskBWlBR569nNSjC8RgA2lj1eCjRRE,1183
  lm_deluge/models/meta.py,sha256=BBgnscL1gMcIdPbRqrlDl_q9YAYGSrkw9JkAIabXtLs,1883
  lm_deluge/models/minimax.py,sha256=rwW9gNotAYfDVtMlqmSYegN6GoZM_9DSNNZU2yPOmaU,275
  lm_deluge/models/mistral.py,sha256=x67o5gckBGmPcIGdVbS26XZAYFKBYM4tsxEAahGp8bk,4323
- lm_deluge/models/openai.py,sha256=UDmPqvMaBjqky2Z6yNV4bG4LqxUZmbWEJJO4pq0Mqzc,13329
- lm_deluge/models/openrouter.py,sha256=AHQTvnXM96_70XR0eqb3Wu2rAj2SHBZCWQeEKJ307LY,2816
+ lm_deluge/models/openai.py,sha256=ihSBjuu1QEn-voxEmDUgKn82AuDxrtRCJvFdj-XEZmU,14280
+ lm_deluge/models/openrouter.py,sha256=Wf0NZcuU8_SA4mYlNNez_yS7NUK4ljYbEtMJYaIE5i0,4819
  lm_deluge/models/together.py,sha256=wrGs4wO65on-dSlU9AARAA-rc4GDuWkidPjRQ7GScNg,4749
  lm_deluge/models/zai.py,sha256=BIde8TwjvmkfEi-6bSSBSFIh7KVnlJ7_aNdlqNZRGGI,16
  lm_deluge/pipelines/__init__.py,sha256=U97UmEq4iQKPDH83xA5PztpRBQtXzITtG1A6PaDeyG0,231
@@ -56,6 +56,17 @@ lm_deluge/pipelines/locate.py,sha256=lYNbKTmy9dTvj0lEQkOQ7yrxyqsgYzjD0C_byJKI_4w
  lm_deluge/pipelines/ocr.py,sha256=7fDlvs6uUOvbxMasvGGNJx5Fj6biM6z3lijKZaGN26k,23
  lm_deluge/pipelines/score.py,sha256=hkLMroJMfQ92HPlTBNOHrDRtvdYUBWK0MBlhOfvFTMk,2582
  lm_deluge/pipelines/translate.py,sha256=v_OvBQA2RB-QcWf0aopKHpYc2PDmckxzJGSmSuUX3Sw,1461
+ lm_deluge/pipelines/gepa/__init__.py,sha256=PZSch82ulEUEZscEYhsII7mr78ePlraaJM5ZJrQDk_0,2843
+ lm_deluge/pipelines/gepa/core.py,sha256=1KyJBF51H_EYxEx2KI3DewJfhqyV04dYm9iPuoOh4m4,10978
+ lm_deluge/pipelines/gepa/optimizer.py,sha256=T44RGSsv4uE8qb4ZfXJ7H4aausTvKsVnH8mlzb1dWpc,14803
+ lm_deluge/pipelines/gepa/proposer.py,sha256=EqA56kgg9M4A6qX5gBmQh_tsVH9NVkvUdcXoKCMvDAU,6709
+ lm_deluge/pipelines/gepa/util.py,sha256=PCrARH_pYGNSZa9aUmfTzFmGW2PPN50fQGf10E_yTeE,4909
+ lm_deluge/pipelines/gepa/docs/samples.py,sha256=VSDDhr2UiC9wqV-UweUZPTyBYy8hqpfLpEscoESwbwY,24305
+ lm_deluge/pipelines/gepa/examples/01_synthetic_keywords.py,sha256=wVy8OUuR4DAobb3ZCVO9-_sPbFVhdnUpnbradqsWKrA,4415
+ lm_deluge/pipelines/gepa/examples/02_gsm8k_math.py,sha256=7ri3IyzkNxizyi8NmOwvGsT2OBOOFKW2iehuHPLbDgA,7690
+ lm_deluge/pipelines/gepa/examples/03_hotpotqa_multihop.py,sha256=YmGMsaEKOfX_2Bzc2XH9iOXDA5w_mmxjThAvxLv1sdg,8843
+ lm_deluge/pipelines/gepa/examples/04_batch_classification.py,sha256=_-SlQrV4o9EYOr5uXJqJAyVbzPCasKCK-ZqB_0nF4hg,7918
+ lm_deluge/pipelines/gepa/examples/simple_qa.py,sha256=RH1kcV4G16FUXVokLKn3ufRRj_C4qYSwyRxJykKwwmA,4366
  lm_deluge/tool/__init__.py,sha256=_GD2RAK0wYnKsAPdrM2w8X4xKtugBEm-eSZTboGRL9s,40260
  lm_deluge/tool/builtin/base.py,sha256=FLYdKVAqlffA6WOu4j8wQVRd0iHMsyBW_T3vfl--aXo,276
  lm_deluge/tool/builtin/gemini.py,sha256=uKrzzEZ0RO5EHddYYFvRKoMk7O6YOSWFDojhzbpQSfs,1724
@@ -83,7 +94,7 @@ lm_deluge/tool/prefab/sheets.py,sha256=RhH4PgRI4E6WYKfJpScflT7HtAULvp88ZA94NmJyE
  lm_deluge/tool/prefab/subagents.py,sha256=srJ7On7YR0Y8WuNvf5TJl_7IUfEtG3zlxZeLgmn_-NI,8484
  lm_deluge/tool/prefab/todos.py,sha256=mrtv68uRc-grc0xKP6xKpfPSA2yXYU7FLNy6fDzEbG8,14902
  lm_deluge/tool/prefab/tool_search.py,sha256=EJL5R5BgnO6owspk7F01Yzr8C9q9oJqmfMrWBjLE_bA,6151
- lm_deluge/tool/prefab/web_search.py,sha256=i_FFBB2TAZwgN4-_9a6cD10Z9bgFGk4qNUbnOI3EnJA,6200
+ lm_deluge/tool/prefab/web_search.py,sha256=TQ_-WK6axCG38qInm5bLVEywHfu2sAq-DGIIl1H0Hzw,6255
  lm_deluge/tool/prefab/otc/__init__.py,sha256=33AcwAt9ycECxonnvkcyh13w9Sr2Cbs6OOlBjzBvl54,6373
  lm_deluge/tool/prefab/otc/executor.py,sha256=4IROA_0un3HaV4GK7r4vQiVxJvcoHkqVii-4asGH-Cw,10508
  lm_deluge/tool/prefab/otc/parse.py,sha256=lSAtez-pBFcJVQMW2evRvV9KlduRtPevzGCEB1fmUMo,4517
@@ -94,8 +105,8 @@ lm_deluge/util/schema.py,sha256=q6uwhA4s1lM2dHT1Kwc46E7OY1VecMOtTEI0PTFn6tA,1320
  lm_deluge/util/spatial.py,sha256=BsF_UKhE-x0xBirc-bV1xSKZRTUhsOBdGqsMKme20C8,4099
  lm_deluge/util/validation.py,sha256=hz5dDb3ebvZrZhnaWxOxbNSVMI6nmaOODBkk0htAUhs,1575
  lm_deluge/util/xml.py,sha256=Ft4zajoYBJR3HHCt2oHwGfymGLdvp_gegVmJ-Wqk4Ck,10547
- lm_deluge-0.0.85.dist-info/licenses/LICENSE,sha256=uNNXGXPCw2TC7CUs7SEBkA-Mz6QBQFWUUEWDMgEs1dU,1058
- lm_deluge-0.0.85.dist-info/METADATA,sha256=MgU3mDxSvV_NBnmoacgJYXQZ-dAUdSI0JZHgXz7-BxY,13595
- lm_deluge-0.0.85.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- lm_deluge-0.0.85.dist-info/top_level.txt,sha256=hqU-TJX93yBwpgkDtYcXyLr3t7TLSCCZ_reytJjwBaE,10
- lm_deluge-0.0.85.dist-info/RECORD,,
+ lm_deluge-0.0.87.dist-info/licenses/LICENSE,sha256=uNNXGXPCw2TC7CUs7SEBkA-Mz6QBQFWUUEWDMgEs1dU,1058
+ lm_deluge-0.0.87.dist-info/METADATA,sha256=Y77Jir9_4uLeEP1oXvZABD6UdTTS8YsYvmQYw7aobqI,13595
+ lm_deluge-0.0.87.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ lm_deluge-0.0.87.dist-info/top_level.txt,sha256=hqU-TJX93yBwpgkDtYcXyLr3t7TLSCCZ_reytJjwBaE,10
+ lm_deluge-0.0.87.dist-info/RECORD,,