lm-deluge 0.0.67__py3-none-any.whl → 0.0.90__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of lm-deluge might be problematic.
- lm_deluge/__init__.py +1 -2
- lm_deluge/api_requests/anthropic.py +117 -22
- lm_deluge/api_requests/base.py +84 -11
- lm_deluge/api_requests/bedrock.py +30 -6
- lm_deluge/api_requests/chat_reasoning.py +4 -0
- lm_deluge/api_requests/gemini.py +166 -20
- lm_deluge/api_requests/openai.py +145 -25
- lm_deluge/batches.py +15 -45
- lm_deluge/client.py +309 -50
- lm_deluge/config.py +15 -3
- lm_deluge/models/__init__.py +14 -1
- lm_deluge/models/anthropic.py +29 -14
- lm_deluge/models/arcee.py +16 -0
- lm_deluge/models/deepseek.py +36 -4
- lm_deluge/models/google.py +42 -0
- lm_deluge/models/grok.py +24 -0
- lm_deluge/models/kimi.py +36 -0
- lm_deluge/models/minimax.py +18 -0
- lm_deluge/models/openai.py +100 -0
- lm_deluge/models/openrouter.py +133 -7
- lm_deluge/models/together.py +11 -0
- lm_deluge/models/zai.py +50 -0
- lm_deluge/pipelines/gepa/__init__.py +95 -0
- lm_deluge/pipelines/gepa/core.py +354 -0
- lm_deluge/pipelines/gepa/docs/samples.py +705 -0
- lm_deluge/pipelines/gepa/examples/01_synthetic_keywords.py +140 -0
- lm_deluge/pipelines/gepa/examples/02_gsm8k_math.py +261 -0
- lm_deluge/pipelines/gepa/examples/03_hotpotqa_multihop.py +300 -0
- lm_deluge/pipelines/gepa/examples/04_batch_classification.py +271 -0
- lm_deluge/pipelines/gepa/examples/simple_qa.py +129 -0
- lm_deluge/pipelines/gepa/optimizer.py +435 -0
- lm_deluge/pipelines/gepa/proposer.py +235 -0
- lm_deluge/pipelines/gepa/util.py +165 -0
- lm_deluge/{llm_tools → pipelines}/score.py +2 -2
- lm_deluge/{llm_tools → pipelines}/translate.py +5 -3
- lm_deluge/prompt.py +537 -88
- lm_deluge/request_context.py +7 -2
- lm_deluge/server/__init__.py +24 -0
- lm_deluge/server/__main__.py +144 -0
- lm_deluge/server/adapters.py +369 -0
- lm_deluge/server/app.py +388 -0
- lm_deluge/server/auth.py +71 -0
- lm_deluge/server/model_policy.py +215 -0
- lm_deluge/server/models_anthropic.py +172 -0
- lm_deluge/server/models_openai.py +175 -0
- lm_deluge/tool/__init__.py +1130 -0
- lm_deluge/tool/builtin/anthropic/__init__.py +300 -0
- lm_deluge/tool/builtin/anthropic/bash.py +0 -0
- lm_deluge/tool/builtin/anthropic/computer_use.py +0 -0
- lm_deluge/tool/builtin/gemini.py +59 -0
- lm_deluge/tool/builtin/openai.py +74 -0
- lm_deluge/tool/cua/__init__.py +173 -0
- lm_deluge/tool/cua/actions.py +148 -0
- lm_deluge/tool/cua/base.py +27 -0
- lm_deluge/tool/cua/batch.py +215 -0
- lm_deluge/tool/cua/converters.py +466 -0
- lm_deluge/tool/cua/kernel.py +702 -0
- lm_deluge/tool/cua/trycua.py +989 -0
- lm_deluge/tool/prefab/__init__.py +45 -0
- lm_deluge/tool/prefab/batch_tool.py +156 -0
- lm_deluge/tool/prefab/docs.py +1119 -0
- lm_deluge/tool/prefab/email.py +294 -0
- lm_deluge/tool/prefab/filesystem.py +1711 -0
- lm_deluge/tool/prefab/full_text_search/__init__.py +285 -0
- lm_deluge/tool/prefab/full_text_search/tantivy_index.py +396 -0
- lm_deluge/tool/prefab/memory.py +458 -0
- lm_deluge/tool/prefab/otc/__init__.py +165 -0
- lm_deluge/tool/prefab/otc/executor.py +281 -0
- lm_deluge/tool/prefab/otc/parse.py +188 -0
- lm_deluge/tool/prefab/random.py +212 -0
- lm_deluge/tool/prefab/rlm/__init__.py +296 -0
- lm_deluge/tool/prefab/rlm/executor.py +349 -0
- lm_deluge/tool/prefab/rlm/parse.py +144 -0
- lm_deluge/tool/prefab/sandbox/__init__.py +19 -0
- lm_deluge/tool/prefab/sandbox/daytona_sandbox.py +483 -0
- lm_deluge/tool/prefab/sandbox/docker_sandbox.py +609 -0
- lm_deluge/tool/prefab/sandbox/fargate_sandbox.py +546 -0
- lm_deluge/tool/prefab/sandbox/modal_sandbox.py +469 -0
- lm_deluge/tool/prefab/sandbox/seatbelt_sandbox.py +827 -0
- lm_deluge/tool/prefab/sheets.py +385 -0
- lm_deluge/tool/prefab/skills.py +0 -0
- lm_deluge/tool/prefab/subagents.py +233 -0
- lm_deluge/tool/prefab/todos.py +342 -0
- lm_deluge/tool/prefab/tool_search.py +169 -0
- lm_deluge/tool/prefab/web_search.py +199 -0
- lm_deluge/tracker.py +16 -13
- lm_deluge/util/schema.py +412 -0
- lm_deluge/warnings.py +8 -0
- {lm_deluge-0.0.67.dist-info → lm_deluge-0.0.90.dist-info}/METADATA +23 -9
- lm_deluge-0.0.90.dist-info/RECORD +132 -0
- lm_deluge/built_in_tools/anthropic/__init__.py +0 -128
- lm_deluge/built_in_tools/openai.py +0 -28
- lm_deluge/presets/cerebras.py +0 -17
- lm_deluge/presets/meta.py +0 -13
- lm_deluge/tool.py +0 -849
- lm_deluge-0.0.67.dist-info/RECORD +0 -72
- lm_deluge/{llm_tools → pipelines}/__init__.py +1 -1
- /lm_deluge/{llm_tools → pipelines}/classify.py +0 -0
- /lm_deluge/{llm_tools → pipelines}/extract.py +0 -0
- /lm_deluge/{llm_tools → pipelines}/locate.py +0 -0
- /lm_deluge/{llm_tools → pipelines}/ocr.py +0 -0
- /lm_deluge/{built_in_tools/anthropic/bash.py → skills/anthropic.py} +0 -0
- /lm_deluge/{built_in_tools/anthropic/computer_use.py → skills/compat.py} +0 -0
- /lm_deluge/{built_in_tools → tool/builtin}/anthropic/editor.py +0 -0
- /lm_deluge/{built_in_tools → tool/builtin}/base.py +0 -0
- {lm_deluge-0.0.67.dist-info → lm_deluge-0.0.90.dist-info}/WHEEL +0 -0
- {lm_deluge-0.0.67.dist-info → lm_deluge-0.0.90.dist-info}/licenses/LICENSE +0 -0
- {lm_deluge-0.0.67.dist-info → lm_deluge-0.0.90.dist-info}/top_level.txt +0 -0
lm_deluge/tool/prefab/sandbox/modal_sandbox.py
@@ -0,0 +1,469 @@
import asyncio
import os
import secrets
import shlex
import time
from dataclasses import dataclass, field
from typing import Any

import modal

from lm_deluge.tool import Tool


@dataclass
class TrackedProcess:
    """Tracks a process running in the sandbox."""

    process: Any  # Modal's ContainerProcess
    name: str
    command: str
    started_at: float = field(default_factory=time.time)


class ModalSandbox:
    def __init__(
        self,
        app_name: str | None = None,
        *,
        image: Any | None = None,
        block_network: bool = False,
        add_local_files: list[str] | None = None,
        encrypted_ports: list[int] | None = None,
        stateful: bool = False,
    ):
        app_name = app_name or secrets.token_urlsafe(32)
        app = modal.App.lookup(app_name, create_if_missing=True)
        self.app = app
        self.block_network = block_network
        self.encrypted_ports = encrypted_ports or []
        self.stateful = stateful

        if image is None:
            image = modal.Image.debian_slim(python_version="3.12")

        assert isinstance(image, modal.Image), "expected modal Image"
        if add_local_files:
            for path in add_local_files:
                if os.path.exists(path):
                    # Compute a reasonable remote path based on the basename
                    basename = os.path.basename(os.path.normpath(path))
                    remote_path = f"/root/{basename}"
                    if os.path.isdir(path):
                        image = image.add_local_dir(path, remote_path)  # type: ignore
                    else:
                        image = image.add_local_file(path, remote_path)  # type: ignore
                else:
                    raise FileNotFoundError(f"File not found: {path}")

        # Create sandbox with encrypted_ports if specified
        create_kwargs: dict[str, Any] = {
            "app": app,
            "block_network": block_network,
            "image": image,
        }
        if self.encrypted_ports:
            create_kwargs["encrypted_ports"] = self.encrypted_ports

        self.sb = modal.Sandbox.create(**create_kwargs)

        # Process tracking - simple dict for background processes
        self.processes: dict[str, TrackedProcess] = {}
        self.process_counter: int = 0
        self._destroyed = False

        # Stateful mode: persistent shell process
        self._shell_process: Any | None = None
        self._shell_initialized = False
        # Unique delimiter for detecting command completion
        self._delimiter = f"__DELIM_{secrets.token_hex(8)}__"
        # Buffer for reading output
        self._output_buffer = ""

    def __enter__(self):
        """Synchronous context manager entry (use async with for async support)."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Synchronous context manager exit - cleanup sandbox."""
        if not self._destroyed:
            self._destroy()
        return False

    def __del__(self):
        """Cleanup sandbox when garbage collected (backup cleanup)."""
        if not self._destroyed:
            try:
                self._destroy()
            except Exception:
                # Ignore errors during cleanup in __del__
                pass

    def _generate_process_name(self) -> str:
        """Generate a unique process name like p1, p2, etc."""
        self.process_counter += 1
        return f"p{self.process_counter}"

    async def _ensure_shell_started(self):
        """Start the persistent shell for stateful mode if not already running."""
        if self._shell_initialized:
            return

        # Start bash with stdbuf to force line-buffered output
        # This allows us to read output as it's produced without waiting for process exit
        self._shell_process = await self.sb.exec.aio("stdbuf", "-oL", "bash")
        self._shell_initialized = True
        self._output_buffer = ""

    async def _read_until_delimiter(
        self, timeout: int | None = None
    ) -> tuple[str, int]:
        """
        Read from shell stdout until we see the delimiter.

        Returns:
            Tuple of (output, exit_code)
        """
        # Delimiter format in output: __DELIM_xxx__:END:exit_code
        end_marker = f"{self._delimiter}:END:"

        async def read_loop() -> tuple[str, int]:
            assert self._shell_process
            async for chunk in self._shell_process.stdout:
                self._output_buffer += chunk

                # Check if we have the marker in buffer
                if end_marker in self._output_buffer:
                    # Split at the marker
                    marker_idx = self._output_buffer.find(end_marker)
                    output = self._output_buffer[:marker_idx]

                    # Parse exit code from "END:exit_code\n..."
                    after_marker = self._output_buffer[marker_idx + len(end_marker) :]
                    exit_code = 0
                    exit_line = after_marker.split("\n")[0]
                    if exit_line.isdigit():
                        exit_code = int(exit_line)
                    elif exit_line.lstrip("-").isdigit():
                        exit_code = int(exit_line)

                    # Keep anything after this marker's newline for next command
                    newline_idx = after_marker.find("\n")
                    if newline_idx >= 0:
                        self._output_buffer = after_marker[newline_idx + 1 :]
                    else:
                        self._output_buffer = ""

                    return output, exit_code

            # Stream ended without finding marker
            output = self._output_buffer
            self._output_buffer = ""
            return output, -1

        if timeout:
            try:
                return await asyncio.wait_for(read_loop(), timeout=timeout)
            except asyncio.TimeoutError:
                output = self._output_buffer
                self._output_buffer = ""
                return output + "\n[Command timed out]", -1
        else:
            return await read_loop()

    async def _exec_stateful(
        self,
        command: str,
        timeout: int | None = None,
    ) -> str:
        """Execute a command in the persistent shell (stateful mode)."""
        await self._ensure_shell_started()
        assert self._shell_process is not None

        # Send the command followed by a marker that includes the exit code
        # Format: command; echo "__DELIM_xxx__:END:$?"
        wrapped_cmd = f"{command}; echo '{self._delimiter}:END:'$?\n"
        self._shell_process.stdin.write(wrapped_cmd.encode())
        await self._shell_process.stdin.drain.aio()

        # Read output until delimiter
        output, exit_code = await self._read_until_delimiter(timeout=timeout)

        # Clean up output - remove any leading/trailing whitespace artifacts
        output = output.strip()

        # Truncate if needed
        if len(output) > 5000:
            output = "...[truncated]...\n" + output[-5000:]

        # Include exit code if non-zero
        if exit_code != 0:
            output = f"[Exit code: {exit_code}]\n{output}"

        return output if output else "(no output)"

    async def _exec(
        self,
        command: str | None = None,
        cmd: list[str] | None = None,
        timeout: int | None = 120000,
        run_in_background: bool = False,
        name: str | None = None,
        description: str | None = None,
    ) -> str:
        """
        Execute a command in the sandbox.

        Args:
            command: Shell command as a string (e.g., "ls -la")
            cmd: Command as array of strings (e.g., ["ls", "-la"])
            timeout: Timeout in milliseconds (default: 120000 = 2 minutes, max: 600000)
            run_in_background: If True, run in background and return immediately.
            name: Name for background process (auto-generated if not provided)
            description: Short description of what this command does (for logging)

        Returns:
            Output string if foreground, or confirmation message if background
        """
        # Handle both command formats
        if command is not None:
            cmd_str = command
        elif cmd is not None:
            cmd_str = shlex.join(cmd)
        else:
            return "Error: Must provide either 'command' (string) or 'cmd' (array)"

        # Convert timeout from milliseconds to seconds for Modal API
        timeout_seconds: int | None = None
        if timeout is not None and not run_in_background:
            timeout_seconds = min(timeout // 1000, 600)  # Cap at 10 minutes

        # Use stateful mode for foreground commands when enabled
        # Background processes always use stateless mode (they need independent processes)
        if self.stateful and not run_in_background:
            return await self._exec_stateful(cmd_str, timeout=timeout_seconds)

        # Stateless mode: spawn a new process for each command
        if command is not None:
            cmd_list = ["bash", "-c", command]
        else:
            cmd_list = cmd  # type: ignore

        # Start the process (no timeout for background processes)
        assert cmd_list, "no cmd list"
        process = await self.sb.exec.aio(
            *cmd_list, timeout=None if run_in_background else timeout_seconds
        )

        if run_in_background:
            # Background process - track it but don't read stdout
            proc_name = name or self._generate_process_name()
            tracked = TrackedProcess(
                process=process,
                name=proc_name,
                command=cmd_str,
            )
            self.processes[proc_name] = tracked

            return (
                f"Started background process '{proc_name}'.\n"
                f"Command: {cmd_str}\n"
                f"Note: Use another command (e.g., curl localhost:PORT) to verify the process is working. "
                f"Use list_processes() to check status."
            )
        else:
            # Wait for completion and return output
            output = ""
            try:
                async for line in process.stdout:
                    output += line
            except Exception:
                pass

            # Wait for process to complete to get exit code
            await process.wait.aio()

            # Truncate if needed
            if len(output) > 5000:
                output = "...[truncated]...\n" + output[-5000:]

            # Include exit code if non-zero
            if process.returncode != 0:
                output = f"[Exit code: {process.returncode}]\n{output}"

            return output if output else "(no output)"

    def _check_process(self, name: str | None = None) -> str:
        """
        Check status of a background process.

        Args:
            name: Process name. If not provided, shows all processes.

        Returns:
            Process status information
        """
        if not self.processes:
            return "No background processes have been started."

        if name:
            proc = self.processes.get(name)
            if not proc:
                available = ", ".join(self.processes.keys())
                return f"Process '{name}' not found. Available: {available}"

            # Use poll() to check status without blocking
            poll_result = proc.process.poll()
            if poll_result is None:
                status = "running"
            else:
                status = f"completed (exit code: {poll_result})"

            elapsed = time.time() - proc.started_at
            return f"Process: {name}\nCommand: {proc.command}\nStatus: {status}\nRunning for: {elapsed:.1f}s"
        else:
            # Show all processes
            lines = ["NAME STATUS COMMAND"]
            for proc_name, proc in self.processes.items():
                poll_result = proc.process.poll()
                if poll_result is None:
                    status = "running"
                else:
                    status = f"exit {poll_result}"

                cmd_display = (
                    proc.command[:40] + "..."
                    if len(proc.command) > 40
                    else proc.command
                )
                lines.append(f"{proc_name:<8} {status:<19} {cmd_display}")

            return "\n".join(lines)

    def _get_url(self, port: int = 8080) -> str:
        """
        Get public URL for a port.

        Args:
            port: Port number (default 8080)

        Returns:
            URL and token information
        """
        if self.block_network:
            return "Error: Network is blocked. Create sandbox with block_network=False to use tunnels."

        # For port 8080 or if no encrypted_ports, use create_connect_token
        if port == 8080 or port not in self.encrypted_ports:
            try:
                creds = self.sb.create_connect_token(
                    user_metadata={"user_id": "sandbox"}
                )
                return f"URL: {creds.url}\nToken: {creds.token}"
            except Exception as e:
                return f"Error getting URL: {e}"

        # For other ports that were configured with encrypted_ports
        try:
            tunnels = self.sb.tunnels()
            if port in tunnels:
                tunnel = tunnels[port]
                return f"URL: {tunnel.url}"
            else:
                available = list(tunnels.keys()) if tunnels else []
                return f"Port {port} not available. Available ports: {available}"
        except Exception as e:
            return f"Error getting tunnel: {e}"

    def _destroy(self):
        """Destroy the sandbox and mark as destroyed."""
        if not self._destroyed:
            # Clean up persistent shell if in stateful mode
            if self._shell_process is not None:
                try:
                    self._shell_process.stdin.write_eof()
                except Exception:
                    pass
                self._shell_process = None
                self._shell_initialized = False

            self.sb.terminate()
            self._destroyed = True

    def get_tools(self):
        if self.stateful:
            bash_description = (
                "Execute a bash command in the sandbox environment. "
                "This sandbox maintains state between commands - shell variables, "
                "working directory (cd), and functions persist across calls. "
                "Set run_in_background=true to run servers or long-running processes "
                "(background processes run independently and don't share state)."
            )
        else:
            bash_description = (
                "Execute a bash command in the sandbox environment. "
                "Each command runs in a fresh shell (no state persistence between commands). "
                "Set run_in_background=true to run servers or long-running processes. "
                "For background processes, verify they're working using another command (e.g., curl localhost:PORT)."
            )

        bash_tool = Tool(
            name="bash",
            description=bash_description,
            run=self._exec,
            parameters={
                "command": {
                    "type": "string",
                    "description": "Shell command to execute (e.g., 'ls -la', 'python -m http.server 8080')",
                },
                "description": {
                    "type": "string",
                    "description": "Short description of what this command does (5-10 words)",
                },
                "run_in_background": {
                    "type": "boolean",
                    "description": "If true, run in background without waiting. Default: false.",
                },
                "name": {
                    "type": "string",
                    "description": "Name for background process (e.g., 'server'). Only used with run_in_background=true.",
                },
                "timeout": {
                    "type": "integer",
                    "description": "Timeout in milliseconds (default: 120000, max: 600000)",
                },
            },
            required=["command"],
        )

        check_tool = Tool(
            name="list_processes",
            description="Check status of background processes. Shows whether each process is running or has exited.",
            run=self._check_process,
            parameters={
                "name": {
                    "type": "string",
                    "description": "Process name to check, or omit to see all processes",
                },
            },
            required=[],
        )

        url_tool = Tool(
            name="get_url",
            description=(
                "Get a public URL to access a port in the sandbox. "
                "Use after starting a web server to get the external URL. "
                "Default port is 8080."
            ),
            run=self._get_url,
            parameters={
                "port": {
                    "type": "integer",
                    "description": "Port number to expose (default: 8080)",
                },
            },
            required=[],
        )

        return [bash_tool, check_tool, url_tool]
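For orientation, here is a minimal usage sketch of the new ModalSandbox (not part of the diff). It assumes the class is importable from lm_deluge.tool.prefab.sandbox.modal_sandbox, as the file list suggests, and it drives the sandbox methods directly rather than through an agent loop; in practice the Tool objects returned by get_tools() would be handed to a model.

import asyncio

from lm_deluge.tool.prefab.sandbox.modal_sandbox import ModalSandbox


async def main() -> None:
    # stateful=True keeps one bash session alive, so cd/exports persist between calls
    with ModalSandbox(stateful=True, encrypted_ports=[8080]) as sandbox:
        tools = sandbox.get_tools()  # [bash, list_processes, get_url] Tool objects for an agent

        # Foreground command through the persistent shell
        print(await sandbox._exec(command="cd /root && echo $PWD"))

        # Background server, then check its status and expose the port
        print(
            await sandbox._exec(
                command="python -m http.server 8080",
                run_in_background=True,
                name="server",
            )
        )
        print(sandbox._check_process("server"))  # running / exit code
        print(sandbox._get_url(8080))            # public URL for the tunneled port


asyncio.run(main())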