ripperdoc-0.2.6-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ripperdoc/__init__.py +3 -0
- ripperdoc/__main__.py +20 -0
- ripperdoc/cli/__init__.py +1 -0
- ripperdoc/cli/cli.py +405 -0
- ripperdoc/cli/commands/__init__.py +82 -0
- ripperdoc/cli/commands/agents_cmd.py +263 -0
- ripperdoc/cli/commands/base.py +19 -0
- ripperdoc/cli/commands/clear_cmd.py +18 -0
- ripperdoc/cli/commands/compact_cmd.py +23 -0
- ripperdoc/cli/commands/config_cmd.py +31 -0
- ripperdoc/cli/commands/context_cmd.py +144 -0
- ripperdoc/cli/commands/cost_cmd.py +82 -0
- ripperdoc/cli/commands/doctor_cmd.py +221 -0
- ripperdoc/cli/commands/exit_cmd.py +19 -0
- ripperdoc/cli/commands/help_cmd.py +20 -0
- ripperdoc/cli/commands/mcp_cmd.py +70 -0
- ripperdoc/cli/commands/memory_cmd.py +202 -0
- ripperdoc/cli/commands/models_cmd.py +413 -0
- ripperdoc/cli/commands/permissions_cmd.py +302 -0
- ripperdoc/cli/commands/resume_cmd.py +98 -0
- ripperdoc/cli/commands/status_cmd.py +167 -0
- ripperdoc/cli/commands/tasks_cmd.py +278 -0
- ripperdoc/cli/commands/todos_cmd.py +69 -0
- ripperdoc/cli/commands/tools_cmd.py +19 -0
- ripperdoc/cli/ui/__init__.py +1 -0
- ripperdoc/cli/ui/context_display.py +298 -0
- ripperdoc/cli/ui/helpers.py +22 -0
- ripperdoc/cli/ui/rich_ui.py +1557 -0
- ripperdoc/cli/ui/spinner.py +49 -0
- ripperdoc/cli/ui/thinking_spinner.py +128 -0
- ripperdoc/cli/ui/tool_renderers.py +298 -0
- ripperdoc/core/__init__.py +1 -0
- ripperdoc/core/agents.py +486 -0
- ripperdoc/core/commands.py +33 -0
- ripperdoc/core/config.py +559 -0
- ripperdoc/core/default_tools.py +88 -0
- ripperdoc/core/permissions.py +252 -0
- ripperdoc/core/providers/__init__.py +47 -0
- ripperdoc/core/providers/anthropic.py +250 -0
- ripperdoc/core/providers/base.py +265 -0
- ripperdoc/core/providers/gemini.py +615 -0
- ripperdoc/core/providers/openai.py +487 -0
- ripperdoc/core/query.py +1058 -0
- ripperdoc/core/query_utils.py +622 -0
- ripperdoc/core/skills.py +295 -0
- ripperdoc/core/system_prompt.py +431 -0
- ripperdoc/core/tool.py +240 -0
- ripperdoc/sdk/__init__.py +9 -0
- ripperdoc/sdk/client.py +333 -0
- ripperdoc/tools/__init__.py +1 -0
- ripperdoc/tools/ask_user_question_tool.py +431 -0
- ripperdoc/tools/background_shell.py +389 -0
- ripperdoc/tools/bash_output_tool.py +98 -0
- ripperdoc/tools/bash_tool.py +1016 -0
- ripperdoc/tools/dynamic_mcp_tool.py +428 -0
- ripperdoc/tools/enter_plan_mode_tool.py +226 -0
- ripperdoc/tools/exit_plan_mode_tool.py +153 -0
- ripperdoc/tools/file_edit_tool.py +346 -0
- ripperdoc/tools/file_read_tool.py +203 -0
- ripperdoc/tools/file_write_tool.py +205 -0
- ripperdoc/tools/glob_tool.py +179 -0
- ripperdoc/tools/grep_tool.py +370 -0
- ripperdoc/tools/kill_bash_tool.py +136 -0
- ripperdoc/tools/ls_tool.py +471 -0
- ripperdoc/tools/mcp_tools.py +591 -0
- ripperdoc/tools/multi_edit_tool.py +456 -0
- ripperdoc/tools/notebook_edit_tool.py +386 -0
- ripperdoc/tools/skill_tool.py +205 -0
- ripperdoc/tools/task_tool.py +379 -0
- ripperdoc/tools/todo_tool.py +494 -0
- ripperdoc/tools/tool_search_tool.py +380 -0
- ripperdoc/utils/__init__.py +1 -0
- ripperdoc/utils/bash_constants.py +51 -0
- ripperdoc/utils/bash_output_utils.py +43 -0
- ripperdoc/utils/coerce.py +34 -0
- ripperdoc/utils/context_length_errors.py +252 -0
- ripperdoc/utils/exit_code_handlers.py +241 -0
- ripperdoc/utils/file_watch.py +135 -0
- ripperdoc/utils/git_utils.py +274 -0
- ripperdoc/utils/json_utils.py +27 -0
- ripperdoc/utils/log.py +176 -0
- ripperdoc/utils/mcp.py +560 -0
- ripperdoc/utils/memory.py +253 -0
- ripperdoc/utils/message_compaction.py +676 -0
- ripperdoc/utils/messages.py +519 -0
- ripperdoc/utils/output_utils.py +258 -0
- ripperdoc/utils/path_ignore.py +677 -0
- ripperdoc/utils/path_utils.py +46 -0
- ripperdoc/utils/permissions/__init__.py +27 -0
- ripperdoc/utils/permissions/path_validation_utils.py +174 -0
- ripperdoc/utils/permissions/shell_command_validation.py +552 -0
- ripperdoc/utils/permissions/tool_permission_utils.py +279 -0
- ripperdoc/utils/prompt.py +17 -0
- ripperdoc/utils/safe_get_cwd.py +31 -0
- ripperdoc/utils/sandbox_utils.py +38 -0
- ripperdoc/utils/session_history.py +260 -0
- ripperdoc/utils/session_usage.py +117 -0
- ripperdoc/utils/shell_token_utils.py +95 -0
- ripperdoc/utils/shell_utils.py +159 -0
- ripperdoc/utils/todo.py +203 -0
- ripperdoc/utils/token_estimation.py +34 -0
- ripperdoc-0.2.6.dist-info/METADATA +193 -0
- ripperdoc-0.2.6.dist-info/RECORD +107 -0
- ripperdoc-0.2.6.dist-info/WHEEL +5 -0
- ripperdoc-0.2.6.dist-info/entry_points.txt +3 -0
- ripperdoc-0.2.6.dist-info/licenses/LICENSE +53 -0
- ripperdoc-0.2.6.dist-info/top_level.txt +1 -0
ripperdoc/tools/background_shell.py
@@ -0,0 +1,389 @@
+"""Lightweight background shell manager for BashTool.
+
+Allows starting shell commands that keep running while the caller continues.
+Output can be polled via the BashOutput tool and commands can be terminated
+via the KillBash tool.
+"""
+
+import asyncio
+import concurrent.futures
+import contextlib
+import threading
+import time
+import uuid
+from dataclasses import dataclass, field
+from typing import Any, Dict, List, Optional
+
+import atexit
+
+from ripperdoc.utils.shell_utils import build_shell_command, find_suitable_shell
+from ripperdoc.utils.log import get_logger
+
+
+logger = get_logger()
+
+
+@dataclass
+class BackgroundTask:
+    """In-memory record of a background shell command."""
+
+    id: str
+    command: str
+    process: asyncio.subprocess.Process
+    start_time: float
+    timeout: Optional[float] = None
+    stdout_chunks: List[str] = field(default_factory=list)
+    stderr_chunks: List[str] = field(default_factory=list)
+    exit_code: Optional[int] = None
+    killed: bool = False
+    timed_out: bool = False
+    reader_tasks: List[asyncio.Task] = field(default_factory=list)
+    done_event: asyncio.Event = field(default_factory=asyncio.Event)
+
+
+_tasks: Dict[str, BackgroundTask] = {}
+_tasks_lock = threading.Lock()
+_background_loop: Optional[asyncio.AbstractEventLoop] = None
+_background_thread: Optional[threading.Thread] = None
+_loop_lock = threading.Lock()
+_shutdown_registered = False
+
+
+def _safe_log_exception(message: str, **extra: Any) -> None:
+    """Log an exception but never let logging failures bubble up."""
+    try:
+        logger.exception(message, extra=extra)
+    except (OSError, RuntimeError, ValueError):
+        pass
+
+
+def _ensure_background_loop() -> asyncio.AbstractEventLoop:
+    """Create (or return) a dedicated loop for background processes."""
+    global _background_loop, _background_thread
+
+    if _background_loop and _background_loop.is_running():
+        return _background_loop
+
+    with _loop_lock:
+        if _background_loop and _background_loop.is_running():
+            return _background_loop
+
+        loop = asyncio.new_event_loop()
+        ready = threading.Event()
+
+        def _run_loop() -> None:
+            asyncio.set_event_loop(loop)
+            ready.set()
+            loop.run_forever()
+
+        thread = threading.Thread(
+            target=_run_loop,
+            name="ripperdoc-bg-loop",
+            daemon=True,
+        )
+        thread.start()
+        ready.wait()
+
+        _background_loop = loop
+        _background_thread = thread
+        _register_shutdown_hook()
+        return loop
+
+
+def _register_shutdown_hook() -> None:
+    global _shutdown_registered
+    if _shutdown_registered:
+        return
+    atexit.register(shutdown_background_shell)
+    _shutdown_registered = True
+
+
+def _submit_to_background_loop(coro: Any) -> concurrent.futures.Future:
+    """Run a coroutine on the background loop and return a thread-safe future."""
+    loop = _ensure_background_loop()
+    return asyncio.run_coroutine_threadsafe(coro, loop)
+
+
+async def _pump_stream(stream: asyncio.StreamReader, sink: List[str]) -> None:
+    """Continuously read from a stream into a buffer."""
+    try:
+        while True:
+            chunk = await stream.read(4096)
+            if not chunk:
+                break
+            text = chunk.decode("utf-8", errors="replace")
+            with _tasks_lock:
+                sink.append(text)
+    except (OSError, RuntimeError, asyncio.CancelledError) as exc:
+        if isinstance(exc, asyncio.CancelledError):
+            return  # Normal cancellation
+        # Best effort; ignore stream read errors to avoid leaking tasks.
+        logger.debug(
+            f"Stream pump error for background task: {exc}",
+            exc_info=True,
+        )
+
+
+async def _finalize_reader_tasks(reader_tasks: List[asyncio.Task], timeout: float = 1.0) -> None:
+    """Wait for stream reader tasks to finish, cancelling if they hang."""
+    if not reader_tasks:
+        return
+
+    try:
+        await asyncio.wait_for(
+            asyncio.gather(*reader_tasks, return_exceptions=True), timeout=timeout
+        )
+    except asyncio.TimeoutError:
+        for task in reader_tasks:
+            if not task.done():
+                task.cancel()
+        await asyncio.gather(*reader_tasks, return_exceptions=True)
+
+
+async def _monitor_task(task: BackgroundTask) -> None:
+    """Wait for a background process to finish or timeout, then mark status."""
+    try:
+        if task.timeout:
+            await asyncio.wait_for(task.process.wait(), timeout=task.timeout)
+        else:
+            await task.process.wait()
+        with _tasks_lock:
+            task.exit_code = task.process.returncode
+    except asyncio.TimeoutError:
+        logger.warning(f"Background task {task.id} timed out after {task.timeout}s: {task.command}")
+        with _tasks_lock:
+            task.timed_out = True
+        task.process.kill()
+        await task.process.wait()
+        with _tasks_lock:
+            task.exit_code = -1
+    except asyncio.CancelledError:
+        return
+    except (OSError, RuntimeError, ProcessLookupError) as exc:
+        logger.warning(
+            "Error monitoring background task: %s: %s",
+            type(exc).__name__, exc,
+            extra={"task_id": task.id, "command": task.command},
+        )
+        with _tasks_lock:
+            task.exit_code = -1
+    finally:
+        # Ensure readers are finished before marking done.
+        await _finalize_reader_tasks(task.reader_tasks)
+        task.done_event.set()
+
+
+async def _start_background_command(
+    command: str, timeout: Optional[float] = None, shell_executable: Optional[str] = None
+) -> str:
+    """Launch a background shell command on the dedicated loop."""
+    selected_shell = shell_executable or find_suitable_shell()
+    argv = build_shell_command(selected_shell, command)
+    process = await asyncio.create_subprocess_exec(
+        *argv,
+        stdout=asyncio.subprocess.PIPE,
+        stderr=asyncio.subprocess.PIPE,
+        stdin=asyncio.subprocess.DEVNULL,
+        start_new_session=False,
+    )
+
+    task_id = f"bash_{uuid.uuid4().hex[:8]}"
+    record = BackgroundTask(
+        id=task_id,
+        command=command,
+        process=process,
+        start_time=_loop_time(),
+        timeout=timeout,
+    )
+    with _tasks_lock:
+        _tasks[task_id] = record
+
+    # Start stream pumps and monitor task.
+    if process.stdout:
+        record.reader_tasks.append(
+            asyncio.create_task(_pump_stream(process.stdout, record.stdout_chunks))
+        )
+    if process.stderr:
+        record.reader_tasks.append(
+            asyncio.create_task(_pump_stream(process.stderr, record.stderr_chunks))
+        )
+    asyncio.create_task(_monitor_task(record))
+
+    return task_id
+
+
+async def start_background_command(
+    command: str, timeout: Optional[float] = None, shell_executable: Optional[str] = None
+) -> str:
+    """Launch a background shell command and return its task id."""
+    future = _submit_to_background_loop(
+        _start_background_command(command, timeout, shell_executable)
+    )
+    return await asyncio.wrap_future(future)
+
+
+def _compute_status(task: BackgroundTask) -> str:
+    """Return a human-friendly status string."""
+    if task.killed:
+        return "killed"
+    if task.timed_out:
+        return "failed"
+    if task.exit_code is None:
+        return "running"
+    return "completed" if task.exit_code == 0 else "failed"
+
+
+def _loop_time() -> float:
+    """Return a monotonic timestamp without requiring a running event loop."""
+    try:
+        return asyncio.get_running_loop().time()
+    except RuntimeError:
+        return time.monotonic()
+
+
+def get_background_status(task_id: str, consume: bool = True) -> dict:
+    """Fetch the current status and buffered output of a background command.
+
+    If consume is True, buffered stdout/stderr are cleared after reading.
+    """
+    with _tasks_lock:
+        if task_id not in _tasks:
+            raise KeyError(f"No background task found with id '{task_id}'")
+
+        task = _tasks[task_id]
+        stdout = "".join(task.stdout_chunks)
+        stderr = "".join(task.stderr_chunks)
+
+        if consume:
+            task.stdout_chunks.clear()
+            task.stderr_chunks.clear()
+
+        return {
+            "id": task.id,
+            "command": task.command,
+            "status": _compute_status(task),
+            "stdout": stdout,
+            "stderr": stderr,
+            "exit_code": task.exit_code,
+            "timed_out": task.timed_out,
+            "killed": task.killed,
+            "duration_ms": (_loop_time() - task.start_time) * 1000.0,
+        }
+
+
+async def kill_background_task(task_id: str) -> bool:
+    """Attempt to kill a running background task."""
+    KILL_WAIT_SECONDS = 2.0
+
+    async def _kill(task_id: str) -> bool:
+        with _tasks_lock:
+            task = _tasks.get(task_id)
+        if not task:
+            return False
+
+        if task.exit_code is not None:
+            return False
+
+        try:
+            task.killed = True
+            task.process.kill()
+            try:
+                await asyncio.wait_for(task.process.wait(), timeout=KILL_WAIT_SECONDS)
+            except asyncio.TimeoutError:
+                # Best effort: force kill and don't block.
+                with contextlib.suppress(ProcessLookupError, PermissionError):
+                    task.process.kill()
+                await asyncio.wait_for(task.process.wait(), timeout=1.0)
+
+            with _tasks_lock:
+                task.exit_code = task.process.returncode or -1
+            return True
+        finally:
+            await _finalize_reader_tasks(task.reader_tasks)
+            task.done_event.set()
+
+    future = _submit_to_background_loop(_kill(task_id))
+    return await asyncio.wrap_future(future)
+
+
+def list_background_tasks() -> List[str]:
+    """Return known background task ids."""
+    with _tasks_lock:
+        return list(_tasks.keys())
+
+
+async def _shutdown_loop(loop: asyncio.AbstractEventLoop) -> None:
+    """Drain running background processes before stopping the loop."""
+    with _tasks_lock:
+        tasks = list(_tasks.values())
+        _tasks.clear()
+
+    for task in tasks:
+        try:
+            task.killed = True
+            with contextlib.suppress(ProcessLookupError):
+                task.process.kill()
+            try:
+                with contextlib.suppress(ProcessLookupError):
+                    await asyncio.wait_for(task.process.wait(), timeout=1.5)
+            except asyncio.TimeoutError:
+                with contextlib.suppress(ProcessLookupError, PermissionError):
+                    task.process.kill()
+                with contextlib.suppress(asyncio.TimeoutError, ProcessLookupError):
+                    await asyncio.wait_for(task.process.wait(), timeout=0.5)
+            task.exit_code = task.process.returncode or -1
+        except (OSError, RuntimeError, asyncio.CancelledError) as exc:
+            if not isinstance(exc, asyncio.CancelledError):
+                _safe_log_exception(
+                    "Error shutting down background task",
+                    task_id=task.id,
+                    command=task.command,
+                )
+        finally:
+            await _finalize_reader_tasks(task.reader_tasks)
+            task.done_event.set()
+
+    current = asyncio.current_task()
+    pending = [t for t in asyncio.all_tasks(loop) if t is not current]
+    for pending_task in pending:
+        pending_task.cancel()
+    if pending:
+        with contextlib.suppress(Exception):
+            await asyncio.gather(*pending, return_exceptions=True)
+
+    with contextlib.suppress(Exception):
+        await loop.shutdown_asyncgens()
+
+
+def shutdown_background_shell() -> None:
+    """Stop background tasks/loop to avoid asyncio 'Event loop is closed' warnings."""
+    global _background_loop, _background_thread
+
+    loop = _background_loop
+    thread = _background_thread
+
+    if not loop or loop.is_closed():
+        _background_loop = None
+        _background_thread = None
+        return
+
+    try:
+        if loop.is_running():
+            try:
+                fut = asyncio.run_coroutine_threadsafe(_shutdown_loop(loop), loop)
+                fut.result(timeout=3)
+            except (RuntimeError, TimeoutError, concurrent.futures.TimeoutError):
+                logger.debug("Failed to cleanly shutdown background loop", exc_info=True)
+            try:
+                loop.call_soon_threadsafe(loop.stop)
+            except (RuntimeError, OSError):
+                logger.debug("Failed to stop background loop", exc_info=True)
+        else:
+            loop.run_until_complete(_shutdown_loop(loop))
+    finally:
+        if thread and thread.is_alive():
+            thread.join(timeout=2)
+        with contextlib.suppress(Exception):
+            loop.close()
+        _background_loop = None
+        _background_thread = None
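
A minimal usage sketch of the public API added above (start_background_command, get_background_status, kill_background_task). This is not part of the package; it assumes ripperdoc 0.2.6 is installed and uses a placeholder echo/sleep command.

import asyncio

from ripperdoc.tools.background_shell import (
    get_background_status,
    kill_background_task,
    start_background_command,
)


async def main() -> None:
    # Start a long-running command; the call returns as soon as the process is spawned.
    task_id = await start_background_command("echo hello; sleep 30", timeout=120)

    # Give the stream pumps a moment, then peek at buffered output without draining it.
    await asyncio.sleep(0.5)
    snapshot = get_background_status(task_id, consume=False)
    print(snapshot["status"], repr(snapshot["stdout"]))

    # Terminate the process early; returns False if it has already exited.
    print("killed:", await kill_background_task(task_id))


asyncio.run(main())
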
ripperdoc/tools/bash_output_tool.py
@@ -0,0 +1,98 @@
+"""Tool to retrieve output from background bash tasks."""
+
+from typing import Any, AsyncGenerator, Optional
+from pydantic import BaseModel, Field
+
+from ripperdoc.core.tool import Tool, ToolUseContext, ToolResult, ValidationResult
+from ripperdoc.tools.background_shell import get_background_status
+
+
+class BashOutputInput(BaseModel):
+    """Input schema for BashOutput."""
+
+    task_id: str = Field(
+        description="Background task id returned by BashTool when run_in_background is true"
+    )
+    consume: bool = Field(
+        default=True, description="Whether to clear buffered output after reading (default: True)"
+    )
+
+
+class BashOutputData(BaseModel):
+    """Snapshot of a background task."""
+
+    task_id: str
+    command: str
+    status: str
+    stdout: str
+    stderr: str
+    exit_code: Optional[int]
+    duration_ms: float
+
+
+class BashOutputTool(Tool[BashOutputInput, BashOutputData]):
+    """Read buffered output from a background bash task."""
+
+    @property
+    def name(self) -> str:
+        return "BashOutput"
+
+    async def description(self) -> str:
+        return "Read output and status from a background bash command started with BashTool(run_in_background=True)."
+
+    async def prompt(self, safe_mode: bool = False) -> str:
+        return "Fetch buffered output and status for a background bash task by id."
+
+    @property
+    def input_schema(self) -> type[BashOutputInput]:
+        return BashOutputInput
+
+    def is_read_only(self) -> bool:
+        return True
+
+    def is_concurrency_safe(self) -> bool:
+        return True
+
+    def needs_permissions(self, input_data: Any = None) -> bool:
+        return False
+
+    async def validate_input(
+        self, input_data: BashOutputInput, context: Optional[ToolUseContext] = None
+    ) -> ValidationResult:
+        try:
+            get_background_status(input_data.task_id, consume=False)
+        except KeyError:
+            return ValidationResult(
+                result=False, message=f"No background task found with id '{input_data.task_id}'"
+            )
+        return ValidationResult(result=True)
+
+    def render_result_for_assistant(self, output: BashOutputData) -> str:
+        parts = [
+            f"status: {output.status}",
+            f"exit code: {output.exit_code if output.exit_code is not None else 'running'}",
+        ]
+        if output.stdout:
+            parts.append(f"stdout:\n{output.stdout}")
+        if output.stderr:
+            parts.append(f"stderr:\n{output.stderr}")
+        return "\n\n".join(parts)
+
+    def render_tool_use_message(self, input_data: BashOutputInput, verbose: bool = False) -> str:
+        suffix = " (consume=0)" if not input_data.consume else ""
+        return f"$ bash-output {input_data.task_id}{suffix}"
+
+    async def call(
+        self, input_data: BashOutputInput, context: ToolUseContext
+    ) -> AsyncGenerator[ToolResult, None]:
+        status = get_background_status(input_data.task_id, consume=input_data.consume)
+        output = BashOutputData(
+            task_id=status["id"],
+            command=status["command"],
+            status=status["status"],
+            stdout=status["stdout"],
+            stderr=status["stderr"],
+            exit_code=status["exit_code"],
+            duration_ms=status["duration_ms"],
+        )
+        yield ToolResult(data=output, result_for_assistant=self.render_result_for_assistant(output))
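
A small sketch of how the tool's data model and renderers fit together, using illustrative values only; it assumes the Tool base class allows BashOutputTool to be instantiated directly.

from ripperdoc.tools.bash_output_tool import BashOutputData, BashOutputInput, BashOutputTool

tool = BashOutputTool()

# How the tool invocation is displayed (consume defaults to True).
print(tool.render_tool_use_message(BashOutputInput(task_id="bash_1a2b3c4d")))

# How a status snapshot from get_background_status() is rendered for the model.
snapshot = BashOutputData(
    task_id="bash_1a2b3c4d",
    command="pytest -q",
    status="completed",
    stdout="42 passed\n",
    stderr="",
    exit_code=0,
    duration_ms=1234.5,
)
print(tool.render_result_for_assistant(snapshot))
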