htmlgraph 0.26.5__py3-none-any.whl → 0.26.7__py3-none-any.whl
This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- htmlgraph/.htmlgraph/.session-warning-state.json +1 -1
- htmlgraph/__init__.py +1 -1
- htmlgraph/api/main.py +50 -10
- htmlgraph/api/templates/dashboard-redesign.html +608 -54
- htmlgraph/api/templates/partials/activity-feed.html +21 -0
- htmlgraph/api/templates/partials/features.html +81 -12
- htmlgraph/api/templates/partials/orchestration.html +35 -0
- htmlgraph/cli/.htmlgraph/.session-warning-state.json +6 -0
- htmlgraph/cli/.htmlgraph/agents.json +72 -0
- htmlgraph/cli/__init__.py +42 -0
- htmlgraph/cli/__main__.py +6 -0
- htmlgraph/cli/analytics.py +939 -0
- htmlgraph/cli/base.py +660 -0
- htmlgraph/cli/constants.py +206 -0
- htmlgraph/cli/core.py +856 -0
- htmlgraph/cli/main.py +143 -0
- htmlgraph/cli/models.py +462 -0
- htmlgraph/cli/templates/__init__.py +1 -0
- htmlgraph/cli/templates/cost_dashboard.py +398 -0
- htmlgraph/cli/work/__init__.py +159 -0
- htmlgraph/cli/work/features.py +567 -0
- htmlgraph/cli/work/orchestration.py +675 -0
- htmlgraph/cli/work/sessions.py +465 -0
- htmlgraph/cli/work/tracks.py +485 -0
- htmlgraph/dashboard.html +6414 -634
- htmlgraph/db/schema.py +8 -3
- htmlgraph/docs/ORCHESTRATION_PATTERNS.md +20 -13
- htmlgraph/docs/README.md +2 -3
- htmlgraph/hooks/event_tracker.py +355 -26
- htmlgraph/hooks/git_commands.py +175 -0
- htmlgraph/hooks/orchestrator.py +137 -71
- htmlgraph/hooks/orchestrator_reflector.py +23 -0
- htmlgraph/hooks/pretooluse.py +29 -6
- htmlgraph/hooks/session_handler.py +28 -0
- htmlgraph/hooks/session_summary.py +391 -0
- htmlgraph/hooks/subagent_detection.py +202 -0
- htmlgraph/hooks/subagent_stop.py +71 -12
- htmlgraph/hooks/validator.py +192 -79
- htmlgraph/operations/__init__.py +18 -0
- htmlgraph/operations/initialization.py +596 -0
- htmlgraph/operations/initialization.py.backup +228 -0
- htmlgraph/orchestration/__init__.py +16 -1
- htmlgraph/orchestration/claude_launcher.py +185 -0
- htmlgraph/orchestration/command_builder.py +71 -0
- htmlgraph/orchestration/headless_spawner.py +72 -1332
- htmlgraph/orchestration/plugin_manager.py +136 -0
- htmlgraph/orchestration/prompts.py +137 -0
- htmlgraph/orchestration/spawners/__init__.py +16 -0
- htmlgraph/orchestration/spawners/base.py +194 -0
- htmlgraph/orchestration/spawners/claude.py +170 -0
- htmlgraph/orchestration/spawners/codex.py +442 -0
- htmlgraph/orchestration/spawners/copilot.py +299 -0
- htmlgraph/orchestration/spawners/gemini.py +478 -0
- htmlgraph/orchestration/subprocess_runner.py +33 -0
- htmlgraph/orchestration.md +563 -0
- htmlgraph/orchestrator-system-prompt-optimized.txt +620 -55
- htmlgraph/orchestrator_config.py +357 -0
- htmlgraph/orchestrator_mode.py +45 -12
- htmlgraph/transcript.py +16 -4
- htmlgraph-0.26.7.data/data/htmlgraph/dashboard.html +6592 -0
- {htmlgraph-0.26.5.dist-info → htmlgraph-0.26.7.dist-info}/METADATA +1 -1
- {htmlgraph-0.26.5.dist-info → htmlgraph-0.26.7.dist-info}/RECORD +68 -34
- {htmlgraph-0.26.5.dist-info → htmlgraph-0.26.7.dist-info}/entry_points.txt +1 -1
- htmlgraph/cli.py +0 -7256
- htmlgraph-0.26.5.data/data/htmlgraph/dashboard.html +0 -812
- {htmlgraph-0.26.5.data → htmlgraph-0.26.7.data}/data/htmlgraph/styles.css +0 -0
- {htmlgraph-0.26.5.data → htmlgraph-0.26.7.data}/data/htmlgraph/templates/AGENTS.md.template +0 -0
- {htmlgraph-0.26.5.data → htmlgraph-0.26.7.data}/data/htmlgraph/templates/CLAUDE.md.template +0 -0
- {htmlgraph-0.26.5.data → htmlgraph-0.26.7.data}/data/htmlgraph/templates/GEMINI.md.template +0 -0
- {htmlgraph-0.26.5.dist-info → htmlgraph-0.26.7.dist-info}/WHEEL +0 -0
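The headline change in this release pair is structural: the monolithic `htmlgraph/cli.py` (removed, -7256 lines) is replaced by the new `htmlgraph/cli/` package, and `htmlgraph/orchestration/headless_spawner.py` shrinks from roughly 1,400 lines to a thin compatibility layer over the new `htmlgraph/orchestration/spawners/` modules. The sketch below only illustrates the import surface implied by the file listing; apart from the `HeadlessSpawner`/`AIResult` re-exports confirmed in the diff further down, the exact public names are an assumption.

```python
# Hypothetical illustration of the post-0.26.7 layout implied by the file listing above.
# The headless_spawner re-exports are confirmed by the diff below; the commented-out
# import path is an assumption drawn from the new file names only.
from htmlgraph.orchestration.headless_spawner import HeadlessSpawner, AIResult  # compat facade
# from htmlgraph.orchestration.spawners import ClaudeSpawner, CodexSpawner, CopilotSpawner, GeminiSpawner
```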
htmlgraph/orchestration/headless_spawner.py

--- a/htmlgraph/orchestration/headless_spawner.py
+++ b/htmlgraph/orchestration/headless_spawner.py
@@ -1,28 +1,20 @@
-"""Headless AI spawner for multi-AI orchestration.
+"""Headless AI spawner for multi-AI orchestration.
 
-
-
-import subprocess
-import sys
-import time
-from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any
+This module provides backward compatibility by delegating to modular spawner implementations.
+"""
 
-
-from htmlgraph.orchestration.live_events import LiveEventPublisher
-from htmlgraph.sdk import SDK
+from typing import Any
 
+from .spawners import (
+    AIResult,
+    ClaudeSpawner,
+    CodexSpawner,
+    CopilotSpawner,
+    GeminiSpawner,
+)
 
-
-
-    """Result from AI CLI execution."""
-
-    success: bool
-    response: str
-    tokens_used: int | None
-    error: str | None
-    raw_output: dict | list | str | None
-    tracked_events: list[dict] | None = None  # Events tracked in HtmlGraph
+# Re-export AIResult for backward compatibility
+__all__ = ["HeadlessSpawner", "AIResult"]
 
 
 class HeadlessSpawner:
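Because the rewritten module re-exports `AIResult` and lists it in `__all__`, imports that targeted the old monolith should keep resolving. A minimal sketch of both import styles, assuming the package is installed:

```python
# Old-style import path, kept working by the re-export shown in the hunk above.
from htmlgraph.orchestration.headless_spawner import AIResult, HeadlessSpawner

# New-style import path introduced by this release (per the `from .spawners import ...` block).
from htmlgraph.orchestration.spawners import AIResult as ModularAIResult

spawner = HeadlessSpawner()  # now a thin facade over the modular spawners
```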
@@ -65,396 +57,30 @@ class HeadlessSpawner:
     """
 
     def __init__(self) -> None:
-        """Initialize spawner."""
-        self.
-
-
-
-        Get LiveEventPublisher instance for real-time WebSocket streaming.
-
-        Returns None if publisher unavailable (optional dependency).
-        """
-        if self._live_publisher is None:
-            try:
-                from htmlgraph.orchestration.live_events import LiveEventPublisher
-
-                self._live_publisher = LiveEventPublisher()
-            except Exception:
-                # Live events are optional
-                pass
-        return self._live_publisher
-
-    def _publish_live_event(
-        self,
-        event_type: str,
-        spawner_type: str,
-        **kwargs: str | int | float | bool | None,
-    ) -> None:
-        """
-        Publish a live event for WebSocket streaming.
-
-        Silently fails if publisher unavailable (optional feature).
-        """
-        publisher = self._get_live_publisher()
-        if publisher is None:
-            return
-
-        parent_event_id = os.getenv("HTMLGRAPH_PARENT_EVENT")
-
-        try:
-            if event_type == "spawner_start":
-                publisher.spawner_start(
-                    spawner_type=spawner_type,
-                    prompt=str(kwargs.get("prompt", "")),
-                    parent_event_id=parent_event_id,
-                    model=str(kwargs.get("model", "")) if kwargs.get("model") else None,
-                )
-            elif event_type == "spawner_phase":
-                progress_val = kwargs.get("progress")
-                publisher.spawner_phase(
-                    spawner_type=spawner_type,
-                    phase=str(kwargs.get("phase", "executing")),
-                    progress=int(progress_val) if progress_val is not None else None,
-                    details=str(kwargs.get("details", ""))
-                    if kwargs.get("details")
-                    else None,
-                    parent_event_id=parent_event_id,
-                )
-            elif event_type == "spawner_complete":
-                duration_val = kwargs.get("duration")
-                tokens_val = kwargs.get("tokens")
-                publisher.spawner_complete(
-                    spawner_type=spawner_type,
-                    success=bool(kwargs.get("success", False)),
-                    duration_seconds=float(duration_val)
-                    if duration_val is not None
-                    else None,
-                    response_preview=str(kwargs.get("response", ""))[:200]
-                    if kwargs.get("response")
-                    else None,
-                    tokens_used=int(tokens_val) if tokens_val is not None else None,
-                    error=str(kwargs.get("error", "")) if kwargs.get("error") else None,
-                    parent_event_id=parent_event_id,
-                )
-            elif event_type == "spawner_tool_use":
-                publisher.spawner_tool_use(
-                    spawner_type=spawner_type,
-                    tool_name=str(kwargs.get("tool_name", "unknown")),
-                    parent_event_id=parent_event_id,
-                )
-            elif event_type == "spawner_message":
-                publisher.spawner_message(
-                    spawner_type=spawner_type,
-                    message=str(kwargs.get("message", "")),
-                    role=str(kwargs.get("role", "assistant")),
-                    parent_event_id=parent_event_id,
-                )
-        except Exception:
-            # Live events should never break spawner execution
-            pass
-
-    def _get_sdk(self) -> "SDK | None":
-        """
-        Get SDK instance for HtmlGraph tracking with parent session support.
-
-        Returns None if SDK unavailable.
-        """
-        try:
-            from htmlgraph.sdk import SDK
-
-            # Read parent session context from environment
-            parent_session = os.getenv("HTMLGRAPH_PARENT_SESSION")
-            parent_agent = os.getenv("HTMLGRAPH_PARENT_AGENT")
-
-            # Create SDK with parent session context
-            sdk = SDK(
-                agent=f"spawner-{parent_agent}" if parent_agent else "spawner",
-                parent_session=parent_session,  # Pass parent session
-            )
-
-            return sdk
-
-        except Exception:
-            # SDK unavailable or not properly initialized (optional dependency)
-            # This happens in test contexts without active sessions
-            # Don't log error to avoid noise in tests
-            return None
-
-    def _parse_and_track_gemini_events(
-        self, jsonl_output: str, sdk: "SDK"
-    ) -> list[dict]:
-        """
-        Parse Gemini stream-json events and track in HtmlGraph.
-
-        Args:
-            jsonl_output: JSONL output from Gemini CLI
-            sdk: HtmlGraph SDK instance for tracking
-
-        Returns:
-            Parsed events list
-        """
-        events = []
-
-        # Get parent context for metadata
-        parent_activity = os.getenv("HTMLGRAPH_PARENT_ACTIVITY")
-        nesting_depth_str = os.getenv("HTMLGRAPH_NESTING_DEPTH", "0")
-        nesting_depth = int(nesting_depth_str) if nesting_depth_str.isdigit() else 0
-
-        for line in jsonl_output.splitlines():
-            if not line.strip():
-                continue
-
-            try:
-                event = json.loads(line)
-                events.append(event)
-
-                # Track based on event type
-                event_type = event.get("type")
-
-                try:
-                    if event_type == "tool_use":
-                        tool_name = event.get("tool_name", "unknown_tool")
-                        parameters = event.get("parameters", {})
-                        payload = {
-                            "tool_name": tool_name,
-                            "parameters": parameters,
-                        }
-                        if parent_activity:
-                            payload["parent_activity"] = parent_activity
-                        if nesting_depth > 0:
-                            payload["nesting_depth"] = nesting_depth
-                        sdk.track_activity(
-                            tool="gemini_tool_call",
-                            summary=f"Gemini called {tool_name}",
-                            payload=payload,
-                        )
-
-                    elif event_type == "tool_result":
-                        status = event.get("status", "unknown")
-                        success = status == "success"
-                        tool_id = event.get("tool_id", "unknown")
-                        payload = {"tool_id": tool_id, "status": status}
-                        if parent_activity:
-                            payload["parent_activity"] = parent_activity
-                        if nesting_depth > 0:
-                            payload["nesting_depth"] = nesting_depth
-                        sdk.track_activity(
-                            tool="gemini_tool_result",
-                            summary=f"Gemini tool result: {status}",
-                            success=success,
-                            payload=payload,
-                        )
-
-                    elif event_type == "message":
-                        role = event.get("role")
-                        if role == "assistant":
-                            content = event.get("content", "")
-                            # Truncate for summary
-                            summary = (
-                                content[:100] + "..." if len(content) > 100 else content
-                            )
-                            payload = {"role": role, "content_length": len(content)}
-                            if parent_activity:
-                                payload["parent_activity"] = parent_activity
-                            if nesting_depth > 0:
-                                payload["nesting_depth"] = nesting_depth
-                            sdk.track_activity(
-                                tool="gemini_message",
-                                summary=f"Gemini: {summary}",
-                                payload=payload,
-                            )
-
-                    elif event_type == "result":
-                        stats = event.get("stats", {})
-                        payload = {"stats": stats}
-                        if parent_activity:
-                            payload["parent_activity"] = parent_activity
-                        if nesting_depth > 0:
-                            payload["nesting_depth"] = nesting_depth
-                        sdk.track_activity(
-                            tool="gemini_completion",
-                            summary="Gemini task completed",
-                            payload=payload,
-                        )
-                except Exception:
-                    # Tracking failure should not break parsing
-                    pass
+        """Initialize spawner with modular implementations."""
+        self._gemini_spawner = GeminiSpawner()
+        self._codex_spawner = CodexSpawner()
+        self._copilot_spawner = CopilotSpawner()
+        self._claude_spawner = ClaudeSpawner()
 
-
-
-
+    # Expose internal methods for backward compatibility with tests
+    def _parse_and_track_gemini_events(self, jsonl_output: str, sdk: Any) -> list[dict]:
+        """Parse and track Gemini events (delegates to GeminiSpawner)."""
+        return self._gemini_spawner._parse_and_track_events(jsonl_output, sdk)
 
-
-
-
-        self, jsonl_output: str, sdk: "SDK"
-    ) -> list[dict]:
-        """
-        Parse Codex JSONL events and track in HtmlGraph.
-
-        Args:
-            jsonl_output: JSONL output from Codex CLI
-            sdk: HtmlGraph SDK instance for tracking
-
-        Returns:
-            Parsed events list
-        """
-        events = []
-        parse_errors = []
-
-        # Get parent context for metadata
-        parent_activity = os.getenv("HTMLGRAPH_PARENT_ACTIVITY")
-        nesting_depth_str = os.getenv("HTMLGRAPH_NESTING_DEPTH", "0")
-        nesting_depth = int(nesting_depth_str) if nesting_depth_str.isdigit() else 0
-
-        for line_num, line in enumerate(jsonl_output.splitlines(), start=1):
-            if not line.strip():
-                continue
-
-            try:
-                event = json.loads(line)
-                events.append(event)
-
-                event_type = event.get("type")
-
-                try:
-                    # Track item.started events
-                    if event_type == "item.started":
-                        item = event.get("item", {})
-                        item_type = item.get("type")
-
-                        if item_type == "command_execution":
-                            command = item.get("command", "")
-                            payload = {"command": command}
-                            if parent_activity:
-                                payload["parent_activity"] = parent_activity
-                            if nesting_depth > 0:
-                                payload["nesting_depth"] = nesting_depth
-                            sdk.track_activity(
-                                tool="codex_command",
-                                summary=f"Codex executing: {command[:80]}",
-                                payload=payload,
-                            )
-
-                    # Track item.completed events
-                    elif event_type == "item.completed":
-                        item = event.get("item", {})
-                        item_type = item.get("type")
-
-                        if item_type == "file_change":
-                            path = item.get("path", "unknown")
-                            payload = {"path": path}
-                            if parent_activity:
-                                payload["parent_activity"] = parent_activity
-                            if nesting_depth > 0:
-                                payload["nesting_depth"] = nesting_depth
-                            sdk.track_activity(
-                                tool="codex_file_change",
-                                summary=f"Codex modified: {path}",
-                                file_paths=[path],
-                                payload=payload,
-                            )
-
-                        elif item_type == "agent_message":
-                            text = item.get("text", "")
-                            summary = text[:100] + "..." if len(text) > 100 else text
-                            payload = {"text_length": len(text)}
-                            if parent_activity:
-                                payload["parent_activity"] = parent_activity
-                            if nesting_depth > 0:
-                                payload["nesting_depth"] = nesting_depth
-                            sdk.track_activity(
-                                tool="codex_message",
-                                summary=f"Codex: {summary}",
-                                payload=payload,
-                            )
-
-                    # Track turn.completed for token usage
-                    elif event_type == "turn.completed":
-                        usage = event.get("usage", {})
-                        total_tokens = sum(usage.values())
-                        payload = {"usage": usage}
-                        if parent_activity:
-                            payload["parent_activity"] = parent_activity
-                        if nesting_depth > 0:
-                            payload["nesting_depth"] = nesting_depth
-                        sdk.track_activity(
-                            tool="codex_completion",
-                            summary=f"Codex turn completed ({total_tokens} tokens)",
-                            payload=payload,
-                        )
-                except Exception:
-                    # Tracking failure should not break parsing
-                    pass
-
-            except json.JSONDecodeError as e:
-                parse_errors.append(
-                    {
-                        "line_number": line_num,
-                        "error": str(e),
-                        "content": line[:100],
-                    }
-                )
-                continue
-
-        return events
+    def _parse_and_track_codex_events(self, jsonl_output: str, sdk: Any) -> list[dict]:
+        """Parse and track Codex events (delegates to CodexSpawner)."""
+        return self._codex_spawner._parse_and_track_events(jsonl_output, sdk)
 
     def _parse_and_track_copilot_events(
-        self, prompt: str, response: str, sdk:
+        self, prompt: str, response: str, sdk: Any
     ) -> list[dict]:
-        """
-
-
-        Args:
-            prompt: Original prompt
-            response: Response from Copilot
-            sdk: HtmlGraph SDK instance for tracking
-
-        Returns:
-            Synthetic events list for consistency
-        """
-        events = []
-
-        # Get parent context for metadata
-        parent_activity = os.getenv("HTMLGRAPH_PARENT_ACTIVITY")
-        nesting_depth_str = os.getenv("HTMLGRAPH_NESTING_DEPTH", "0")
-        nesting_depth = int(nesting_depth_str) if nesting_depth_str.isdigit() else 0
+        """Parse and track Copilot events (delegates to CopilotSpawner)."""
+        return self._copilot_spawner._parse_and_track_events(prompt, response, sdk)
 
-
-
-
-            events.append(start_event)
-            payload: dict[str, str | int] = {"prompt_length": len(prompt)}
-            if parent_activity:
-                payload["parent_activity"] = parent_activity
-            if nesting_depth > 0:
-                payload["nesting_depth"] = nesting_depth
-            sdk.track_activity(
-                tool="copilot_start",
-                summary=f"Copilot started with prompt: {prompt[:80]}",
-                payload=payload,
-            )
-        except Exception:
-            pass
-
-        try:
-            # Track result
-            result_event = {"type": "copilot_result", "response": response[:100]}
-            events.append(result_event)
-            payload_result: dict[str, str | int] = {"response_length": len(response)}
-            if parent_activity:
-                payload_result["parent_activity"] = parent_activity
-            if nesting_depth > 0:
-                payload_result["nesting_depth"] = nesting_depth
-            sdk.track_activity(
-                tool="copilot_result",
-                summary=f"Copilot completed: {response[:80]}",
-                payload=payload_result,
-            )
-        except Exception:
-            pass
-
-        return events
+    def _get_sdk(self) -> Any:
+        """Get SDK instance (delegates to base spawner implementation)."""
+        return self._gemini_spawner._get_sdk()
 
     def spawn_gemini(
         self,
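The hunk above replaces several hundred lines of inline SDK setup, live-event publishing, and JSONL parsing with a facade that holds one instance of each modular spawner and forwards the legacy private helpers to them (the forwarding exists to keep existing tests working, per the inline comment). A generic sketch of that delegation pattern, with a hypothetical worker class standing in for the real spawner implementations:

```python
from typing import Any


class _GeminiWorker:
    """Stand-in for GeminiSpawner; the real class lives in htmlgraph.orchestration.spawners."""

    def _parse_and_track_events(self, jsonl_output: str, sdk: Any) -> list[dict]:
        # Toy behavior for illustration only.
        return [{"raw": line} for line in jsonl_output.splitlines() if line.strip()]


class Facade:
    """Backward-compatible shell: owns workers, forwards legacy private methods."""

    def __init__(self) -> None:
        self._gemini = _GeminiWorker()

    def _parse_and_track_gemini_events(self, jsonl_output: str, sdk: Any) -> list[dict]:
        # Same signature as before the refactor; the behavior now lives in the worker.
        return self._gemini._parse_and_track_events(jsonl_output, sdk)
```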
@@ -473,7 +99,8 @@ class HeadlessSpawner:
         Args:
             prompt: Task description for Gemini
             output_format: "json" or "stream-json" (enables real-time tracking)
-            model: Model selection (
+            model: Model selection. Default: None (recommended - lets CLI choose
+                thinking-compatible models). Older models may fail.
             include_directories: Directories to include for context. Default: None
             track_in_htmlgraph: Enable HtmlGraph activity tracking. Default: True
             timeout: Max seconds to wait
@@ -483,317 +110,16 @@ class HeadlessSpawner:
         Returns:
             AIResult with response, error, and tracked events if tracking enabled
         """
-
-        sdk: SDK | None = None
-        tracked_events: list[dict] = []
-        if track_in_htmlgraph:
-            sdk = self._get_sdk()
-
-        # Publish live event: spawner starting
-        self._publish_live_event(
-            "spawner_start",
-            "gemini",
+        return self._gemini_spawner.spawn(
             prompt=prompt,
+            output_format=output_format,
             model=model,
+            include_directories=include_directories,
+            track_in_htmlgraph=track_in_htmlgraph,
+            timeout=timeout,
+            tracker=tracker,
+            parent_event_id=parent_event_id,
         )
-        start_time = time.time()
-
-        try:
-            # Build command based on tested pattern from spike spk-4029eef3
-            cmd = ["gemini", "-p", prompt, "--output-format", output_format]
-
-            # Add model option if specified
-            if model:
-                cmd.extend(["-m", model])
-
-            # Add include directories if specified
-            if include_directories:
-                for directory in include_directories:
-                    cmd.extend(["--include-directories", directory])
-
-            # CRITICAL: Add --yolo for headless mode (auto-approve all tools)
-            cmd.append("--yolo")
-
-            # Track spawner start if SDK available
-            if sdk:
-                try:
-                    sdk.track_activity(
-                        tool="gemini_spawn_start",
-                        summary=f"Spawning Gemini: {prompt[:80]}",
-                        payload={"prompt_length": len(prompt), "model": model},
-                    )
-                except Exception:
-                    # Tracking failure should not break execution
-                    pass
-
-            # Publish live event: executing
-            self._publish_live_event(
-                "spawner_phase",
-                "gemini",
-                phase="executing",
-                details="Running Gemini CLI",
-            )
-
-            # Record subprocess invocation if tracker is available
-            subprocess_event_id = None
-            print(
-                f"DEBUG: tracker={tracker is not None}, parent_event_id={parent_event_id}",
-                file=sys.stderr,
-            )
-            if tracker and parent_event_id:
-                print(
-                    "DEBUG: Recording subprocess invocation for Gemini...",
-                    file=sys.stderr,
-                )
-                try:
-                    subprocess_event = tracker.record_tool_call(
-                        tool_name="subprocess.gemini",
-                        tool_input={"cmd": cmd},
-                        phase_event_id=parent_event_id,
-                        spawned_agent="gemini-2.0-flash",
-                    )
-                    if subprocess_event:
-                        subprocess_event_id = subprocess_event.get("event_id")
-                        print(
-                            f"DEBUG: Subprocess event created for Gemini: {subprocess_event_id}",
-                            file=sys.stderr,
-                        )
-                    else:
-                        print("DEBUG: subprocess_event was None", file=sys.stderr)
-                except Exception as e:
-                    # Tracking failure should not break execution
-                    print(
-                        f"DEBUG: Exception recording Gemini subprocess: {e}",
-                        file=sys.stderr,
-                    )
-                    pass
-            else:
-                print(
-                    f"DEBUG: Skipping Gemini subprocess tracking - tracker={tracker is not None}, parent_event_id={parent_event_id}",
-                    file=sys.stderr,
-                )
-
-            # Execute with timeout and stderr redirection
-            # Note: Cannot use capture_output with stderr parameter
-            result = subprocess.run(
-                cmd,
-                stdout=subprocess.PIPE,
-                stderr=subprocess.DEVNULL,  # Redirect stderr to avoid polluting JSON
-                text=True,
-                timeout=timeout,
-            )
-
-            # Complete subprocess invocation tracking
-            if tracker and subprocess_event_id:
-                try:
-                    tracker.complete_tool_call(
-                        event_id=subprocess_event_id,
-                        output_summary=result.stdout[:500] if result.stdout else "",
-                        success=result.returncode == 0,
-                    )
-                except Exception:
-                    # Tracking failure should not break execution
-                    pass
-
-            # Publish live event: processing response
-            self._publish_live_event(
-                "spawner_phase",
-                "gemini",
-                phase="processing",
-                details="Parsing Gemini response",
-            )
-
-            # Check for command execution errors
-            if result.returncode != 0:
-                duration = time.time() - start_time
-                self._publish_live_event(
-                    "spawner_complete",
-                    "gemini",
-                    success=False,
-                    duration=duration,
-                    error=f"CLI failed with exit code {result.returncode}",
-                )
-                return AIResult(
-                    success=False,
-                    response="",
-                    tokens_used=None,
-                    error=f"Gemini CLI failed with exit code {result.returncode}",
-                    raw_output=None,
-                    tracked_events=tracked_events,
-                )
-
-            # Handle stream-json format with real-time tracking
-            if output_format == "stream-json" and sdk:
-                try:
-                    tracked_events = self._parse_and_track_gemini_events(
-                        result.stdout, sdk
-                    )
-                    # Only use stream-json parsing if we got valid events
-                    if tracked_events:
-                        # For stream-json, we need to extract response differently
-                        # Collect all assistant message content, then check result
-                        response_text = ""
-                        for event in tracked_events:
-                            if event.get("type") == "message":
-                                # Only collect assistant messages
-                                if event.get("role") == "assistant":
-                                    content = event.get("content", "")
-                                    if content:
-                                        response_text += content
-                            elif event.get("type") == "result":
-                                # Result event may have response field (override if present)
-                                if "response" in event and event["response"]:
-                                    response_text = event["response"]
-                                # Don't break - we've already collected messages
-
-                        # Token usage from stats in result event
-                        tokens = None
-                        for event in tracked_events:
-                            if event.get("type") == "result":
-                                stats = event.get("stats", {})
-                                if stats and "models" in stats:
-                                    total_tokens = 0
-                                    for model_stats in stats["models"].values():
-                                        model_tokens = model_stats.get(
-                                            "tokens", {}
-                                        ).get("total", 0)
-                                        total_tokens += model_tokens
-                                    tokens = total_tokens if total_tokens > 0 else None
-                                break
-
-                        # Publish live event: complete
-                        duration = time.time() - start_time
-                        self._publish_live_event(
-                            "spawner_complete",
-                            "gemini",
-                            success=True,
-                            duration=duration,
-                            response=response_text,
-                            tokens=tokens,
-                        )
-                        return AIResult(
-                            success=True,
-                            response=response_text,
-                            tokens_used=tokens,
-                            error=None,
-                            raw_output={"events": tracked_events},
-                            tracked_events=tracked_events,
-                        )
-
-                except Exception:
-                    # Fall back to regular JSON parsing if tracking fails
-                    pass
-
-            # Parse JSON response (for json format or fallback)
-            try:
-                output = json.loads(result.stdout)
-            except json.JSONDecodeError as e:
-                duration = time.time() - start_time
-                self._publish_live_event(
-                    "spawner_complete",
-                    "gemini",
-                    success=False,
-                    duration=duration,
-                    error=f"Failed to parse JSON: {e}",
-                )
-                return AIResult(
-                    success=False,
-                    response="",
-                    tokens_used=None,
-                    error=f"Failed to parse JSON output: {e}",
-                    raw_output={"stdout": result.stdout},
-                    tracked_events=tracked_events,
-                )
-
-            # Extract response and token usage from parsed output
-            # Response is at top level in JSON output
-            response_text = output.get("response", "")
-
-            # Token usage is in stats.models (sum across all models)
-            tokens = None
-            stats = output.get("stats", {})
-            if stats and "models" in stats:
-                total_tokens = 0
-                for model_stats in stats["models"].values():
-                    model_tokens = model_stats.get("tokens", {}).get("total", 0)
-                    total_tokens += model_tokens
-                tokens = total_tokens if total_tokens > 0 else None
-
-            # Publish live event: complete
-            duration = time.time() - start_time
-            self._publish_live_event(
-                "spawner_complete",
-                "gemini",
-                success=True,
-                duration=duration,
-                response=response_text,
-                tokens=tokens,
-            )
-            return AIResult(
-                success=True,
-                response=response_text,
-                tokens_used=tokens,
-                error=None,
-                raw_output=output,
-                tracked_events=tracked_events,
-            )
-
-        except subprocess.TimeoutExpired as e:
-            duration = time.time() - start_time
-            self._publish_live_event(
-                "spawner_complete",
-                "gemini",
-                success=False,
-                duration=duration,
-                error=f"Timed out after {timeout} seconds",
-            )
-            return AIResult(
-                success=False,
-                response="",
-                tokens_used=None,
-                error=f"Gemini CLI timed out after {timeout} seconds",
-                raw_output={
-                    "partial_stdout": e.stdout.decode() if e.stdout else None,
-                    "partial_stderr": e.stderr.decode() if e.stderr else None,
-                }
-                if e.stdout or e.stderr
-                else None,
-                tracked_events=tracked_events,
-            )
-        except FileNotFoundError:
-            duration = time.time() - start_time
-            self._publish_live_event(
-                "spawner_complete",
-                "gemini",
-                success=False,
-                duration=duration,
-                error="CLI not found",
-            )
-            return AIResult(
-                success=False,
-                response="",
-                tokens_used=None,
-                error="Gemini CLI not found. Ensure 'gemini' is installed and in PATH.",
-                raw_output=None,
-                tracked_events=tracked_events,
-            )
-        except Exception as e:
-            duration = time.time() - start_time
-            self._publish_live_event(
-                "spawner_complete",
-                "gemini",
-                success=False,
-                duration=duration,
-                error=str(e),
-            )
-            return AIResult(
-                success=False,
-                response="",
-                tokens_used=None,
-                error=f"Unexpected error: {type(e).__name__}: {e}",
-                raw_output=None,
-                tracked_events=tracked_events,
-            )
 
     def spawn_codex(
         self,
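`spawn_gemini` keeps its public signature and still returns an `AIResult`; only the body changes, forwarding every argument to `GeminiSpawner.spawn`. A hedged usage sketch based on the parameters visible in this hunk (the concrete argument values and the timeout are illustrative assumptions):

```python
from htmlgraph.orchestration.headless_spawner import HeadlessSpawner

spawner = HeadlessSpawner()
result = spawner.spawn_gemini(
    prompt="Summarize the failing tests",
    output_format="stream-json",   # "json" or "stream-json" per the docstring above
    model=None,                    # None lets the CLI choose a thinking-compatible model
    include_directories=["src"],
    track_in_htmlgraph=True,
    timeout=300,                   # assumed value; the actual default is not shown in this hunk
)
if result.success:
    print(result.response, result.tokens_used)
else:
    print("spawn failed:", result.error)
```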
@@ -838,305 +164,24 @@ class HeadlessSpawner:
         Returns:
             AIResult with response, error, and tracked events if tracking enabled
         """
-
-        sdk: SDK | None = None
-        tracked_events: list[dict] = []
-        if track_in_htmlgraph and output_json:
-            sdk = self._get_sdk()
-
-        # Publish live event: spawner starting
-        self._publish_live_event(
-            "spawner_start",
-            "codex",
+        return self._codex_spawner.spawn(
             prompt=prompt,
+            output_json=output_json,
             model=model,
+            sandbox=sandbox,
+            full_auto=full_auto,
+            images=images,
+            output_last_message=output_last_message,
+            output_schema=output_schema,
+            skip_git_check=skip_git_check,
+            working_directory=working_directory,
+            use_oss=use_oss,
+            bypass_approvals=bypass_approvals,
+            track_in_htmlgraph=track_in_htmlgraph,
+            timeout=timeout,
+            tracker=tracker,
+            parent_event_id=parent_event_id,
         )
-        start_time = time.time()
-
-        cmd = ["codex", "exec"]
-
-        if output_json:
-            cmd.append("--json")
-
-        # Add model if specified
-        if model:
-            cmd.extend(["--model", model])
-
-        # Add sandbox mode if specified
-        if sandbox:
-            cmd.extend(["--sandbox", sandbox])
-
-        # Add full auto flag
-        if full_auto:
-            cmd.append("--full-auto")
-
-        # Add images
-        if images:
-            for image in images:
-                cmd.extend(["--image", image])
-
-        # Add output last message file if specified
-        if output_last_message:
-            cmd.extend(["--output-last-message", output_last_message])
-
-        # Add output schema if specified
-        if output_schema:
-            cmd.extend(["--output-schema", output_schema])
-
-        # Add skip git check flag
-        if skip_git_check:
-            cmd.append("--skip-git-repo-check")
-
-        # Add working directory if specified
-        if working_directory:
-            cmd.extend(["--cd", working_directory])
-
-        # Add OSS flag
-        if use_oss:
-            cmd.append("--oss")
-
-        # Add bypass approvals flag
-        if bypass_approvals:
-            cmd.append("--dangerously-bypass-approvals-and-sandbox")
-
-        # Add prompt as final argument
-        cmd.append(prompt)
-
-        # Track spawner start if SDK available
-        if sdk:
-            try:
-                sdk.track_activity(
-                    tool="codex_spawn_start",
-                    summary=f"Spawning Codex: {prompt[:80]}",
-                    payload={
-                        "prompt_length": len(prompt),
-                        "model": model,
-                        "sandbox": sandbox,
-                    },
-                )
-            except Exception:
-                # Tracking failure should not break execution
-                pass
-
-        try:
-            # Publish live event: executing
-            self._publish_live_event(
-                "spawner_phase",
-                "codex",
-                phase="executing",
-                details="Running Codex CLI",
-            )
-
-            # Record subprocess invocation if tracker is available
-            subprocess_event_id = None
-            print(
-                f"DEBUG: tracker={tracker is not None}, parent_event_id={parent_event_id}",
-                file=sys.stderr,
-            )
-            if tracker and parent_event_id:
-                print(
-                    "DEBUG: Recording subprocess invocation for Codex...",
-                    file=sys.stderr,
-                )
-                try:
-                    subprocess_event = tracker.record_tool_call(
-                        tool_name="subprocess.codex",
-                        tool_input={"cmd": cmd},
-                        phase_event_id=parent_event_id,
-                        spawned_agent="gpt-4",
-                    )
-                    if subprocess_event:
-                        subprocess_event_id = subprocess_event.get("event_id")
-                        print(
-                            f"DEBUG: Subprocess event created for Codex: {subprocess_event_id}",
-                            file=sys.stderr,
-                        )
-                    else:
-                        print("DEBUG: subprocess_event was None", file=sys.stderr)
-                except Exception as e:
-                    # Tracking failure should not break execution
-                    print(
-                        f"DEBUG: Exception recording Codex subprocess: {e}",
-                        file=sys.stderr,
-                    )
-                    pass
-            else:
-                print(
-                    f"DEBUG: Skipping Codex subprocess tracking - tracker={tracker is not None}, parent_event_id={parent_event_id}",
-                    file=sys.stderr,
-                )
-
-            result = subprocess.run(
-                cmd,
-                stdout=subprocess.PIPE,
-                stderr=subprocess.DEVNULL,
-                text=True,
-                timeout=timeout,
-            )
-
-            # Complete subprocess invocation tracking
-            if tracker and subprocess_event_id:
-                try:
-                    tracker.complete_tool_call(
-                        event_id=subprocess_event_id,
-                        output_summary=result.stdout[:500] if result.stdout else "",
-                        success=result.returncode == 0,
-                    )
-                except Exception:
-                    # Tracking failure should not break execution
-                    pass
-
-            # Publish live event: processing
-            self._publish_live_event(
-                "spawner_phase",
-                "codex",
-                phase="processing",
-                details="Parsing Codex response",
-            )
-
-            if not output_json:
-                # Plain text mode - return as-is
-                duration = time.time() - start_time
-                success = result.returncode == 0
-                self._publish_live_event(
-                    "spawner_complete",
-                    "codex",
-                    success=success,
-                    duration=duration,
-                    response=result.stdout.strip()[:200] if success else None,
-                    error="Command failed" if not success else None,
-                )
-                return AIResult(
-                    success=success,
-                    response=result.stdout.strip(),
-                    tokens_used=None,
-                    error=None if success else "Command failed",
-                    raw_output=result.stdout,
-                    tracked_events=tracked_events,
-                )
-
-            # Parse JSONL output
-            events = []
-            parse_errors = []
-
-            # Use tracking parser if SDK is available
-            if sdk:
-                tracked_events = self._parse_and_track_codex_events(result.stdout, sdk)
-                events = tracked_events
-            else:
-                # Fallback to regular parsing without tracking
-                for line_num, line in enumerate(result.stdout.splitlines(), start=1):
-                    if line.strip():
-                        try:
-                            events.append(json.loads(line))
-                        except json.JSONDecodeError as e:
-                            parse_errors.append(
-                                {
-                                    "line_number": line_num,
-                                    "error": str(e),
-                                    "content": line[
-                                        :100
-                                    ],  # First 100 chars for debugging
-                                }
-                            )
-                            continue
-
-            # Extract agent message
-            response = None
-            for event in events:
-                if event.get("type") == "item.completed":
-                    item = event.get("item", {})
-                    if item.get("type") == "agent_message":
-                        response = item.get("text")
-
-            # Extract token usage from turn.completed event
-            tokens = None
-            for event in events:
-                if event.get("type") == "turn.completed":
-                    usage = event.get("usage", {})
-                    # Sum all token types
-                    tokens = sum(usage.values())
-
-            # Publish live event: complete
-            duration = time.time() - start_time
-            success = result.returncode == 0
-            self._publish_live_event(
-                "spawner_complete",
-                "codex",
-                success=success,
-                duration=duration,
-                response=response[:200] if response else None,
-                tokens=tokens,
-                error="Command failed" if not success else None,
-            )
-            return AIResult(
-                success=success,
-                response=response or "",
-                tokens_used=tokens,
-                error=None if success else "Command failed",
-                raw_output={
-                    "events": events,
-                    "parse_errors": parse_errors if parse_errors else None,
-                },
-                tracked_events=tracked_events,
-            )
-
-        except FileNotFoundError:
-            duration = time.time() - start_time
-            self._publish_live_event(
-                "spawner_complete",
-                "codex",
-                success=False,
-                duration=duration,
-                error="CLI not found",
-            )
-            return AIResult(
-                success=False,
-                response="",
-                tokens_used=None,
-                error="Codex CLI not found. Install from: https://github.com/openai/codex",
-                raw_output=None,
-                tracked_events=tracked_events,
-            )
-        except subprocess.TimeoutExpired as e:
-            duration = time.time() - start_time
-            self._publish_live_event(
-                "spawner_complete",
-                "codex",
-                success=False,
-                duration=duration,
-                error=f"Timed out after {timeout} seconds",
-            )
-            return AIResult(
-                success=False,
-                response="",
-                tokens_used=None,
-                error=f"Timed out after {timeout} seconds",
-                raw_output={
-                    "partial_stdout": e.stdout.decode() if e.stdout else None,
-                    "partial_stderr": e.stderr.decode() if e.stderr else None,
-                }
-                if e.stdout or e.stderr
-                else None,
-                tracked_events=tracked_events,
-            )
-        except Exception as e:
-            duration = time.time() - start_time
-            self._publish_live_event(
-                "spawner_complete",
-                "codex",
-                success=False,
-                duration=duration,
-                error=str(e),
-            )
-            return AIResult(
-                success=False,
-                response="",
-                tokens_used=None,
-                error=f"Unexpected error: {type(e).__name__}: {e}",
-                raw_output=None,
-                tracked_events=tracked_events,
-            )
 
     def spawn_copilot(
         self,
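The removed Codex branch documents the JSONL event stream the old parser consumed: `item.started`/`item.completed` items (`command_execution`, `file_change`, `agent_message`) and a `turn.completed` event whose `usage` map is summed into a token count. A small standalone sketch of that extraction logic, using a made-up event stream purely for illustration:

```python
import json

# Synthetic events shaped like the ones the removed parser handled (illustrative only).
stream = "\n".join([
    json.dumps({"type": "item.completed", "item": {"type": "agent_message", "text": "done"}}),
    json.dumps({"type": "turn.completed", "usage": {"input_tokens": 120, "output_tokens": 30}}),
])

response, tokens = None, None
for line in stream.splitlines():
    event = json.loads(line)
    if event.get("type") == "item.completed" and event["item"].get("type") == "agent_message":
        response = event["item"].get("text")
    elif event.get("type") == "turn.completed":
        tokens = sum(event.get("usage", {}).values())  # sum all token types, as the old code did

print(response, tokens)  # -> done 150
```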
@@ -1165,229 +210,16 @@ class HeadlessSpawner:
         Returns:
             AIResult with response, error, and tracked events if tracking enabled
         """
-
-        sdk = None
-        tracked_events = []
-        if track_in_htmlgraph:
-            sdk = self._get_sdk()
-
-        # Publish live event: spawner starting
-        self._publish_live_event(
-            "spawner_start",
-            "copilot",
+        return self._copilot_spawner.spawn(
             prompt=prompt,
+            allow_tools=allow_tools,
+            allow_all_tools=allow_all_tools,
+            deny_tools=deny_tools,
+            track_in_htmlgraph=track_in_htmlgraph,
+            timeout=timeout,
+            tracker=tracker,
+            parent_event_id=parent_event_id,
         )
-        start_time = time.time()
-
-        cmd = ["copilot", "-p", prompt]
-
-        # Add allow all tools flag
-        if allow_all_tools:
-            cmd.append("--allow-all-tools")
-
-        # Add tool permissions
-        if allow_tools:
-            for tool in allow_tools:
-                cmd.extend(["--allow-tool", tool])
-
-        # Add denied tools
-        if deny_tools:
-            for tool in deny_tools:
-                cmd.extend(["--deny-tool", tool])
-
-        # Track spawner start if SDK available
-        if sdk:
-            try:
-                sdk.track_activity(
-                    tool="copilot_spawn_start",
-                    summary=f"Spawning Copilot: {prompt[:80]}",
-                    payload={"prompt_length": len(prompt)},
-                )
-            except Exception:
-                # Tracking failure should not break execution
-                pass
-
-        try:
-            # Publish live event: executing
-            self._publish_live_event(
-                "spawner_phase",
-                "copilot",
-                phase="executing",
-                details="Running Copilot CLI",
-            )
-
-            # Record subprocess invocation if tracker is available
-            subprocess_event_id = None
-            print(
-                f"DEBUG: tracker={tracker is not None}, parent_event_id={parent_event_id}",
-                file=sys.stderr,
-            )
-            if tracker and parent_event_id:
-                print(
-                    "DEBUG: Recording subprocess invocation for Copilot...",
-                    file=sys.stderr,
-                )
-                try:
-                    subprocess_event = tracker.record_tool_call(
-                        tool_name="subprocess.copilot",
-                        tool_input={"cmd": cmd},
-                        phase_event_id=parent_event_id,
-                        spawned_agent="github-copilot",
-                    )
-                    if subprocess_event:
-                        subprocess_event_id = subprocess_event.get("event_id")
-                        print(
-                            f"DEBUG: Subprocess event created for Copilot: {subprocess_event_id}",
-                            file=sys.stderr,
-                        )
-                    else:
-                        print("DEBUG: subprocess_event was None", file=sys.stderr)
-                except Exception as e:
-                    # Tracking failure should not break execution
-                    print(
-                        f"DEBUG: Exception recording Copilot subprocess: {e}",
-                        file=sys.stderr,
-                    )
-                    pass
-            else:
-                print(
-                    f"DEBUG: Skipping Copilot subprocess tracking - tracker={tracker is not None}, parent_event_id={parent_event_id}",
-                    file=sys.stderr,
-                )
-
-            result = subprocess.run(
-                cmd,
-                capture_output=True,
-                text=True,
-                timeout=timeout,
-            )
-
-            # Complete subprocess invocation tracking
-            if tracker and subprocess_event_id:
-                try:
-                    tracker.complete_tool_call(
-                        event_id=subprocess_event_id,
-                        output_summary=result.stdout[:500] if result.stdout else "",
-                        success=result.returncode == 0,
-                    )
-                except Exception:
-                    # Tracking failure should not break execution
-                    pass
-
-            # Publish live event: processing
-            self._publish_live_event(
-                "spawner_phase",
-                "copilot",
-                phase="processing",
-                details="Parsing Copilot response",
-            )
-
-            # Parse output: response is before stats block
-            lines = result.stdout.split("\n")
-
-            # Find where stats start (look for "Total usage est:" or "Usage by model")
-            stats_start = len(lines)
-            for i, line in enumerate(lines):
-                if "Total usage est" in line or "Usage by model" in line:
-                    stats_start = i
-                    break
-
-            # Response is everything before stats
-            response = "\n".join(lines[:stats_start]).strip()
-
-            # Try to extract token count from stats
-            tokens = None
-            for line in lines[stats_start:]:
-                # Look for token counts like "25.8k input, 5 output"
-                if "input" in line and "output" in line:
-                    # Simple extraction: just note we found stats
-                    # TODO: More sophisticated parsing if needed
-                    tokens = 0  # Placeholder
-                    break
-
-            # Track Copilot execution if SDK available
-            if sdk:
-                tracked_events = self._parse_and_track_copilot_events(
-                    prompt, response, sdk
-                )
-
-            # Publish live event: complete
-            duration = time.time() - start_time
-            success = result.returncode == 0
-            self._publish_live_event(
-                "spawner_complete",
-                "copilot",
-                success=success,
-                duration=duration,
-                response=response[:200] if response else None,
-                tokens=tokens,
-                error=result.stderr if not success else None,
-            )
-            return AIResult(
-                success=success,
-                response=response,
-                tokens_used=tokens,
-                error=None if success else result.stderr,
-                raw_output=result.stdout,
-                tracked_events=tracked_events,
-            )
-
-        except FileNotFoundError:
-            duration = time.time() - start_time
-            self._publish_live_event(
-                "spawner_complete",
-                "copilot",
-                success=False,
-                duration=duration,
-                error="CLI not found",
-            )
-            return AIResult(
-                success=False,
-                response="",
-                tokens_used=None,
-                error="Copilot CLI not found. Install from: https://docs.github.com/en/copilot/using-github-copilot/using-github-copilot-in-the-command-line",
-                raw_output=None,
-                tracked_events=tracked_events,
-            )
-        except subprocess.TimeoutExpired as e:
-            duration = time.time() - start_time
-            self._publish_live_event(
-                "spawner_complete",
-                "copilot",
-                success=False,
-                duration=duration,
-                error=f"Timed out after {timeout} seconds",
-            )
-            return AIResult(
-                success=False,
-                response="",
-                tokens_used=None,
-                error=f"Timed out after {timeout} seconds",
-                raw_output={
-                    "partial_stdout": e.stdout.decode() if e.stdout else None,
-                    "partial_stderr": e.stderr.decode() if e.stderr else None,
-                }
-                if e.stdout or e.stderr
-                else None,
-                tracked_events=tracked_events,
-            )
-        except Exception as e:
-            duration = time.time() - start_time
-            self._publish_live_event(
-                "spawner_complete",
-                "copilot",
-                success=False,
-                duration=duration,
-                error=str(e),
-            )
-            return AIResult(
-                success=False,
-                response="",
-                tokens_used=None,
-                error=f"Unexpected error: {type(e).__name__}: {e}",
-                raw_output=None,
-                tracked_events=tracked_events,
-            )
 
     def spawn_claude(
         self,
@@ -1434,104 +266,12 @@ class HeadlessSpawner:
|
|
|
1434
266
|
... print(result.response) # "4"
|
|
1435
267
|
... print(f"Cost: ${result.raw_output['total_cost_usd']}")
|
|
1436
268
|
"""
|
|
1437
|
-
|
|
1438
|
-
|
|
1439
|
-
|
|
1440
|
-
|
|
1441
|
-
|
|
1442
|
-
|
|
1443
|
-
|
|
1444
|
-
|
|
1445
|
-
|
|
1446
|
-
if resume:
|
|
1447
|
-
cmd.extend(["--resume", resume])
|
|
1448
|
-
|
|
1449
|
-
# Add verbose flag
|
|
1450
|
-
if verbose:
|
|
1451
|
-
cmd.append("--verbose")
|
|
1452
|
-
|
|
1453
|
-
# Add extra args
|
|
1454
|
-
if extra_args:
|
|
1455
|
-
cmd.extend(extra_args)
|
|
1456
|
-
|
|
1457
|
-
# Use -- separator to ensure prompt isn't consumed by variadic args
|
|
1458
|
-
cmd.append("--")
|
|
1459
|
-
cmd.append(prompt)
|
|
1460
|
-
|
|
1461
|
-
try:
|
|
1462
|
-
result = subprocess.run(
|
|
1463
|
-
cmd,
|
|
1464
|
-
capture_output=True,
|
|
1465
|
-
text=True,
|
|
1466
|
-
timeout=timeout,
|
|
1467
|
-
)
|
|
1468
|
-
|
|
1469
|
-
if output_format == "json":
|
|
1470
|
-
# Parse JSON output
|
|
1471
|
-
try:
|
|
1472
|
-
output = json.loads(result.stdout)
|
|
1473
|
-
except json.JSONDecodeError as e:
|
|
1474
|
-
return AIResult(
|
|
1475
|
-
success=False,
|
|
1476
|
-
response="",
|
|
1477
|
-
tokens_used=None,
|
|
1478
|
-
error=f"Failed to parse JSON output: {e}",
|
|
1479
|
-
raw_output=result.stdout,
|
|
1480
|
-
)
|
|
1481
|
-
|
|
1482
|
-
# Extract result and metadata
|
|
1483
|
-
usage = output.get("usage", {})
|
|
1484
|
-
tokens = (
|
|
1485
|
-
usage.get("input_tokens", 0)
|
|
1486
|
-
+ usage.get("cache_creation_input_tokens", 0)
|
|
1487
|
-
+ usage.get("cache_read_input_tokens", 0)
|
|
1488
|
-
+ usage.get("output_tokens", 0)
|
|
1489
|
-
)
|
|
1490
|
-
|
|
1491
|
-
return AIResult(
|
|
1492
|
-
success=output.get("type") == "result"
|
|
1493
|
-
and not output.get("is_error"),
|
|
1494
|
-
response=output.get("result", ""),
|
|
1495
|
-
tokens_used=tokens,
|
|
1496
|
-
error=output.get("error") if output.get("is_error") else None,
|
|
1497
|
-
raw_output=output,
|
|
1498
|
-
)
|
|
1499
|
-
else:
|
|
1500
|
-
# Plain text output
|
|
1501
|
-
return AIResult(
|
|
1502
|
-
success=result.returncode == 0,
|
|
1503
|
-
response=result.stdout.strip(),
|
|
1504
|
-
tokens_used=None,
|
|
1505
|
-
error=None if result.returncode == 0 else result.stderr,
|
|
1506
|
-
raw_output=result.stdout,
|
|
1507
|
-
)
|
|
1508
|
-
|
|
1509
|
-
except FileNotFoundError:
|
|
1510
|
-
return AIResult(
|
|
1511
|
-
success=False,
|
|
1512
|
-
response="",
|
|
1513
|
-
tokens_used=None,
|
|
1514
|
-
error="Claude CLI not found. Install Claude Code from: https://claude.com/claude-code",
|
|
1515
|
-
raw_output=None,
|
|
1516
|
-
)
|
|
1517
|
-
except subprocess.TimeoutExpired as e:
|
|
1518
|
-
return AIResult(
|
|
1519
|
-
success=False,
|
|
1520
|
-
response="",
|
|
1521
|
-
tokens_used=None,
|
|
1522
|
-
error=f"Timed out after {timeout} seconds",
|
|
1523
|
-
raw_output={
|
|
1524
|
-
"partial_stdout": e.stdout.decode() if e.stdout else None,
|
|
1525
|
-
"partial_stderr": e.stderr.decode() if e.stderr else None,
|
|
1526
|
-
}
|
|
1527
|
-
if e.stdout or e.stderr
|
|
1528
|
-
else None,
|
|
1529
|
-
)
|
|
1530
|
-
except Exception as e:
|
|
1531
|
-
return AIResult(
|
|
1532
|
-
success=False,
|
|
1533
|
-
response="",
|
|
1534
|
-
tokens_used=None,
|
|
1535
|
-
error=f"Unexpected error: {type(e).__name__}: {e}",
|
|
1536
|
-
raw_output=None,
|
|
1537
|
-
)
|
|
269
|
+
return self._claude_spawner.spawn(
|
|
270
|
+
prompt=prompt,
|
|
271
|
+
output_format=output_format,
|
|
272
|
+
permission_mode=permission_mode,
|
|
273
|
+
resume=resume,
|
|
274
|
+
verbose=verbose,
|
|
275
|
+
timeout=timeout,
|
|
276
|
+
extra_args=extra_args,
|
|
277
|
+
)
|