gobby-0.2.5-py3-none-any.whl → gobby-0.2.6-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- gobby/adapters/claude_code.py +13 -4
- gobby/adapters/codex.py +43 -3
- gobby/agents/runner.py +8 -0
- gobby/cli/__init__.py +6 -0
- gobby/cli/clones.py +419 -0
- gobby/cli/conductor.py +266 -0
- gobby/cli/installers/antigravity.py +3 -9
- gobby/cli/installers/claude.py +9 -9
- gobby/cli/installers/codex.py +2 -8
- gobby/cli/installers/gemini.py +2 -8
- gobby/cli/installers/shared.py +71 -8
- gobby/cli/skills.py +858 -0
- gobby/cli/tasks/ai.py +0 -440
- gobby/cli/tasks/crud.py +44 -6
- gobby/cli/tasks/main.py +0 -4
- gobby/cli/tui.py +2 -2
- gobby/cli/utils.py +3 -3
- gobby/clones/__init__.py +13 -0
- gobby/clones/git.py +547 -0
- gobby/conductor/__init__.py +16 -0
- gobby/conductor/alerts.py +135 -0
- gobby/conductor/loop.py +164 -0
- gobby/conductor/monitors/__init__.py +11 -0
- gobby/conductor/monitors/agents.py +116 -0
- gobby/conductor/monitors/tasks.py +155 -0
- gobby/conductor/pricing.py +234 -0
- gobby/conductor/token_tracker.py +160 -0
- gobby/config/app.py +63 -1
- gobby/config/search.py +110 -0
- gobby/config/servers.py +1 -1
- gobby/config/skills.py +43 -0
- gobby/config/tasks.py +6 -14
- gobby/hooks/event_handlers.py +145 -2
- gobby/hooks/hook_manager.py +48 -2
- gobby/hooks/skill_manager.py +130 -0
- gobby/install/claude/hooks/hook_dispatcher.py +4 -4
- gobby/install/codex/hooks/hook_dispatcher.py +1 -1
- gobby/install/gemini/hooks/hook_dispatcher.py +87 -12
- gobby/llm/claude.py +22 -34
- gobby/llm/claude_executor.py +46 -256
- gobby/llm/codex_executor.py +59 -291
- gobby/llm/executor.py +21 -0
- gobby/llm/gemini.py +134 -110
- gobby/llm/litellm_executor.py +143 -6
- gobby/llm/resolver.py +95 -33
- gobby/mcp_proxy/instructions.py +54 -0
- gobby/mcp_proxy/models.py +15 -0
- gobby/mcp_proxy/registries.py +68 -5
- gobby/mcp_proxy/server.py +33 -3
- gobby/mcp_proxy/services/tool_proxy.py +81 -1
- gobby/mcp_proxy/stdio.py +2 -1
- gobby/mcp_proxy/tools/__init__.py +0 -2
- gobby/mcp_proxy/tools/agent_messaging.py +317 -0
- gobby/mcp_proxy/tools/clones.py +903 -0
- gobby/mcp_proxy/tools/memory.py +1 -24
- gobby/mcp_proxy/tools/metrics.py +65 -1
- gobby/mcp_proxy/tools/orchestration/__init__.py +3 -0
- gobby/mcp_proxy/tools/orchestration/cleanup.py +151 -0
- gobby/mcp_proxy/tools/orchestration/wait.py +467 -0
- gobby/mcp_proxy/tools/session_messages.py +1 -2
- gobby/mcp_proxy/tools/skills/__init__.py +631 -0
- gobby/mcp_proxy/tools/task_orchestration.py +7 -0
- gobby/mcp_proxy/tools/task_readiness.py +14 -0
- gobby/mcp_proxy/tools/task_sync.py +1 -1
- gobby/mcp_proxy/tools/tasks/_context.py +0 -20
- gobby/mcp_proxy/tools/tasks/_crud.py +91 -4
- gobby/mcp_proxy/tools/tasks/_expansion.py +348 -0
- gobby/mcp_proxy/tools/tasks/_factory.py +6 -16
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +60 -29
- gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +18 -29
- gobby/mcp_proxy/tools/workflows.py +1 -1
- gobby/mcp_proxy/tools/worktrees.py +5 -0
- gobby/memory/backends/__init__.py +6 -1
- gobby/memory/backends/mem0.py +6 -1
- gobby/memory/extractor.py +477 -0
- gobby/memory/manager.py +11 -2
- gobby/prompts/defaults/handoff/compact.md +63 -0
- gobby/prompts/defaults/handoff/session_end.md +57 -0
- gobby/prompts/defaults/memory/extract.md +61 -0
- gobby/runner.py +37 -16
- gobby/search/__init__.py +48 -6
- gobby/search/backends/__init__.py +159 -0
- gobby/search/backends/embedding.py +225 -0
- gobby/search/embeddings.py +238 -0
- gobby/search/models.py +148 -0
- gobby/search/unified.py +496 -0
- gobby/servers/http.py +23 -8
- gobby/servers/routes/admin.py +280 -0
- gobby/servers/routes/mcp/tools.py +241 -52
- gobby/servers/websocket.py +2 -2
- gobby/sessions/analyzer.py +2 -0
- gobby/sessions/transcripts/base.py +1 -0
- gobby/sessions/transcripts/claude.py +64 -5
- gobby/skills/__init__.py +91 -0
- gobby/skills/loader.py +685 -0
- gobby/skills/manager.py +384 -0
- gobby/skills/parser.py +258 -0
- gobby/skills/search.py +463 -0
- gobby/skills/sync.py +119 -0
- gobby/skills/updater.py +385 -0
- gobby/skills/validator.py +368 -0
- gobby/storage/clones.py +378 -0
- gobby/storage/database.py +1 -1
- gobby/storage/memories.py +43 -13
- gobby/storage/migrations.py +180 -6
- gobby/storage/sessions.py +73 -0
- gobby/storage/skills.py +749 -0
- gobby/storage/tasks/_crud.py +4 -4
- gobby/storage/tasks/_lifecycle.py +41 -6
- gobby/storage/tasks/_manager.py +14 -5
- gobby/storage/tasks/_models.py +8 -3
- gobby/sync/memories.py +39 -4
- gobby/sync/tasks.py +83 -6
- gobby/tasks/__init__.py +1 -2
- gobby/tasks/validation.py +24 -15
- gobby/tui/api_client.py +4 -7
- gobby/tui/app.py +5 -3
- gobby/tui/screens/orchestrator.py +1 -2
- gobby/tui/screens/tasks.py +2 -4
- gobby/tui/ws_client.py +1 -1
- gobby/utils/daemon_client.py +2 -2
- gobby/workflows/actions.py +84 -2
- gobby/workflows/context_actions.py +43 -0
- gobby/workflows/detection_helpers.py +115 -31
- gobby/workflows/engine.py +13 -2
- gobby/workflows/lifecycle_evaluator.py +29 -1
- gobby/workflows/loader.py +19 -6
- gobby/workflows/memory_actions.py +74 -0
- gobby/workflows/summary_actions.py +17 -0
- gobby/workflows/task_enforcement_actions.py +448 -6
- {gobby-0.2.5.dist-info → gobby-0.2.6.dist-info}/METADATA +82 -21
- {gobby-0.2.5.dist-info → gobby-0.2.6.dist-info}/RECORD +136 -107
- gobby/install/codex/prompts/forget.md +0 -7
- gobby/install/codex/prompts/memories.md +0 -7
- gobby/install/codex/prompts/recall.md +0 -7
- gobby/install/codex/prompts/remember.md +0 -13
- gobby/llm/gemini_executor.py +0 -339
- gobby/mcp_proxy/tools/task_expansion.py +0 -591
- gobby/tasks/context.py +0 -747
- gobby/tasks/criteria.py +0 -342
- gobby/tasks/expansion.py +0 -626
- gobby/tasks/prompts/expand.py +0 -327
- gobby/tasks/research.py +0 -421
- gobby/tasks/tdd.py +0 -352
- {gobby-0.2.5.dist-info → gobby-0.2.6.dist-info}/WHEEL +0 -0
- {gobby-0.2.5.dist-info → gobby-0.2.6.dist-info}/entry_points.txt +0 -0
- {gobby-0.2.5.dist-info → gobby-0.2.6.dist-info}/licenses/LICENSE.md +0 -0
- {gobby-0.2.5.dist-info → gobby-0.2.6.dist-info}/top_level.txt +0 -0
gobby/mcp_proxy/tools/clones.py (new file)

@@ -0,0 +1,903 @@
"""
Internal MCP tools for Gobby Clone Management.

Exposes functionality for:
- Creating git clones for isolated development
- Managing clone lifecycle (get, list, delete)
- Syncing clones with remote repositories

These tools are registered with the InternalToolRegistry and accessed
via the downstream proxy pattern (call_tool, list_tools, get_tool_schema).
"""

from __future__ import annotations

import logging
from pathlib import Path
from typing import TYPE_CHECKING, Any, Literal

from gobby.mcp_proxy.tools.internal import InternalToolRegistry

if TYPE_CHECKING:
    from gobby.agents.runner import AgentRunner
    from gobby.clones.git import CloneGitManager
    from gobby.storage.clones import LocalCloneManager

logger = logging.getLogger(__name__)


def create_clones_registry(
    clone_storage: LocalCloneManager,
    git_manager: CloneGitManager,
    project_id: str,
    agent_runner: AgentRunner | None = None,
) -> InternalToolRegistry:
    """
    Create the gobby-clones MCP server registry.

    Args:
        clone_storage: Clone storage manager for CRUD operations
        git_manager: Git manager for clone operations
        project_id: Default project ID for new clones
        agent_runner: Optional agent runner for spawning agents in clones

    Returns:
        InternalToolRegistry with clone management tools
    """
    registry = InternalToolRegistry(
        name="gobby-clones",
        description="Git clone management for isolated development",
    )

    # ===== create_clone =====
    async def create_clone(
        branch_name: str,
        clone_path: str,
        remote_url: str | None = None,
        task_id: str | None = None,
        base_branch: str = "main",
        depth: int = 1,
    ) -> dict[str, Any]:
        """
        Create a new git clone.

        Args:
            branch_name: Branch to clone
            clone_path: Path where clone will be created
            remote_url: Remote URL (defaults to origin of parent repo)
            task_id: Optional task ID to link
            base_branch: Base branch for the clone
            depth: Clone depth (default: 1 for shallow)

        Returns:
            Dict with clone info or error
        """
        try:
            # Get remote URL if not provided
            if not remote_url:
                remote_url = git_manager.get_remote_url()
                if not remote_url:
                    return {
                        "success": False,
                        "error": "No remote URL provided and could not get from repository",
                    }

            # Create the clone
            result = git_manager.shallow_clone(
                remote_url=remote_url,
                clone_path=clone_path,
                branch=branch_name,
                depth=depth,
            )

            if not result.success:
                return {
                    "success": False,
                    "error": f"Clone failed: {result.error or result.message}",
                }

            # Store clone record
            clone = clone_storage.create(
                project_id=project_id,
                branch_name=branch_name,
                clone_path=clone_path,
                base_branch=base_branch,
                task_id=task_id,
                remote_url=remote_url,
            )

            return {
                "success": True,
                "clone": clone.to_dict(),
                "message": f"Created clone at {clone_path}",
            }

        except Exception as e:
            logger.error(f"Error creating clone: {e}")
            return {
                "success": False,
                "error": str(e),
            }

    registry.register(
        name="create_clone",
        description="Create a new git clone for isolated development",
        input_schema={
            "type": "object",
            "properties": {
                "branch_name": {
                    "type": "string",
                    "description": "Branch to clone",
                },
                "clone_path": {
                    "type": "string",
                    "description": "Path where clone will be created",
                },
                "remote_url": {
                    "type": "string",
                    "description": "Remote URL (defaults to origin of parent repo)",
                },
                "task_id": {
                    "type": "string",
                    "description": "Optional task ID to link",
                },
                "base_branch": {
                    "type": "string",
                    "description": "Base branch for the clone",
                    "default": "main",
                },
                "depth": {
                    "type": "integer",
                    "description": "Clone depth (default: 1 for shallow)",
                    "default": 1,
                },
            },
            "required": ["branch_name", "clone_path"],
        },
        func=create_clone,
    )

    # ===== get_clone =====
    async def get_clone(clone_id: str) -> dict[str, Any]:
        """
        Get clone by ID.

        Args:
            clone_id: Clone ID

        Returns:
            Dict with clone info or error
        """
        clone = clone_storage.get(clone_id)
        if not clone:
            return {
                "success": False,
                "error": f"Clone not found: {clone_id}",
            }

        return {
            "success": True,
            "clone": clone.to_dict(),
        }

    registry.register(
        name="get_clone",
        description="Get clone by ID",
        input_schema={
            "type": "object",
            "properties": {
                "clone_id": {
                    "type": "string",
                    "description": "Clone ID",
                },
            },
            "required": ["clone_id"],
        },
        func=get_clone,
    )

    # ===== list_clones =====
    async def list_clones(
        status: str | None = None,
        limit: int = 50,
    ) -> dict[str, Any]:
        """
        List clones with optional filters.

        Args:
            status: Filter by status (active, syncing, stale, cleanup)
            limit: Maximum number of results

        Returns:
            Dict with list of clones
        """
        clones = clone_storage.list_clones(
            project_id=project_id,
            status=status,
            limit=limit,
        )

        return {
            "success": True,
            "clones": [c.to_dict() for c in clones],
            "count": len(clones),
        }

    registry.register(
        name="list_clones",
        description="List clones with optional status filter",
        input_schema={
            "type": "object",
            "properties": {
                "status": {
                    "type": "string",
                    "description": "Filter by status (active, syncing, stale, cleanup)",
                    "enum": ["active", "syncing", "stale", "cleanup"],
                },
                "limit": {
                    "type": "integer",
                    "description": "Maximum number of results",
                    "default": 50,
                },
            },
        },
        func=list_clones,
    )

    # ===== delete_clone =====
    async def delete_clone(
        clone_id: str,
        force: bool = False,
    ) -> dict[str, Any]:
        """
        Delete a clone.

        Args:
            clone_id: Clone ID to delete
            force: Force deletion even if there are uncommitted changes

        Returns:
            Dict with success status
        """
        clone = clone_storage.get(clone_id)
        if not clone:
            return {
                "success": False,
                "error": f"Clone not found: {clone_id}",
            }

        # Store clone info for potential rollback
        clone_path = clone.clone_path

        # Delete the database record first (can be rolled back more easily)
        try:
            clone_storage.delete(clone_id)
        except Exception as e:
            logger.error(f"Failed to delete clone record {clone_id}: {e}")
            return {
                "success": False,
                "error": f"Failed to delete clone record: {e}",
            }

        # Delete the files
        result = git_manager.delete_clone(clone_path, force=force)
        if not result.success:
            # Rollback: recreate the clone record since file deletion failed
            logger.error(
                f"Failed to delete clone files for {clone_id}, "
                f"attempting to restore record: {result.error or result.message}"
            )
            try:
                clone_storage.create(
                    project_id=clone.project_id,
                    branch_name=clone.branch_name,
                    clone_path=clone_path,
                    base_branch=clone.base_branch,
                    task_id=clone.task_id,
                    remote_url=clone.remote_url,
                )
                logger.info(f"Restored clone record for {clone_id} after file deletion failure")
            except Exception as restore_error:
                logger.error(
                    f"Failed to restore clone record {clone_id}: {restore_error}. "
                    f"Clone is now orphaned in database."
                )
            return {
                "success": False,
                "error": f"Failed to delete clone files: {result.error or result.message}",
            }

        return {
            "success": True,
            "message": f"Deleted clone {clone_id}",
        }

    registry.register(
        name="delete_clone",
        description="Delete a clone and its files",
        input_schema={
            "type": "object",
            "properties": {
                "clone_id": {
                    "type": "string",
                    "description": "Clone ID to delete",
                },
                "force": {
                    "type": "boolean",
                    "description": "Force deletion even with uncommitted changes",
                    "default": False,
                },
            },
            "required": ["clone_id"],
        },
        func=delete_clone,
    )

    # ===== sync_clone =====
    async def sync_clone(
        clone_id: str,
        direction: Literal["pull", "push", "both"] = "pull",
    ) -> dict[str, Any]:
        """
        Sync a clone with its remote.

        Args:
            clone_id: Clone ID to sync
            direction: Sync direction (pull, push, or both)

        Returns:
            Dict with sync result
        """
        clone = clone_storage.get(clone_id)
        if not clone:
            return {
                "success": False,
                "error": f"Clone not found: {clone_id}",
            }

        # Mark as syncing
        clone_storage.mark_syncing(clone_id)

        try:
            result = git_manager.sync_clone(
                clone_path=clone.clone_path,
                direction=direction,
            )

            if result.success:
                # Record successful sync and mark as active
                clone_storage.record_sync(clone_id)
                clone_storage.update(clone_id, status="active")
                return {
                    "success": True,
                    "message": f"Synced clone {clone_id} ({direction})",
                }
            else:
                return {
                    "success": False,
                    "error": f"Sync failed: {result.error or result.message}",
                }

        except Exception as e:
            return {
                "success": False,
                "error": str(e),
            }
        finally:
            # Ensure status is reset to active if record_sync didn't complete
            clone = clone_storage.get(clone_id)
            if clone and clone.status == "syncing":
                clone_storage.update(clone_id, status="active")

    registry.register(
        name="sync_clone",
        description="Sync a clone with its remote repository",
        input_schema={
            "type": "object",
            "properties": {
                "clone_id": {
                    "type": "string",
                    "description": "Clone ID to sync",
                },
                "direction": {
                    "type": "string",
                    "description": "Sync direction",
                    "enum": ["pull", "push", "both"],
                    "default": "pull",
                },
            },
            "required": ["clone_id"],
        },
        func=sync_clone,
    )

    # ===== merge_clone_to_target =====
    async def merge_clone_to_target(
        clone_id: str,
        target_branch: str = "main",
    ) -> dict[str, Any]:
        """
        Merge clone branch to target branch in main repository.

        Performs:
        1. Push clone changes to remote (sync_clone push)
        2. Fetch branch in main repo
        3. Attempt merge to target branch

        On success, sets cleanup_after to 7 days from now.

        Args:
            clone_id: Clone ID to merge
            target_branch: Target branch to merge into (default: main)

        Returns:
            Dict with merge result and conflict info if any
        """
        from datetime import UTC, datetime, timedelta

        clone = clone_storage.get(clone_id)
        if not clone:
            return {
                "success": False,
                "error": f"Clone not found: {clone_id}",
            }

        # Step 1: Push clone changes to remote
        clone_storage.mark_syncing(clone_id)
        sync_result = git_manager.sync_clone(
            clone_path=clone.clone_path,
            direction="push",
        )

        if not sync_result.success:
            clone_storage.update(clone_id, status="active")
            return {
                "success": False,
                "error": f"Sync failed: {sync_result.error or sync_result.message}",
                "step": "sync",
            }

        clone_storage.record_sync(clone_id)

        # Step 2: Merge in main repo
        merge_result = git_manager.merge_branch(
            source_branch=clone.branch_name,
            target_branch=target_branch,
        )

        if not merge_result.success:
            # Check for conflicts
            if merge_result.error == "merge_conflict":
                conflicted_files = merge_result.output.split("\n") if merge_result.output else []
                return {
                    "success": False,
                    "has_conflicts": True,
                    "conflicted_files": conflicted_files,
                    "error": merge_result.message,
                    "step": "merge",
                    "message": (
                        f"Merge conflicts detected in {len(conflicted_files)} files. "
                        "Use gobby-merge tools to resolve."
                    ),
                }

            return {
                "success": False,
                "has_conflicts": False,
                "error": merge_result.error or merge_result.message,
                "step": "merge",
            }

        # Step 3: Success - set cleanup_after
        cleanup_after = (datetime.now(UTC) + timedelta(days=7)).isoformat()
        clone_storage.update(clone_id, cleanup_after=cleanup_after)

        return {
            "success": True,
            "message": f"Successfully merged {clone.branch_name} into {target_branch}",
            "cleanup_after": cleanup_after,
        }

    registry.register(
        name="merge_clone_to_target",
        description="Merge clone branch to target branch in main repository",
        input_schema={
            "type": "object",
            "properties": {
                "clone_id": {
                    "type": "string",
                    "description": "Clone ID to merge",
                },
                "target_branch": {
                    "type": "string",
                    "description": "Target branch to merge into",
                    "default": "main",
                },
            },
            "required": ["clone_id"],
        },
        func=merge_clone_to_target,
    )

    # ===== spawn_agent_in_clone =====
    async def spawn_agent_in_clone(
        prompt: str,
        branch_name: str,
        parent_session_id: str | None = None,
        task_id: str | None = None,
        base_branch: str = "main",
        clone_path: str | None = None,
        mode: str = "terminal",
        terminal: str = "auto",
        provider: Literal["claude", "gemini", "codex", "antigravity"] = "claude",
        model: str | None = None,
        workflow: str | None = None,
        timeout: float = 120.0,
        max_turns: int = 10,
    ) -> dict[str, Any]:
        """
        Create a clone (if needed) and spawn an agent in it.

        This combines clone creation with agent spawning for isolated development.
        Unlike worktrees, clones are full repository copies that can be worked on
        independently without affecting the main repository.

        Args:
            prompt: The task/prompt for the agent.
            branch_name: Name for the branch in the clone.
            parent_session_id: Parent session ID for context (required).
            task_id: Optional task ID to link to this clone.
            base_branch: Branch to clone from (default: main).
            clone_path: Optional custom path for the clone.
            mode: Execution mode (terminal, embedded, headless).
            terminal: Terminal for terminal/embedded modes (auto, ghostty, etc.).
            provider: LLM provider (claude, gemini, etc.).
            model: Optional model override.
            workflow: Workflow name to execute.
            timeout: Execution timeout in seconds (default: 120).
            max_turns: Maximum turns (default: 10).

        Returns:
            Dict with clone_id, run_id, and status.
        """
        if agent_runner is None:
            return {
                "success": False,
                "error": "Agent runner not configured. Cannot spawn agent.",
            }

        if parent_session_id is None:
            return {
                "success": False,
                "error": "parent_session_id is required for agent spawning.",
            }

        # Handle mode aliases and validation
        if mode == "interactive":
            mode = "terminal"

        valid_modes = ["terminal", "embedded", "headless"]
        if mode not in valid_modes:
            return {
                "success": False,
                "error": (
                    f"Invalid mode '{mode}'. Must be one of: {', '.join(valid_modes)}. "
                    f"Note: 'in_process' mode is not supported for spawn_agent_in_clone."
                ),
            }

        # Normalize terminal parameter to lowercase
        if isinstance(terminal, str):
            terminal = terminal.lower()

        # Check spawn depth limit
        can_spawn, reason, _depth = agent_runner.can_spawn(parent_session_id)
        if not can_spawn:
            return {
                "success": False,
                "error": reason,
            }

        # Check if clone already exists for this branch
        existing = clone_storage.get_by_branch(project_id, branch_name)
        if existing:
            clone = existing
            logger.info(f"Using existing clone for branch '{branch_name}'")
        else:
            # Get remote URL
            remote_url = git_manager.get_remote_url() if git_manager else None
            if not remote_url:
                return {
                    "success": False,
                    "error": "No remote URL available. Cannot create clone.",
                }

            # Generate clone path if not provided
            if clone_path is None:
                import platform
                import tempfile

                if platform.system() == "Windows":
                    base = Path(tempfile.gettempdir()) / "gobby-clones"
                else:
                    # nosec B108: /tmp is intentional for clones - they're temporary
                    base = Path("/tmp").resolve() / "gobby-clones"  # nosec B108
                base.mkdir(parents=True, exist_ok=True)
                safe_branch = branch_name.replace("/", "-")
                clone_path = str(base / f"{project_id}-{safe_branch}")

            # Create the clone
            result = git_manager.shallow_clone(
                remote_url=remote_url,
                clone_path=clone_path,
                branch=base_branch,
                depth=1,
            )

            if not result.success:
                return {
                    "success": False,
                    "error": f"Clone failed: {result.error or result.message}",
                }

            # Store clone record
            clone = clone_storage.create(
                project_id=project_id,
                branch_name=branch_name,
                clone_path=clone_path,
                base_branch=base_branch,
                task_id=task_id,
                remote_url=remote_url,
            )

        # Import AgentConfig and get machine_id
        from gobby.agents.runner import AgentConfig
        from gobby.utils.machine_id import get_machine_id

        machine_id = get_machine_id()

        # Create agent config
        config = AgentConfig(
            prompt=prompt,
            parent_session_id=parent_session_id,
            project_id=project_id,
            machine_id=machine_id,
            source=provider,
            workflow=workflow,
            task=task_id,
            session_context="summary_markdown",
            mode=mode,
            terminal=terminal,
            provider=provider,
            model=model,
            max_turns=max_turns,
            timeout=timeout,
            project_path=clone.clone_path,
        )

        # Prepare the run
        from gobby.llm.executor import AgentResult

        prepare_result = agent_runner.prepare_run(config)
        if isinstance(prepare_result, AgentResult):
            return {
                "success": False,
                "clone_id": clone.id,
                "clone_path": clone.clone_path,
                "branch_name": clone.branch_name,
                "error": prepare_result.error,
            }

        context = prepare_result
        if context.session is None or context.run is None:
            return {
                "success": False,
                "clone_id": clone.id,
                "error": "Internal error: context missing session or run",
            }

        child_session = context.session
        agent_run = context.run

        # Claim clone for the child session
        clone_storage.claim(clone.id, child_session.id)

        # Build enhanced prompt with clone context
        context_lines = [
            "## CRITICAL: Clone Context",
            "You are working in an ISOLATED git clone, NOT the main repository.",
            "",
            f"**Your workspace:** {clone.clone_path}",
            f"**Your branch:** {clone.branch_name}",
        ]
        if task_id:
            context_lines.append(f"**Your task:** {task_id}")
        context_lines.extend(
            [
                "",
                "**IMPORTANT RULES:**",
                f"1. ALL file operations must be within {clone.clone_path}",
                "2. Do NOT access the main repository",
                "3. Run `pwd` to verify your location before any file operations",
                f"4. Commit to YOUR branch ({clone.branch_name})",
                "5. When your assigned task is complete, STOP - do not claim other tasks",
                "",
                "---",
                "",
            ]
        )
        enhanced_prompt = "\n".join(context_lines) + prompt

        # Spawn based on mode
        if mode == "terminal":
            from gobby.agents.spawn import TerminalSpawner

            terminal_spawner = TerminalSpawner()
            terminal_result = terminal_spawner.spawn_agent(
                cli=provider,
                cwd=clone.clone_path,
                session_id=child_session.id,
                parent_session_id=parent_session_id,
                agent_run_id=agent_run.id,
                project_id=project_id,
                workflow_name=workflow,
                agent_depth=child_session.agent_depth,
                max_agent_depth=agent_runner._child_session_manager.max_agent_depth,
                terminal=terminal,
                prompt=enhanced_prompt,
            )

            if not terminal_result.success:
                return {
                    "success": False,
                    "clone_id": clone.id,
                    "clone_path": clone.clone_path,
                    "branch_name": clone.branch_name,
                    "run_id": agent_run.id,
                    "child_session_id": child_session.id,
                    "error": terminal_result.error or terminal_result.message,
                }

            return {
                "success": True,
                "clone_id": clone.id,
                "clone_path": clone.clone_path,
                "branch_name": clone.branch_name,
                "run_id": agent_run.id,
                "child_session_id": child_session.id,
                "status": "pending",
                "message": f"Agent spawned in {terminal_result.terminal_type} (PID: {terminal_result.pid})",
                "terminal_type": terminal_result.terminal_type,
                "pid": terminal_result.pid,
            }

        elif mode == "embedded":
            from gobby.agents.spawn import EmbeddedSpawner

            embedded_spawner = EmbeddedSpawner()
            embedded_result = embedded_spawner.spawn_agent(
                cli=provider,
                cwd=clone.clone_path,
                session_id=child_session.id,
                parent_session_id=parent_session_id,
                agent_run_id=agent_run.id,
                project_id=project_id,
                workflow_name=workflow,
                agent_depth=child_session.agent_depth,
                max_agent_depth=agent_runner._child_session_manager.max_agent_depth,
                prompt=enhanced_prompt,
            )

            return {
                "success": embedded_result.success,
                "clone_id": clone.id,
                "clone_path": clone.clone_path,
                "branch_name": clone.branch_name,
                "run_id": agent_run.id,
                "child_session_id": child_session.id,
                "status": "pending" if embedded_result.success else "error",
                "error": embedded_result.error if not embedded_result.success else None,
            }

        else:  # headless
            from gobby.agents.spawn import HeadlessSpawner

            headless_spawner = HeadlessSpawner()
            headless_result = headless_spawner.spawn_agent(
                cli=provider,
                cwd=clone.clone_path,
                session_id=child_session.id,
                parent_session_id=parent_session_id,
                agent_run_id=agent_run.id,
                project_id=project_id,
                workflow_name=workflow,
                agent_depth=child_session.agent_depth,
                max_agent_depth=agent_runner._child_session_manager.max_agent_depth,
                prompt=enhanced_prompt,
            )

            return {
                "success": headless_result.success,
                "clone_id": clone.id,
                "clone_path": clone.clone_path,
                "branch_name": clone.branch_name,
                "run_id": agent_run.id,
                "child_session_id": child_session.id,
                "status": "pending" if headless_result.success else "error",
                "pid": headless_result.pid if headless_result.success else None,
                "error": headless_result.error if not headless_result.success else None,
            }

    registry.register(
        name="spawn_agent_in_clone",
        description="Create a clone and spawn an agent to work in it",
        input_schema={
            "type": "object",
            "properties": {
                "prompt": {
                    "type": "string",
                    "description": "The task/prompt for the agent",
                },
                "branch_name": {
                    "type": "string",
                    "description": "Name for the branch in the clone",
                },
                "parent_session_id": {
                    "type": "string",
                    "description": "Parent session ID for context (required)",
                },
                "task_id": {
                    "type": "string",
                    "description": "Optional task ID to link to this clone",
                },
                "base_branch": {
                    "type": "string",
                    "description": "Branch to clone from",
                    "default": "main",
                },
                "clone_path": {
                    "type": "string",
                    "description": "Optional custom path for the clone",
                },
                "mode": {
                    "type": "string",
                    "description": "Execution mode",
                    "enum": ["terminal", "embedded", "headless"],
                    "default": "terminal",
                },
                "terminal": {
                    "type": "string",
                    "description": "Terminal type for terminal/embedded modes",
                    "default": "auto",
                },
                "provider": {
                    "type": "string",
                    "description": "LLM provider",
                    "enum": ["claude", "gemini", "codex", "antigravity"],
                    "default": "claude",
                },
                "model": {
                    "type": "string",
                    "description": "Optional model override",
                },
                "workflow": {
                    "type": "string",
                    "description": "Workflow name to execute",
                },
                "timeout": {
                    "type": "number",
                    "description": "Execution timeout in seconds",
                    "default": 120.0,
                },
                "max_turns": {
                    "type": "integer",
                    "description": "Maximum turns",
                    "default": 10,
                },
            },
            "required": ["prompt", "branch_name", "parent_session_id"],
        },
        func=spawn_agent_in_clone,
    )

    return registry
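For orientation, here is a minimal sketch of how the new module might be wired up. The `create_clones_registry` signature and the import paths come from the diff above; the idea that the returned `InternalToolRegistry` exposes an awaitable `call_tool(name, arguments)` is an assumption based only on the "downstream proxy pattern (call_tool, list_tools, get_tool_schema)" mentioned in the module docstring, and the way the storage and git managers are constructed is not shown in this diff.

```python
# Hypothetical wiring sketch; call_tool's exact signature and the manager
# construction are assumptions, not confirmed by this diff.
from typing import Any

from gobby.clones.git import CloneGitManager
from gobby.mcp_proxy.tools.clones import create_clones_registry
from gobby.storage.clones import LocalCloneManager


async def list_active_clones(
    clone_storage: LocalCloneManager,
    git_manager: CloneGitManager,
    project_id: str,
) -> dict[str, Any]:
    # Build the gobby-clones registry (agent_runner omitted, so
    # spawn_agent_in_clone would report "Agent runner not configured").
    registry = create_clones_registry(
        clone_storage=clone_storage,
        git_manager=git_manager,
        project_id=project_id,
    )
    # Invoke a registered tool via the downstream proxy pattern.
    return await registry.call_tool("list_clones", {"status": "active"})
```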