gobby-0.2.6-py3-none-any.whl → gobby-0.2.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146)
  1. gobby/__init__.py +1 -1
  2. gobby/adapters/__init__.py +2 -1
  3. gobby/adapters/codex_impl/__init__.py +28 -0
  4. gobby/adapters/codex_impl/adapter.py +722 -0
  5. gobby/adapters/codex_impl/client.py +679 -0
  6. gobby/adapters/codex_impl/protocol.py +20 -0
  7. gobby/adapters/codex_impl/types.py +68 -0
  8. gobby/agents/definitions.py +11 -1
  9. gobby/agents/isolation.py +395 -0
  10. gobby/agents/sandbox.py +261 -0
  11. gobby/agents/spawn.py +42 -287
  12. gobby/agents/spawn_executor.py +385 -0
  13. gobby/agents/spawners/__init__.py +24 -0
  14. gobby/agents/spawners/command_builder.py +189 -0
  15. gobby/agents/spawners/embedded.py +21 -2
  16. gobby/agents/spawners/headless.py +21 -2
  17. gobby/agents/spawners/prompt_manager.py +125 -0
  18. gobby/cli/install.py +4 -4
  19. gobby/cli/installers/claude.py +6 -0
  20. gobby/cli/installers/gemini.py +6 -0
  21. gobby/cli/installers/shared.py +103 -4
  22. gobby/cli/sessions.py +1 -1
  23. gobby/cli/utils.py +9 -2
  24. gobby/config/__init__.py +12 -97
  25. gobby/config/app.py +10 -94
  26. gobby/config/extensions.py +2 -2
  27. gobby/config/features.py +7 -130
  28. gobby/config/tasks.py +4 -28
  29. gobby/hooks/__init__.py +0 -13
  30. gobby/hooks/event_handlers.py +45 -2
  31. gobby/hooks/hook_manager.py +2 -2
  32. gobby/hooks/plugins.py +1 -1
  33. gobby/hooks/webhooks.py +1 -1
  34. gobby/llm/resolver.py +3 -2
  35. gobby/mcp_proxy/importer.py +62 -4
  36. gobby/mcp_proxy/instructions.py +2 -0
  37. gobby/mcp_proxy/registries.py +1 -4
  38. gobby/mcp_proxy/services/recommendation.py +43 -11
  39. gobby/mcp_proxy/tools/agents.py +31 -731
  40. gobby/mcp_proxy/tools/clones.py +0 -385
  41. gobby/mcp_proxy/tools/memory.py +2 -2
  42. gobby/mcp_proxy/tools/sessions/__init__.py +14 -0
  43. gobby/mcp_proxy/tools/sessions/_commits.py +232 -0
  44. gobby/mcp_proxy/tools/sessions/_crud.py +253 -0
  45. gobby/mcp_proxy/tools/sessions/_factory.py +63 -0
  46. gobby/mcp_proxy/tools/sessions/_handoff.py +499 -0
  47. gobby/mcp_proxy/tools/sessions/_messages.py +138 -0
  48. gobby/mcp_proxy/tools/skills/__init__.py +14 -29
  49. gobby/mcp_proxy/tools/spawn_agent.py +417 -0
  50. gobby/mcp_proxy/tools/tasks/_lifecycle.py +52 -18
  51. gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +1 -1
  52. gobby/mcp_proxy/tools/worktrees.py +0 -343
  53. gobby/memory/ingestion/__init__.py +5 -0
  54. gobby/memory/ingestion/multimodal.py +221 -0
  55. gobby/memory/manager.py +62 -283
  56. gobby/memory/search/__init__.py +10 -0
  57. gobby/memory/search/coordinator.py +248 -0
  58. gobby/memory/services/__init__.py +5 -0
  59. gobby/memory/services/crossref.py +142 -0
  60. gobby/prompts/loader.py +5 -2
  61. gobby/servers/http.py +1 -4
  62. gobby/servers/routes/admin.py +14 -0
  63. gobby/servers/routes/mcp/endpoints/__init__.py +61 -0
  64. gobby/servers/routes/mcp/endpoints/discovery.py +405 -0
  65. gobby/servers/routes/mcp/endpoints/execution.py +568 -0
  66. gobby/servers/routes/mcp/endpoints/registry.py +378 -0
  67. gobby/servers/routes/mcp/endpoints/server.py +304 -0
  68. gobby/servers/routes/mcp/hooks.py +1 -1
  69. gobby/servers/routes/mcp/tools.py +48 -1506
  70. gobby/sessions/lifecycle.py +1 -1
  71. gobby/sessions/processor.py +10 -0
  72. gobby/sessions/transcripts/base.py +1 -0
  73. gobby/sessions/transcripts/claude.py +15 -5
  74. gobby/skills/parser.py +30 -2
  75. gobby/storage/migrations.py +159 -372
  76. gobby/storage/sessions.py +43 -7
  77. gobby/storage/skills.py +37 -4
  78. gobby/storage/tasks/_lifecycle.py +18 -3
  79. gobby/sync/memories.py +1 -1
  80. gobby/tasks/external_validator.py +1 -1
  81. gobby/tasks/validation.py +22 -20
  82. gobby/tools/summarizer.py +91 -10
  83. gobby/utils/project_context.py +2 -3
  84. gobby/utils/status.py +13 -0
  85. gobby/workflows/actions.py +221 -1217
  86. gobby/workflows/artifact_actions.py +31 -0
  87. gobby/workflows/autonomous_actions.py +11 -0
  88. gobby/workflows/context_actions.py +50 -1
  89. gobby/workflows/enforcement/__init__.py +47 -0
  90. gobby/workflows/enforcement/blocking.py +269 -0
  91. gobby/workflows/enforcement/commit_policy.py +283 -0
  92. gobby/workflows/enforcement/handlers.py +269 -0
  93. gobby/workflows/enforcement/task_policy.py +542 -0
  94. gobby/workflows/git_utils.py +106 -0
  95. gobby/workflows/llm_actions.py +30 -0
  96. gobby/workflows/mcp_actions.py +20 -1
  97. gobby/workflows/memory_actions.py +80 -0
  98. gobby/workflows/safe_evaluator.py +183 -0
  99. gobby/workflows/session_actions.py +44 -0
  100. gobby/workflows/state_actions.py +60 -1
  101. gobby/workflows/stop_signal_actions.py +55 -0
  102. gobby/workflows/summary_actions.py +94 -1
  103. gobby/workflows/task_sync_actions.py +347 -0
  104. gobby/workflows/todo_actions.py +34 -1
  105. gobby/workflows/webhook_actions.py +185 -0
  106. {gobby-0.2.6.dist-info → gobby-0.2.7.dist-info}/METADATA +6 -1
  107. {gobby-0.2.6.dist-info → gobby-0.2.7.dist-info}/RECORD +111 -111
  108. {gobby-0.2.6.dist-info → gobby-0.2.7.dist-info}/WHEEL +1 -1
  109. gobby/adapters/codex.py +0 -1332
  110. gobby/install/claude/commands/gobby/bug.md +0 -51
  111. gobby/install/claude/commands/gobby/chore.md +0 -51
  112. gobby/install/claude/commands/gobby/epic.md +0 -52
  113. gobby/install/claude/commands/gobby/eval.md +0 -235
  114. gobby/install/claude/commands/gobby/feat.md +0 -49
  115. gobby/install/claude/commands/gobby/nit.md +0 -52
  116. gobby/install/claude/commands/gobby/ref.md +0 -52
  117. gobby/mcp_proxy/tools/session_messages.py +0 -1055
  118. gobby/prompts/defaults/expansion/system.md +0 -119
  119. gobby/prompts/defaults/expansion/user.md +0 -48
  120. gobby/prompts/defaults/external_validation/agent.md +0 -72
  121. gobby/prompts/defaults/external_validation/external.md +0 -63
  122. gobby/prompts/defaults/external_validation/spawn.md +0 -83
  123. gobby/prompts/defaults/external_validation/system.md +0 -6
  124. gobby/prompts/defaults/features/import_mcp.md +0 -22
  125. gobby/prompts/defaults/features/import_mcp_github.md +0 -17
  126. gobby/prompts/defaults/features/import_mcp_search.md +0 -16
  127. gobby/prompts/defaults/features/recommend_tools.md +0 -32
  128. gobby/prompts/defaults/features/recommend_tools_hybrid.md +0 -35
  129. gobby/prompts/defaults/features/recommend_tools_llm.md +0 -30
  130. gobby/prompts/defaults/features/server_description.md +0 -20
  131. gobby/prompts/defaults/features/server_description_system.md +0 -6
  132. gobby/prompts/defaults/features/task_description.md +0 -31
  133. gobby/prompts/defaults/features/task_description_system.md +0 -6
  134. gobby/prompts/defaults/features/tool_summary.md +0 -17
  135. gobby/prompts/defaults/features/tool_summary_system.md +0 -6
  136. gobby/prompts/defaults/handoff/compact.md +0 -63
  137. gobby/prompts/defaults/handoff/session_end.md +0 -57
  138. gobby/prompts/defaults/memory/extract.md +0 -61
  139. gobby/prompts/defaults/research/step.md +0 -58
  140. gobby/prompts/defaults/validation/criteria.md +0 -47
  141. gobby/prompts/defaults/validation/validate.md +0 -38
  142. gobby/storage/migrations_legacy.py +0 -1359
  143. gobby/workflows/task_enforcement_actions.py +0 -1343
  144. {gobby-0.2.6.dist-info → gobby-0.2.7.dist-info}/entry_points.txt +0 -0
  145. {gobby-0.2.6.dist-info → gobby-0.2.7.dist-info}/licenses/LICENSE.md +0 -0
  146. {gobby-0.2.6.dist-info → gobby-0.2.7.dist-info}/top_level.txt +0 -0
@@ -23,7 +23,7 @@ from gobby.mcp_proxy.tools.internal import InternalToolRegistry
  from gobby.skills.loader import SkillLoader, SkillLoadError
  from gobby.skills.search import SearchFilters, SkillSearch
  from gobby.skills.updater import SkillUpdater
- from gobby.storage.skills import LocalSkillManager
+ from gobby.storage.skills import ChangeEvent, LocalSkillManager, SkillChangeNotifier
 
  if TYPE_CHECKING:
      from gobby.storage.database import DatabaseProtocol
@@ -61,8 +61,9 @@ def create_skills_registry(
  description="Skill management - list_skills, get_skill, search_skills, install_skill, update_skill, remove_skill",
  )
 
- # Initialize storage
- storage = LocalSkillManager(db)
+ # Initialize change notifier and storage
+ notifier = SkillChangeNotifier()
+ storage = LocalSkillManager(db, notifier=notifier)
 
  # --- list_skills tool ---
 
@@ -224,6 +225,13 @@ def create_skills_registry(
  # Index on registry creation
  _index_skills()
 
+ # Wire up change notifier to re-index on any skill mutation
+ def _on_skill_change(event: ChangeEvent) -> None:
+     """Re-index skills when any skill is created, updated, or deleted."""
+     _index_skills()
+
+ notifier.add_listener(_on_skill_change)
+
  @registry.tool(
      name="search_skills",
      description="Search for skills by query. Returns ranked results with relevance scores. Supports filtering by category and tags.",
@@ -359,17 +367,9 @@ def create_skills_registry(
  # Store the name before deletion
  skill_name = skill.name
 
- # Delete the skill
+ # Delete the skill (notifier triggers re-indexing automatically)
  storage.delete_skill(skill.id)
 
- # Re-index skills after deletion
- skills = storage.list_skills(
-     project_id=project_id,
-     limit=10000,
-     include_global=True,
- )
- await search.index_skills_async(skills)
-
  return {
      "success": True,
      "removed": True,
@@ -430,17 +430,9 @@ def create_skills_registry(
  }
 
  # Use SkillUpdater to refresh from source
+ # (notifier triggers re-indexing automatically if updated)
  result = updater.update_skill(skill.id)
 
- # Re-index skills if updated
- if result.updated:
-     skills = storage.list_skills(
-         project_id=project_id,
-         limit=10000,
-         include_global=True,
-     )
-     await search.index_skills_async(skills)
-
  return {
      "success": result.success,
      "updated": result.updated,
@@ -606,14 +598,7 @@ def create_skills_registry(
  project_id=skill_project_id,
  enabled=True,
  )
-
- # Re-index skills
- skills = storage.list_skills(
-     project_id=project_id,
-     limit=10000,
-     include_global=True,
- )
- await search.index_skills_async(skills)
+ # Notifier triggers re-indexing automatically via create_skill
 
  return {
      "success": True,
@@ -0,0 +1,417 @@
+ """
+ Unified spawn_agent MCP tool.
+
+ Consolidates three separate agent spawning tools into one:
+ - start_agent
+ - spawn_agent_in_worktree
+ - spawn_agent_in_clone
+
+ One tool: spawn_agent(prompt, agent="generic", isolation="current"|"worktree"|"clone", ...)
+ """
+
+ from __future__ import annotations
+
+ import logging
+ import socket
+ import uuid
+ from pathlib import Path
+ from typing import TYPE_CHECKING, Any, Literal, cast
+
+ from gobby.agents.definitions import AgentDefinition, AgentDefinitionLoader
+ from gobby.agents.isolation import (
+     SpawnConfig,
+     get_isolation_handler,
+ )
+ from gobby.agents.sandbox import SandboxConfig
+ from gobby.agents.spawn_executor import SpawnRequest, execute_spawn
+ from gobby.mcp_proxy.tools.internal import InternalToolRegistry
+ from gobby.mcp_proxy.tools.tasks import resolve_task_id_for_mcp
+ from gobby.utils.project_context import get_project_context
+
+ if TYPE_CHECKING:
+     from gobby.agents.runner import AgentRunner
+     from gobby.storage.tasks import LocalTaskManager
+
+ logger = logging.getLogger(__name__)
+
+
+ async def spawn_agent_impl(
+     prompt: str,
+     runner: AgentRunner,
+     agent_def: AgentDefinition | None = None,
+     task_id: str | None = None,
+     task_manager: LocalTaskManager | None = None,
+     # Isolation
+     isolation: Literal["current", "worktree", "clone"] | None = None,
+     branch_name: str | None = None,
+     base_branch: str | None = None,
+     # Storage/managers for isolation
+     worktree_storage: Any | None = None,
+     git_manager: Any | None = None,
+     clone_storage: Any | None = None,
+     clone_manager: Any | None = None,
+     # Execution
+     workflow: str | None = None,
+     mode: Literal["terminal", "embedded", "headless"] | None = None,
+     terminal: str = "auto",
+     provider: str | None = None,
+     model: str | None = None,
+     # Limits
+     timeout: float | None = None,
+     max_turns: int | None = None,
+     # Sandbox
+     sandbox: bool | None = None,
+     sandbox_mode: Literal["permissive", "restrictive"] | None = None,
+     sandbox_allow_network: bool | None = None,
+     sandbox_extra_paths: list[str] | None = None,
+     # Context
+     parent_session_id: str | None = None,
+     project_path: str | None = None,
+ ) -> dict[str, Any]:
+     """
+     Core spawn_agent implementation that can be called directly.
+
+     This is the internal implementation used by both the spawn_agent MCP tool
+     and the deprecated spawn_agent_in_worktree/spawn_agent_in_clone tools.
+
+     Args:
+         prompt: Required - what the agent should do
+         runner: AgentRunner instance for executing agents
+         agent_def: Optional loaded agent definition
+         task_id: Optional - link to task (supports N, #N, UUID)
+         task_manager: Task manager for task resolution
+         isolation: Isolation mode (current/worktree/clone)
+         branch_name: Git branch name (auto-generated from task if not provided)
+         base_branch: Base branch for worktree/clone
+         worktree_storage: Storage for worktree records
+         git_manager: Git manager for worktree operations
+         clone_storage: Storage for clone records
+         clone_manager: Git manager for clone operations
+         workflow: Workflow to use
+         mode: Execution mode (terminal/embedded/headless)
+         terminal: Terminal type for terminal mode
+         provider: AI provider (claude/gemini/codex)
+         model: Model to use
+         timeout: Timeout in seconds
+         max_turns: Maximum conversation turns
+         sandbox: Enable sandbox (True/False/None). None inherits from agent_def.
+         sandbox_mode: Sandbox mode (permissive/restrictive). Overrides agent_def.
+         sandbox_allow_network: Allow network access. Overrides agent_def.
+         sandbox_extra_paths: Extra paths for sandbox write access.
+         parent_session_id: Parent session ID
+         project_path: Project path override
+
+     Returns:
+         Dict with success status, run_id, child_session_id, isolation metadata
+     """
+     # 1. Merge config: agent_def defaults < params
+     effective_isolation = isolation
+     if effective_isolation is None and agent_def:
+         effective_isolation = agent_def.isolation
+     effective_isolation = effective_isolation or "current"
+
+     effective_provider = provider
+     if effective_provider is None and agent_def:
+         effective_provider = agent_def.provider
+     effective_provider = effective_provider or "claude"
+
+     effective_mode: Literal["terminal", "embedded", "headless"] | None = mode
+     if effective_mode is None and agent_def:
+         effective_mode = cast(Literal["terminal", "embedded", "headless"], agent_def.mode)
+     effective_mode = effective_mode or "terminal"
+
+     effective_workflow = workflow
+     if effective_workflow is None and agent_def:
+         effective_workflow = agent_def.workflow
+
+     effective_base_branch = base_branch
+     if effective_base_branch is None and agent_def:
+         effective_base_branch = agent_def.base_branch
+     effective_base_branch = effective_base_branch or "main"
+
+     effective_branch_prefix = None
+     if agent_def:
+         effective_branch_prefix = agent_def.branch_prefix
+
+     # Build effective sandbox config (merge agent_def.sandbox with params)
+     effective_sandbox_config: SandboxConfig | None = None
+
+     # Start with agent_def.sandbox if present
+     base_sandbox = agent_def.sandbox if agent_def and hasattr(agent_def, "sandbox") else None
+
+     # Determine if sandbox should be enabled
+     sandbox_enabled = sandbox  # Explicit param takes precedence
+     if sandbox_enabled is None and base_sandbox is not None:
+         sandbox_enabled = base_sandbox.enabled
+
+     # Build sandbox config if enabled or if we have params to apply
+     if sandbox_enabled is True or (
+         sandbox_enabled is None
+         and (sandbox_mode is not None or sandbox_allow_network is not None or sandbox_extra_paths)
+     ):
+         # Start from base or create new
+         if base_sandbox is not None:
+             effective_sandbox_config = SandboxConfig(
+                 enabled=True if sandbox_enabled is None else sandbox_enabled,
+                 mode=sandbox_mode if sandbox_mode is not None else base_sandbox.mode,
+                 allow_network=(
+                     sandbox_allow_network
+                     if sandbox_allow_network is not None
+                     else base_sandbox.allow_network
+                 ),
+                 extra_read_paths=base_sandbox.extra_read_paths,
+                 extra_write_paths=(
+                     list(base_sandbox.extra_write_paths) + (sandbox_extra_paths or [])
+                 ),
+             )
+         else:
+             effective_sandbox_config = SandboxConfig(
+                 enabled=True,
+                 mode=sandbox_mode or "permissive",
+                 allow_network=sandbox_allow_network if sandbox_allow_network is not None else True,
+                 extra_write_paths=sandbox_extra_paths or [],
+             )
+     elif sandbox_enabled is False:
+         # Explicitly disabled - set config with enabled=False
+         effective_sandbox_config = SandboxConfig(enabled=False)
+
+     # 2. Resolve project context
+     ctx = get_project_context(Path(project_path) if project_path else None)
+     if ctx is None:
+         return {"success": False, "error": "Could not resolve project context"}
+
+     project_id = ctx.get("id") or ctx.get("project_id")
+     resolved_project_path = ctx.get("project_path")
+
+     if not project_id or not isinstance(project_id, str):
+         return {"success": False, "error": "Could not resolve project_id from context"}
+     if not resolved_project_path or not isinstance(resolved_project_path, str):
+         return {"success": False, "error": "Could not resolve project_path from context"}
+
+     # 3. Validate parent_session_id and spawn depth
+     if parent_session_id is None:
+         return {"success": False, "error": "parent_session_id is required"}
+
+     can_spawn, reason, _depth = runner.can_spawn(parent_session_id)
+     if not can_spawn:
+         return {"success": False, "error": reason}
+
+     # 4. Resolve task_id if provided (supports N, #N, UUID)
+     resolved_task_id: str | None = None
+     task_title: str | None = None
+     task_seq_num: int | None = None
+
+     if task_id and task_manager:
+         try:
+             resolved_task_id = resolve_task_id_for_mcp(task_manager, task_id, project_id)
+             task = task_manager.get_task(resolved_task_id)
+             if task:
+                 task_title = task.title
+                 task_seq_num = task.seq_num
+         except Exception as e:
+             logger.warning(f"Failed to resolve task_id {task_id}: {e}")
+
+     # 5. Get isolation handler
+     handler = get_isolation_handler(
+         effective_isolation,
+         git_manager=git_manager,
+         worktree_storage=worktree_storage,
+         clone_manager=clone_manager,
+         clone_storage=clone_storage,
+     )
+
+     # 6. Build spawn config
+     spawn_config = SpawnConfig(
+         prompt=prompt,
+         task_id=resolved_task_id,
+         task_title=task_title,
+         task_seq_num=task_seq_num,
+         branch_name=branch_name,
+         branch_prefix=effective_branch_prefix,
+         base_branch=effective_base_branch,
+         project_id=project_id,
+         project_path=resolved_project_path,
+         provider=effective_provider,
+         parent_session_id=parent_session_id,
+     )
+
+     # 7. Prepare environment (worktree/clone creation)
+     try:
+         isolation_ctx = await handler.prepare_environment(spawn_config)
+     except Exception as e:
+         logger.error(f"Failed to prepare environment: {e}", exc_info=True)
+         return {"success": False, "error": f"Failed to prepare environment: {e}"}
+
+     # 8. Build enhanced prompt with isolation context
+     enhanced_prompt = handler.build_context_prompt(prompt, isolation_ctx)
+
+     # 9. Generate session and run IDs
+     session_id = str(uuid.uuid4())
+     run_id = str(uuid.uuid4())
+
+     # 10. Execute spawn via SpawnExecutor
+     spawn_request = SpawnRequest(
+         prompt=enhanced_prompt,
+         cwd=isolation_ctx.cwd,
+         mode=effective_mode,
+         provider=effective_provider,
+         terminal=terminal,
+         session_id=session_id,
+         run_id=run_id,
+         parent_session_id=parent_session_id,
+         project_id=project_id,
+         workflow=effective_workflow,
+         worktree_id=isolation_ctx.worktree_id,
+         clone_id=isolation_ctx.clone_id,
+         session_manager=runner._child_session_manager,
+         machine_id=socket.gethostname(),
+         sandbox_config=effective_sandbox_config,
+     )
+
+     spawn_result = await execute_spawn(spawn_request)
+
+     # 11. Return response with isolation metadata
+     return {
+         "success": spawn_result.success,
+         "run_id": spawn_result.run_id,
+         "child_session_id": spawn_result.child_session_id,
+         "status": spawn_result.status,
+         "isolation": effective_isolation,
+         "branch_name": isolation_ctx.branch_name,
+         "worktree_id": isolation_ctx.worktree_id,
+         "worktree_path": isolation_ctx.cwd if effective_isolation == "worktree" else None,
+         "clone_id": isolation_ctx.clone_id,
+         "pid": spawn_result.pid,
+         "error": spawn_result.error,
+         "message": spawn_result.message,
+     }
+
+
+ def create_spawn_agent_registry(
+     runner: AgentRunner,
+     agent_loader: AgentDefinitionLoader | None = None,
+     task_manager: LocalTaskManager | None = None,
+     worktree_storage: Any | None = None,
+     git_manager: Any | None = None,
+     clone_storage: Any | None = None,
+     clone_manager: Any | None = None,
+ ) -> InternalToolRegistry:
+     """
+     Create a spawn_agent tool registry with the unified spawn_agent tool.
+
+     Args:
+         runner: AgentRunner instance for executing agents.
+         agent_loader: Loader for agent definitions.
+         task_manager: Task manager for task resolution.
+         worktree_storage: Storage for worktree records.
+         git_manager: Git manager for worktree operations.
+         clone_storage: Storage for clone records.
+         clone_manager: Git manager for clone operations.
+
+     Returns:
+         InternalToolRegistry with spawn_agent tool registered.
+     """
+     registry = InternalToolRegistry(
+         name="gobby-spawn-agent",
+         description="Unified agent spawning with isolation support",
+     )
+
+     # Use provided loader or create default
+     loader = agent_loader or AgentDefinitionLoader()
+
+     @registry.tool(
+         name="spawn_agent",
+         description=(
+             "Spawn a subagent to execute a task. Supports isolation modes: "
+             "'current' (work in current directory), 'worktree' (create git worktree), "
+             "'clone' (create shallow clone). Can use named agent definitions or raw parameters."
+         ),
+     )
+     async def spawn_agent(
+         prompt: str,
+         agent: str = "generic",
+         task_id: str | None = None,
+         # Isolation
+         isolation: Literal["current", "worktree", "clone"] | None = None,
+         branch_name: str | None = None,
+         base_branch: str | None = None,
+         # Execution
+         workflow: str | None = None,
+         mode: Literal["terminal", "embedded", "headless"] | None = None,
+         terminal: str = "auto",
+         provider: str | None = None,
+         model: str | None = None,
+         # Limits
+         timeout: float | None = None,
+         max_turns: int | None = None,
+         # Sandbox
+         sandbox: bool | None = None,
+         sandbox_mode: Literal["permissive", "restrictive"] | None = None,
+         sandbox_allow_network: bool | None = None,
+         sandbox_extra_paths: list[str] | None = None,
+         # Context
+         parent_session_id: str | None = None,
+         project_path: str | None = None,
+     ) -> dict[str, Any]:
+         """
+         Spawn a subagent with the specified configuration.
+
+         Args:
+             prompt: Required - what the agent should do
+             agent: Agent definition name (defaults to "generic")
+             task_id: Optional - link to task (supports N, #N, UUID)
+             isolation: Isolation mode (current/worktree/clone)
+             branch_name: Git branch name (auto-generated from task if not provided)
+             base_branch: Base branch for worktree/clone
+             workflow: Workflow to use
+             mode: Execution mode (terminal/embedded/headless)
+             terminal: Terminal type for terminal mode
+             provider: AI provider (claude/gemini/codex)
+             model: Model to use
+             timeout: Timeout in seconds
+             max_turns: Maximum conversation turns
+             sandbox: Enable sandbox (True/False/None). None inherits from agent_def.
+             sandbox_mode: Sandbox mode (permissive/restrictive). Overrides agent_def.
+             sandbox_allow_network: Allow network access. Overrides agent_def.
+             sandbox_extra_paths: Extra paths for sandbox write access.
+             parent_session_id: Parent session ID
+             project_path: Project path override
+
+         Returns:
+             Dict with success status, run_id, child_session_id, isolation metadata
+         """
+         # Load agent definition (defaults to "generic")
+         agent_def = loader.load(agent)
+         if agent_def is None and agent != "generic":
+             return {"success": False, "error": f"Agent '{agent}' not found"}
+
+         # Delegate to spawn_agent_impl
+         return await spawn_agent_impl(
+             prompt=prompt,
+             runner=runner,
+             agent_def=agent_def,
+             task_id=task_id,
+             task_manager=task_manager,
+             isolation=isolation,
+             branch_name=branch_name,
+             base_branch=base_branch,
+             worktree_storage=worktree_storage,
+             git_manager=git_manager,
+             clone_storage=clone_storage,
+             clone_manager=clone_manager,
+             workflow=workflow,
+             mode=mode,
+             terminal=terminal,
+             provider=provider,
+             model=model,
+             timeout=timeout,
+             max_turns=max_turns,
+             sandbox=sandbox,
+             sandbox_mode=sandbox_mode,
+             sandbox_allow_network=sandbox_allow_network,
+             sandbox_extra_paths=sandbox_extra_paths,
+             parent_session_id=parent_session_id,
+             project_path=project_path,
+         )
+
+     return registry
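
The new module above is the unified replacement for start_agent, spawn_agent_in_worktree, and spawn_agent_in_clone. As a quick orientation, here is a hedged usage sketch that calls spawn_agent_impl directly with keyword arguments taken from its signature; the runner and manager objects are placeholders for whatever the hosting server already constructs, and the prompt and task number are invented for the example.

```python
# Hedged usage sketch against the spawn_agent_impl signature shown above.
from typing import Any

from gobby.agents.runner import AgentRunner
from gobby.mcp_proxy.tools.spawn_agent import spawn_agent_impl
from gobby.storage.tasks import LocalTaskManager


async def spawn_worktree_agent(
    runner: AgentRunner,
    task_manager: LocalTaskManager,
    worktree_storage: Any,
    git_manager: Any,
    parent_session_id: str,
) -> dict[str, Any]:
    result = await spawn_agent_impl(
        prompt="Fix the failing tests in gobby/skills/",  # example prompt
        runner=runner,
        task_id="#42",                 # N, #N, or a UUID are all resolved
        task_manager=task_manager,
        isolation="worktree",          # the handler prepares the worktree and branch
        base_branch="main",
        worktree_storage=worktree_storage,
        git_manager=git_manager,
        mode="headless",
        provider="claude",
        sandbox=True,
        sandbox_mode="restrictive",
        parent_session_id=parent_session_id,
    )
    if not result["success"]:
        raise RuntimeError(result["error"])
    return result
```

The returned dict carries the isolation metadata (branch_name, worktree_id, worktree_path) alongside run_id and child_session_id, so a caller can report where the subagent is working.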
@@ -65,13 +65,13 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
  try:
      resolved_id = resolve_task_id_for_mcp(ctx.task_manager, task_id)
  except TaskNotFoundError as e:
-     return {"error": str(e)}
+     return {"success": False, "error": str(e)}
  except ValueError as e:
-     return {"error": str(e)}
+     return {"success": False, "error": str(e)}
 
  task = ctx.task_manager.get_task(resolved_id)
  if not task:
-     return {"error": f"Task {task_id} not found"}
+     return {"success": False, "error": f"Task {task_id} not found"}
 
  # Link commit if provided (convenience for link + close in one call)
  if commit_sha:
@@ -85,6 +85,7 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
  commit_result = validate_commit_requirements(task, reason, repo_path)
  if not commit_result.can_close:
      return {
+         "success": False,
          "error": commit_result.error_type,
          "message": commit_result.message,
      }
@@ -92,11 +93,40 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
  # Auto-skip validation for certain close reasons
  should_skip = skip_validation or reason.lower() in SKIP_REASONS
 
+ # Enforce commits if session had edits
+ if session_id and not should_skip:
+     try:
+         from gobby.storage.sessions import LocalSessionManager
+
+         session_manager = LocalSessionManager(ctx.task_manager.db)
+         session = session_manager.get(session_id)
+
+         # Check if task has commits (including the one being linked right now)
+         has_commits = bool(task.commits) or bool(commit_sha)
+
+         if session and session.had_edits and not has_commits:
+             return {
+                 "success": False,
+                 "error": "missing_commits_for_edits",
+                 "message": (
+                     "This session made edits but no commits are linked to the task. "
+                     "You must commit your changes and link them to the task before closing."
+                 ),
+                 "suggestion": (
+                     "Commit your changes with `[#task_id]` in the message, "
+                     "or pass `commit_sha` to `close_task`."
+                 ),
+             }
+     except Exception:
+         # Don't block close on internal error
+         pass # nosec B110 - best-effort session edit check
+
  if not should_skip:
      # Check if task has children (is a parent task)
      parent_result = validate_parent_task(ctx, resolved_id)
      if not parent_result.can_close:
-         response = {
+         response: dict[str, Any] = {
+             "success": False,
              "error": parent_result.error_type,
              "message": parent_result.message,
          }
@@ -127,6 +157,7 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
  )
  if not llm_result.can_close:
      response = {
+         "success": False,
          "error": llm_result.error_type,
          "message": llm_result.message,
      }
@@ -292,7 +323,7 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
  try:
      resolved_id = resolve_task_id_for_mcp(ctx.task_manager, task_id)
  except (TaskNotFoundError, ValueError) as e:
-     return {"error": str(e)}
+     return {"success": False, "error": str(e)}
 
  try:
      ctx.task_manager.reopen_task(resolved_id, reason=reason)
@@ -313,7 +344,7 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
 
  return {}
  except ValueError as e:
-     return {"error": str(e)}
+     return {"success": False, "error": str(e)}
 
  registry.register(
      name="reopen_task",
@@ -345,22 +376,23 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
  try:
      resolved_id = resolve_task_id_for_mcp(ctx.task_manager, task_id)
  except (TaskNotFoundError, ValueError) as e:
-     return {"error": str(e)}
+     return {"success": False, "error": str(e)}
 
  # Get task before deleting to capture seq_num for ref
  task = ctx.task_manager.get_task(resolved_id)
  if not task:
-     return {"error": f"Task {task_id} not found"}
+     return {"success": False, "error": f"Task {task_id} not found"}
  ref = f"#{task.seq_num}" if task.seq_num else resolved_id[:8]
 
  try:
      deleted = ctx.task_manager.delete_task(resolved_id, cascade=cascade, unlink=unlink)
      if not deleted:
-         return {"error": f"Task {task_id} not found"}
+         return {"success": False, "error": f"Task {task_id} not found"}
  except ValueError as e:
      error_msg = str(e)
      if "dependent task(s)" in error_msg:
          return {
+             "success": False,
              "error": "has_dependents",
              "message": error_msg,
              "suggestion": f"Use cascade=True to delete task {ref} and its dependents, "
@@ -368,11 +400,12 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
  }
  elif "has children" in error_msg:
      return {
+         "success": False,
          "error": "has_children",
          "message": error_msg,
          "suggestion": f"Use cascade=True to delete task {ref} and all its subtasks.",
      }
- return {"error": error_msg}
+ return {"success": False, "error": error_msg}
 
  return {
      "ref": ref,
@@ -413,10 +446,10 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
  try:
      resolved_id = resolve_task_id_for_mcp(ctx.task_manager, task_id)
  except (TaskNotFoundError, ValueError) as e:
-     return {"error": str(e)}
+     return {"success": False, "error": str(e)}
  task = ctx.task_manager.add_label(resolved_id, label)
  if not task:
-     return {"error": f"Task {task_id} not found"}
+     return {"success": False, "error": f"Task {task_id} not found"}
  return {}
 
  registry.register(
@@ -441,10 +474,10 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
  try:
      resolved_id = resolve_task_id_for_mcp(ctx.task_manager, task_id)
  except (TaskNotFoundError, ValueError) as e:
-     return {"error": str(e)}
+     return {"success": False, "error": str(e)}
  task = ctx.task_manager.remove_label(resolved_id, label)
  if not task:
-     return {"error": f"Task {task_id} not found"}
+     return {"success": False, "error": f"Task {task_id} not found"}
  return {}
 
  registry.register(
@@ -487,17 +520,18 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
  try:
      resolved_id = resolve_task_id_for_mcp(ctx.task_manager, task_id)
  except TaskNotFoundError as e:
-     return {"error": str(e)}
+     return {"success": False, "error": str(e)}
  except ValueError as e:
-     return {"error": str(e)}
+     return {"success": False, "error": str(e)}
 
  task = ctx.task_manager.get_task(resolved_id)
  if not task:
-     return {"error": f"Task {task_id} not found"}
+     return {"success": False, "error": f"Task {task_id} not found"}
 
  # Check if already claimed by another session
  if task.assignee and task.assignee != session_id and not force:
      return {
+         "success": False,
          "error": "Task already claimed by another session",
          "claimed_by": task.assignee,
          "message": f"Task is already claimed by session '{task.assignee}'. Use force=True to override.",
@@ -510,7 +544,7 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
  status="in_progress",
  )
  if not updated:
-     return {"error": f"Failed to claim task {task_id}"}
+     return {"success": False, "error": f"Failed to claim task {task_id}"}
 
  # Link task to session (best-effort, don't fail the claim if this fails)
  try:
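
The lifecycle hunks above follow a single pattern: every early-return error now includes an explicit "success": False next to the existing error/message/suggestion fields, while success paths are unchanged (several still return {} or a small result dict). A hedged sketch of how a caller might branch on the new shape:

```python
# Hedged sketch of consuming the standardized lifecycle tool responses.
from typing import Any


def report_close_result(result: dict[str, Any]) -> bool:
    """Return True if the call succeeded, printing guidance otherwise."""
    if result.get("success") is False:
        # e.g. error="missing_commits_for_edits" with a human-readable message
        print(result.get("message") or result.get("error", "unknown error"))
        if suggestion := result.get("suggestion"):
            print(f"hint: {suggestion}")
        return False
    return True
```

Checking success is False, rather than the mere presence of an error key, keeps this compatible with tools whose success responses are empty dicts.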