gobby 0.2.8__py3-none-any.whl → 0.2.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (168)
  1. gobby/__init__.py +1 -1
  2. gobby/adapters/__init__.py +6 -0
  3. gobby/adapters/base.py +11 -2
  4. gobby/adapters/claude_code.py +5 -28
  5. gobby/adapters/codex_impl/adapter.py +38 -43
  6. gobby/adapters/copilot.py +324 -0
  7. gobby/adapters/cursor.py +373 -0
  8. gobby/adapters/gemini.py +2 -26
  9. gobby/adapters/windsurf.py +359 -0
  10. gobby/agents/definitions.py +162 -2
  11. gobby/agents/isolation.py +33 -1
  12. gobby/agents/pty_reader.py +192 -0
  13. gobby/agents/registry.py +10 -1
  14. gobby/agents/runner.py +24 -8
  15. gobby/agents/sandbox.py +8 -3
  16. gobby/agents/session.py +4 -0
  17. gobby/agents/spawn.py +9 -2
  18. gobby/agents/spawn_executor.py +49 -61
  19. gobby/agents/spawners/command_builder.py +4 -4
  20. gobby/app_context.py +64 -0
  21. gobby/cli/__init__.py +4 -0
  22. gobby/cli/install.py +259 -4
  23. gobby/cli/installers/__init__.py +12 -0
  24. gobby/cli/installers/copilot.py +242 -0
  25. gobby/cli/installers/cursor.py +244 -0
  26. gobby/cli/installers/shared.py +3 -0
  27. gobby/cli/installers/windsurf.py +242 -0
  28. gobby/cli/pipelines.py +639 -0
  29. gobby/cli/sessions.py +3 -1
  30. gobby/cli/skills.py +209 -0
  31. gobby/cli/tasks/crud.py +6 -5
  32. gobby/cli/tasks/search.py +1 -1
  33. gobby/cli/ui.py +116 -0
  34. gobby/cli/utils.py +5 -17
  35. gobby/cli/workflows.py +38 -17
  36. gobby/config/app.py +5 -0
  37. gobby/config/features.py +0 -20
  38. gobby/config/skills.py +23 -2
  39. gobby/config/tasks.py +4 -0
  40. gobby/hooks/broadcaster.py +9 -0
  41. gobby/hooks/event_handlers/__init__.py +155 -0
  42. gobby/hooks/event_handlers/_agent.py +175 -0
  43. gobby/hooks/event_handlers/_base.py +92 -0
  44. gobby/hooks/event_handlers/_misc.py +66 -0
  45. gobby/hooks/event_handlers/_session.py +487 -0
  46. gobby/hooks/event_handlers/_tool.py +196 -0
  47. gobby/hooks/events.py +48 -0
  48. gobby/hooks/hook_manager.py +27 -3
  49. gobby/install/copilot/hooks/hook_dispatcher.py +203 -0
  50. gobby/install/cursor/hooks/hook_dispatcher.py +203 -0
  51. gobby/install/gemini/hooks/hook_dispatcher.py +8 -0
  52. gobby/install/windsurf/hooks/hook_dispatcher.py +205 -0
  53. gobby/llm/__init__.py +14 -1
  54. gobby/llm/claude.py +594 -43
  55. gobby/llm/service.py +149 -0
  56. gobby/mcp_proxy/importer.py +4 -41
  57. gobby/mcp_proxy/instructions.py +9 -27
  58. gobby/mcp_proxy/manager.py +13 -3
  59. gobby/mcp_proxy/models.py +1 -0
  60. gobby/mcp_proxy/registries.py +66 -5
  61. gobby/mcp_proxy/server.py +6 -2
  62. gobby/mcp_proxy/services/recommendation.py +2 -28
  63. gobby/mcp_proxy/services/tool_filter.py +7 -0
  64. gobby/mcp_proxy/services/tool_proxy.py +19 -1
  65. gobby/mcp_proxy/stdio.py +37 -21
  66. gobby/mcp_proxy/tools/agents.py +7 -0
  67. gobby/mcp_proxy/tools/artifacts.py +3 -3
  68. gobby/mcp_proxy/tools/hub.py +30 -1
  69. gobby/mcp_proxy/tools/orchestration/cleanup.py +5 -5
  70. gobby/mcp_proxy/tools/orchestration/monitor.py +1 -1
  71. gobby/mcp_proxy/tools/orchestration/orchestrate.py +8 -3
  72. gobby/mcp_proxy/tools/orchestration/review.py +17 -4
  73. gobby/mcp_proxy/tools/orchestration/wait.py +7 -7
  74. gobby/mcp_proxy/tools/pipelines/__init__.py +254 -0
  75. gobby/mcp_proxy/tools/pipelines/_discovery.py +67 -0
  76. gobby/mcp_proxy/tools/pipelines/_execution.py +281 -0
  77. gobby/mcp_proxy/tools/sessions/_crud.py +4 -4
  78. gobby/mcp_proxy/tools/sessions/_handoff.py +1 -1
  79. gobby/mcp_proxy/tools/skills/__init__.py +184 -30
  80. gobby/mcp_proxy/tools/spawn_agent.py +229 -14
  81. gobby/mcp_proxy/tools/task_readiness.py +27 -4
  82. gobby/mcp_proxy/tools/tasks/_context.py +8 -0
  83. gobby/mcp_proxy/tools/tasks/_crud.py +27 -1
  84. gobby/mcp_proxy/tools/tasks/_helpers.py +1 -1
  85. gobby/mcp_proxy/tools/tasks/_lifecycle.py +125 -8
  86. gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +2 -1
  87. gobby/mcp_proxy/tools/tasks/_search.py +1 -1
  88. gobby/mcp_proxy/tools/workflows/__init__.py +273 -0
  89. gobby/mcp_proxy/tools/workflows/_artifacts.py +225 -0
  90. gobby/mcp_proxy/tools/workflows/_import.py +112 -0
  91. gobby/mcp_proxy/tools/workflows/_lifecycle.py +332 -0
  92. gobby/mcp_proxy/tools/workflows/_query.py +226 -0
  93. gobby/mcp_proxy/tools/workflows/_resolution.py +78 -0
  94. gobby/mcp_proxy/tools/workflows/_terminal.py +175 -0
  95. gobby/mcp_proxy/tools/worktrees.py +54 -15
  96. gobby/memory/components/__init__.py +0 -0
  97. gobby/memory/components/ingestion.py +98 -0
  98. gobby/memory/components/search.py +108 -0
  99. gobby/memory/context.py +5 -5
  100. gobby/memory/manager.py +16 -25
  101. gobby/paths.py +51 -0
  102. gobby/prompts/loader.py +1 -35
  103. gobby/runner.py +131 -16
  104. gobby/servers/http.py +193 -150
  105. gobby/servers/routes/__init__.py +2 -0
  106. gobby/servers/routes/admin.py +56 -0
  107. gobby/servers/routes/mcp/endpoints/execution.py +33 -32
  108. gobby/servers/routes/mcp/endpoints/registry.py +8 -8
  109. gobby/servers/routes/mcp/hooks.py +10 -1
  110. gobby/servers/routes/pipelines.py +227 -0
  111. gobby/servers/websocket.py +314 -1
  112. gobby/sessions/analyzer.py +89 -3
  113. gobby/sessions/manager.py +5 -5
  114. gobby/sessions/transcripts/__init__.py +3 -0
  115. gobby/sessions/transcripts/claude.py +5 -0
  116. gobby/sessions/transcripts/codex.py +5 -0
  117. gobby/sessions/transcripts/gemini.py +5 -0
  118. gobby/skills/hubs/__init__.py +25 -0
  119. gobby/skills/hubs/base.py +234 -0
  120. gobby/skills/hubs/claude_plugins.py +328 -0
  121. gobby/skills/hubs/clawdhub.py +289 -0
  122. gobby/skills/hubs/github_collection.py +465 -0
  123. gobby/skills/hubs/manager.py +263 -0
  124. gobby/skills/hubs/skillhub.py +342 -0
  125. gobby/skills/parser.py +23 -0
  126. gobby/skills/sync.py +5 -4
  127. gobby/storage/artifacts.py +19 -0
  128. gobby/storage/memories.py +4 -4
  129. gobby/storage/migrations.py +118 -3
  130. gobby/storage/pipelines.py +367 -0
  131. gobby/storage/sessions.py +23 -4
  132. gobby/storage/skills.py +48 -8
  133. gobby/storage/tasks/_aggregates.py +2 -2
  134. gobby/storage/tasks/_lifecycle.py +4 -4
  135. gobby/storage/tasks/_models.py +7 -1
  136. gobby/storage/tasks/_queries.py +3 -3
  137. gobby/sync/memories.py +4 -3
  138. gobby/tasks/commits.py +48 -17
  139. gobby/tasks/external_validator.py +4 -17
  140. gobby/tasks/validation.py +13 -87
  141. gobby/tools/summarizer.py +18 -51
  142. gobby/utils/status.py +13 -0
  143. gobby/workflows/actions.py +80 -0
  144. gobby/workflows/context_actions.py +265 -27
  145. gobby/workflows/definitions.py +119 -1
  146. gobby/workflows/detection_helpers.py +23 -11
  147. gobby/workflows/enforcement/__init__.py +11 -1
  148. gobby/workflows/enforcement/blocking.py +96 -0
  149. gobby/workflows/enforcement/handlers.py +35 -1
  150. gobby/workflows/enforcement/task_policy.py +18 -0
  151. gobby/workflows/engine.py +26 -4
  152. gobby/workflows/evaluator.py +8 -5
  153. gobby/workflows/lifecycle_evaluator.py +59 -27
  154. gobby/workflows/loader.py +567 -30
  155. gobby/workflows/lobster_compat.py +147 -0
  156. gobby/workflows/pipeline_executor.py +801 -0
  157. gobby/workflows/pipeline_state.py +172 -0
  158. gobby/workflows/pipeline_webhooks.py +206 -0
  159. gobby/workflows/premature_stop.py +5 -0
  160. gobby/worktrees/git.py +135 -20
  161. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/METADATA +56 -22
  162. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/RECORD +166 -122
  163. gobby/hooks/event_handlers.py +0 -1008
  164. gobby/mcp_proxy/tools/workflows.py +0 -1023
  165. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/WHEEL +0 -0
  166. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/entry_points.txt +0 -0
  167. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/licenses/LICENSE.md +0 -0
  168. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/top_level.txt +0 -0
gobby/mcp_proxy/tools/workflows/_query.py (new file)
@@ -0,0 +1,226 @@
+ """
+ Query tools for workflows.
+ """
+
+ import logging
+ from pathlib import Path
+ from typing import Any
+
+ import yaml
+
+ from gobby.mcp_proxy.tools.workflows._resolution import resolve_session_id
+ from gobby.storage.sessions import LocalSessionManager
+ from gobby.utils.project_context import get_workflow_project_path
+ from gobby.workflows.definitions import WorkflowDefinition
+ from gobby.workflows.loader import WorkflowLoader
+ from gobby.workflows.state_manager import WorkflowStateManager
+
+ logger = logging.getLogger(__name__)
+
+
+ def get_workflow(
+     loader: WorkflowLoader,
+     name: str,
+     project_path: str | None = None,
+ ) -> dict[str, Any]:
+     """
+     Get workflow details including steps, triggers, and settings.
+
+     Args:
+         loader: WorkflowLoader instance
+         name: Workflow name (without .yaml extension)
+         project_path: Project directory path. Auto-discovered from cwd if not provided.
+
+     Returns:
+         Workflow definition details
+     """
+     # Auto-discover project path if not provided
+     if not project_path:
+         discovered = get_workflow_project_path()
+         if discovered:
+             project_path = str(discovered)
+
+     proj = Path(project_path) if project_path else None
+     definition = loader.load_workflow(name, proj)
+
+     if not definition:
+         return {"success": False, "error": f"Workflow '{name}' not found"}
+
+     # Handle WorkflowDefinition vs PipelineDefinition
+     if isinstance(definition, WorkflowDefinition):
+         return {
+             "success": True,
+             "name": definition.name,
+             "type": definition.type,
+             "description": definition.description,
+             "version": definition.version,
+             "steps": (
+                 [
+                     {
+                         "name": s.name,
+                         "description": s.description,
+                         "allowed_tools": s.allowed_tools,
+                         "blocked_tools": s.blocked_tools,
+                     }
+                     for s in definition.steps
+                 ]
+                 if definition.steps
+                 else []
+             ),
+             "triggers": (
+                 {name: len(actions) for name, actions in definition.triggers.items()}
+                 if definition.triggers
+                 else {}
+             ),
+             "settings": definition.settings,
+         }
+     else:
+         # PipelineDefinition
+         return {
+             "success": True,
+             "name": definition.name,
+             "type": definition.type,
+             "description": definition.description,
+             "version": definition.version,
+             "steps": (
+                 [{"id": s.id, "exec": s.exec, "prompt": s.prompt} for s in definition.steps]
+                 if definition.steps
+                 else []
+             ),
+             "triggers": {},
+             "settings": {},
+         }
+
+
+ def list_workflows(
+     loader: WorkflowLoader,
+     project_path: str | None = None,
+     workflow_type: str | None = None,
+     global_only: bool = False,
+ ) -> dict[str, Any]:
+     """
+     List available workflows.
+
+     Lists workflows from both project (.gobby/workflows) and global (~/.gobby/workflows)
+     directories. Project workflows shadow global ones with the same name.
+
+     Args:
+         loader: WorkflowLoader instance
+         project_path: Project directory path. Auto-discovered from cwd if not provided.
+         workflow_type: Filter by type ("step" or "lifecycle")
+         global_only: If True, only show global workflows (ignore project)
+
+     Returns:
+         List of workflows with name, type, description, and source
+     """
+
+     # Auto-discover project path if not provided
+     if not project_path:
+         discovered = get_workflow_project_path()
+         if discovered:
+             project_path = str(discovered)
+
+     search_dirs = list(loader.global_dirs)
+     proj = Path(project_path) if project_path else None
+
+     # Include project workflows unless global_only (project searched first to shadow global)
+     if not global_only and proj:
+         project_dir = proj / ".gobby" / "workflows"
+         if project_dir.exists():
+             search_dirs.insert(0, project_dir)
+
+     workflows = []
+     seen_names = set()
+
+     for search_dir in search_dirs:
+         if not search_dir.exists():
+             continue
+
+         is_project = proj and search_dir == (proj / ".gobby" / "workflows")
+
+         for yaml_path in search_dir.glob("*.yaml"):
+             name = yaml_path.stem
+             if name in seen_names:
+                 continue
+
+             try:
+                 with open(yaml_path, encoding="utf-8") as f:
+                     data = yaml.safe_load(f)
+
+                 if not data:
+                     continue
+
+                 wf_type = data.get("type", "step")
+
+                 if workflow_type and wf_type != workflow_type:
+                     continue
+
+                 workflows.append(
+                     {
+                         "name": name,
+                         "type": wf_type,
+                         "description": data.get("description", ""),
+                         "source": "project" if is_project else "global",
+                     }
+                 )
+                 seen_names.add(name)
+
+             except Exception as e:
+                 logger.debug(
+                     "Skipping invalid workflow file %s: %s",
+                     yaml_path,
+                     e,
+                     exc_info=True,
+                 )  # nosec B110
+
+     return {"workflows": workflows, "count": len(workflows)}
+
+
+ def get_workflow_status(
+     state_manager: WorkflowStateManager,
+     session_manager: LocalSessionManager,
+     session_id: str | None = None,
+ ) -> dict[str, Any]:
+     """
+     Get current workflow step and state.
+
+     Args:
+         state_manager: WorkflowStateManager instance
+         session_manager: LocalSessionManager instance
+         session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
+
+     Returns:
+         Workflow state including step, action counts, artifacts
+     """
+     # Require explicit session_id to prevent cross-session bleed
+     if not session_id:
+         return {
+             "has_workflow": False,
+             "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
+         }
+
+     # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
+     try:
+         resolved_session_id = resolve_session_id(session_manager, session_id)
+     except ValueError as e:
+         return {"has_workflow": False, "error": str(e)}
+
+     state = state_manager.get_state(resolved_session_id)
+     if not state:
+         return {"has_workflow": False, "session_id": resolved_session_id}
+
+     return {
+         "has_workflow": True,
+         "session_id": resolved_session_id,
+         "workflow_name": state.workflow_name,
+         "step": state.step,
+         "step_action_count": state.step_action_count,
+         "total_action_count": state.total_action_count,
+         "reflection_pending": state.reflection_pending,
+         "artifacts": list(state.artifacts.keys()) if state.artifacts else [],
+         "variables": state.variables,
+         "task_progress": (
+             f"{state.current_task_index + 1}/{len(state.task_list)}" if state.task_list else None
+         ),
+         "updated_at": state.updated_at.isoformat() if state.updated_at else None,
+     }
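
For orientation, a minimal usage sketch of the query helpers above (illustrative only, not part of the diff; it assumes an already-configured WorkflowLoader and relies only on the signatures and return keys shown in _query.py):

# Illustrative sketch -- not part of the package. Assumes the caller already has
# a configured WorkflowLoader; uses only keys returned by the functions above.
from gobby.mcp_proxy.tools.workflows._query import get_workflow, list_workflows
from gobby.workflows.loader import WorkflowLoader


def print_workflow_summary(loader: WorkflowLoader, project_path: str | None = None) -> None:
    """List workflows, then fetch details for each one found."""
    listing = list_workflows(loader, project_path=project_path)
    for wf in listing["workflows"]:
        print(f"[{wf['source']}] {wf['name']} ({wf['type']}): {wf['description']}")
        detail = get_workflow(loader, wf["name"], project_path=project_path)
        if detail.get("success"):
            # WorkflowDefinition steps carry "name"; PipelineDefinition steps carry "id"
            steps = [s.get("name") or s.get("id") for s in detail["steps"]]
            print(f"  steps: {steps}")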
gobby/mcp_proxy/tools/workflows/_resolution.py (new file)
@@ -0,0 +1,78 @@
+ """
+ Resolution utilities for workflow tools.
+
+ Provides functions to resolve session and task references from various
+ formats (#N, N, UUID, prefix) to canonical UUIDs.
+ """
+
+ import logging
+ from typing import Any
+
+ from gobby.storage.database import DatabaseProtocol
+ from gobby.storage.sessions import LocalSessionManager
+ from gobby.storage.tasks._id import resolve_task_reference
+ from gobby.storage.tasks._models import TaskNotFoundError
+ from gobby.utils.project_context import get_project_context
+
+ logger = logging.getLogger(__name__)
+
+
+ def resolve_session_id(session_manager: LocalSessionManager, ref: str) -> str:
+     """Resolve session reference (#N, N, UUID, or prefix) to UUID."""
+     project_ctx = get_project_context()
+     project_id = project_ctx.get("id") if project_ctx else None
+     return session_manager.resolve_session_reference(ref, project_id)
+
+
+ def resolve_session_task_value(
+     value: Any,
+     session_id: str | None,
+     session_manager: LocalSessionManager,
+     db: DatabaseProtocol,
+ ) -> Any:
+     """
+     Resolve a session_task value from seq_num reference (#N or N) to UUID.
+
+     This prevents repeated resolution failures in condition evaluation when
+     task_tree_complete() is called with a seq_num that requires project_id.
+
+     Args:
+         value: The value to potentially resolve (e.g., "#4424", "47", or a UUID)
+         session_id: Session ID to look up project_id
+         session_manager: Session manager for lookups
+         db: Database for task resolution
+
+     Returns:
+         Resolved UUID if value was a seq_num reference, otherwise original value
+     """
+     # Only process string values that look like seq_num references
+     if not isinstance(value, str):
+         return value
+
+     # Check if it's a seq_num reference (#N or plain N)
+     is_seq_ref = value.startswith("#") or value.isdigit()
+     if not is_seq_ref:
+         return value
+
+     # Need session to get project_id
+     if not session_id:
+         logger.warning(f"Cannot resolve task reference '{value}': no session_id provided")
+         return value
+
+     # Get project_id from session
+     session = session_manager.get(session_id)
+     if not session or not session.project_id:
+         logger.warning(f"Cannot resolve task reference '{value}': session has no project_id")
+         return value
+
+     # Resolve the reference
+     try:
+         resolved = resolve_task_reference(db, value, session.project_id)
+         logger.debug(f"Resolved session_task '{value}' to UUID '{resolved}'")
+         return resolved
+     except TaskNotFoundError as e:
+         logger.warning(f"Could not resolve task reference '{value}': {e}")
+         return value
+     except Exception as e:
+         logger.warning(f"Unexpected error resolving task reference '{value}': {e}")
+         return value
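
A short sketch of how the pass-through behaviour of resolve_session_task_value might be applied to a batch of values (illustrative only, not part of the diff; the normalize_task_refs helper is hypothetical, and session_manager/db are assumed to be an initialized LocalSessionManager and a DatabaseProtocol-compatible handle):

# Illustrative sketch -- not part of the package. Mirrors the documented
# behaviour: seq_num references resolve to UUIDs, everything else passes through.
from typing import Any

from gobby.mcp_proxy.tools.workflows._resolution import resolve_session_task_value
from gobby.storage.database import DatabaseProtocol
from gobby.storage.sessions import LocalSessionManager


def normalize_task_refs(
    values: list[Any],
    session_id: str | None,
    session_manager: LocalSessionManager,
    db: DatabaseProtocol,
) -> list[Any]:
    """Resolve "#4424"/"47"-style references to task UUIDs; leave other values untouched."""
    return [resolve_session_task_value(v, session_id, session_manager, db) for v in values]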
gobby/mcp_proxy/tools/workflows/_terminal.py (new file)
@@ -0,0 +1,175 @@
+ """
+ Terminal tools for workflows.
+ """
+
+ from __future__ import annotations
+
+ import asyncio
+ import logging
+ import os
+ import stat
+ import subprocess  # nosec B404
+ from pathlib import Path
+ from typing import TYPE_CHECKING, Any
+
+ from gobby.paths import get_install_dir
+
+ if TYPE_CHECKING:
+     from gobby.storage.sessions import LocalSessionManager
+
+ logger = logging.getLogger(__name__)
+
+
+ async def close_terminal(
+     session_id: str | None = None,
+     session_manager: LocalSessionManager | None = None,
+     signal: str = "TERM",
+     delay_ms: int = 0,
+ ) -> dict[str, Any]:
+     """
+     Close the current terminal by running the agent shutdown script.
+
+     This is for agent self-termination (meeseeks-style). The agent calls
+     this to close its own terminal window when done with its workflow.
+
+     The script is located at ~/.gobby/scripts/agent_shutdown.sh and is
+     automatically rebuilt if missing. It handles different terminal types
+     (tmux, iTerm, Terminal.app, Ghostty, Kitty, WezTerm, etc.).
+
+     Args:
+         session_id: Session ID to look up terminal PID from. Accepts #N, N,
+             UUID, or prefix. If provided with session_manager, the terminal
+             PID is resolved from session.terminal_context.parent_pid.
+         session_manager: LocalSessionManager for session lookups.
+         signal: Signal to use for shutdown (TERM, KILL, INT). Default: TERM.
+         delay_ms: Optional delay in milliseconds before shutdown. Default: 0.
+
+     Returns:
+         Dict with success status and message.
+     """
+     # Script location
+     gobby_dir = Path.home() / ".gobby"
+     scripts_dir = gobby_dir / "scripts"
+     script_path = scripts_dir / "agent_shutdown.sh"
+
+     # Source script from the install directory (single source of truth)
+     source_script_path = get_install_dir() / "shared" / "scripts" / "agent_shutdown.sh"
+
+     def get_script_version(script_content: str) -> str | None:
+         """Extract VERSION marker from script content."""
+         import re
+
+         match = re.search(r"^# VERSION:\s*(.+)$", script_content, re.MULTILINE)
+         return match.group(1).strip() if match else None
+
+     # Ensure directories exist and script is present/up-to-date
+     script_rebuilt = False
+     try:
+         scripts_dir.mkdir(parents=True, exist_ok=True)
+
+         # Read source script content
+         if source_script_path.exists():
+             source_content = source_script_path.read_text()
+             source_version = get_script_version(source_content)
+         else:
+             logger.warning(f"Source shutdown script not found at {source_script_path}")
+             source_content = None
+             source_version = None
+
+         # Check if installed script exists and compare versions
+         needs_rebuild = False
+         if not script_path.exists():
+             needs_rebuild = True
+         elif source_content:
+             installed_content = script_path.read_text()
+             installed_version = get_script_version(installed_content)
+             # Rebuild if versions differ or installed has no version marker
+             if installed_version != source_version:
+                 needs_rebuild = True
+                 logger.info(
+                     f"Shutdown script version mismatch: installed={installed_version}, source={source_version}"
+                 )
+
+         if needs_rebuild:
+             if not source_content:
+                 logger.error(
+                     f"Cannot rebuild shutdown script at {script_path}: "
+                     f"source script not found at {source_script_path}"
+                 )
+                 return {
+                     "success": False,
+                     "error": f"Source shutdown script not found at {source_script_path}",
+                 }
+             script_path.write_text(source_content)
+             # Make executable
+             script_path.chmod(script_path.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP)
+             script_rebuilt = True
+             logger.info(f"Created/updated agent shutdown script at {script_path}")
+     except OSError as e:
+         return {
+             "success": False,
+             "error": f"Failed to create shutdown script: {e}",
+         }
+
+     # Validate signal
+     valid_signals = {"TERM", "KILL", "INT", "HUP", "QUIT"}
+     if signal.upper() not in valid_signals:
+         return {
+             "success": False,
+             "error": f"Invalid signal '{signal}'. Valid: {valid_signals}",
+         }
+
+     # Apply delay before launching script (non-blocking)
+     if delay_ms > 0:
+         await asyncio.sleep(delay_ms / 1000.0)
+
+     # Resolve terminal PID from session context if available
+     target_pid: int | None = None
+     if session_id and session_manager:
+         from gobby.mcp_proxy.tools.workflows._resolution import resolve_session_id
+
+         try:
+             resolved_id = resolve_session_id(session_manager, session_id)
+             if resolved_id:
+                 session = session_manager.get(resolved_id)
+                 if session and session.terminal_context:
+                     pid_value = session.terminal_context.get("parent_pid")
+                     if pid_value is not None:
+                         target_pid = int(pid_value)
+                         logger.info(f"Resolved terminal PID {target_pid} from session {session_id}")
+         except Exception as e:
+             logger.warning(f"Failed to resolve terminal PID from session: {e}")
+
+     # Launch the script
+     try:
+         # Run in background - we don't wait for it since it kills our process
+         env = os.environ.copy()
+
+         # Pass PID as first arg if available, otherwise script will discover via PPID
+         pid_arg = str(target_pid) if target_pid else ""
+         subprocess.Popen(  # nosec B603 - script path is from gobby scripts directory
+             [str(script_path), pid_arg, signal.upper(), "0"],  # Delay already applied
+             env=env,
+             start_new_session=True,  # Detach from parent
+             stdout=subprocess.DEVNULL,
+             stderr=subprocess.DEVNULL,
+         )
+
+         result: dict[str, Any] = {
+             "success": True,
+             "message": "Shutdown script launched",
+             "script_path": str(script_path),
+             "script_rebuilt": script_rebuilt,
+             "signal": signal.upper(),
+         }
+         if target_pid:
+             result["target_pid"] = target_pid
+             result["pid_source"] = "session_terminal_context"
+         else:
+             result["pid_source"] = "ppid_discovery"
+         return result
+     except OSError as e:
+         return {
+             "success": False,
+             "error": f"Failed to launch shutdown script: {e}",
+         }
gobby/mcp_proxy/tools/worktrees.py
@@ -169,14 +169,15 @@ def _copy_project_json_to_worktree(


  def _install_provider_hooks(
-     provider: Literal["claude", "gemini", "codex", "antigravity"] | None,
+     provider: Literal["claude", "gemini", "codex", "antigravity", "cursor", "windsurf", "copilot"]
+     | None,
      worktree_path: str | Path,
  ) -> bool:
      """
      Install CLI hooks for the specified provider in the worktree.

      Args:
-         provider: Provider name ('claude', 'gemini', 'antigravity', or None)
+         provider: Provider name ('claude', 'gemini', 'antigravity', 'cursor', 'windsurf', 'copilot', or None)
          worktree_path: Path to worktree directory

      Returns:
@@ -196,6 +197,16 @@ def _install_provider_hooks(
              return True
          else:
              logger.warning(f"Failed to install Claude hooks: {result.get('error')}")
+     elif provider in ("cursor", "windsurf", "copilot"):
+         # These editors use Claude hooks format
+         from gobby.cli.installers.claude import install_claude
+
+         result = install_claude(worktree_path_obj)
+         if result["success"]:
+             logger.info(f"Installed {provider} hooks in worktree: {worktree_path}")
+             return True
+         else:
+             logger.warning(f"Failed to install {provider} hooks: {result.get('error')}")
      elif provider == "gemini":
          from gobby.cli.installers.gemini import install_gemini

@@ -326,7 +337,10 @@ def create_worktrees_registry(
          worktree_path: str | None = None,
          create_branch: bool = True,
          project_path: str | None = None,
-         provider: Literal["claude", "gemini", "codex", "antigravity"] | None = None,
+         provider: Literal[
+             "claude", "gemini", "codex", "antigravity", "cursor", "windsurf", "copilot"
+         ]
+         | None = None,
      ) -> dict[str, Any]:
          """
          Create a new git worktree.
@@ -338,7 +352,7 @@
              worktree_path: Optional custom path (defaults to ../{branch_name}).
              create_branch: Whether to create a new branch (default: True).
              project_path: Path to project directory (pass cwd from CLI).
-             provider: CLI provider to install hooks for (claude, gemini, codex, antigravity).
+             provider: CLI provider to install hooks for (claude, gemini, codex, antigravity, cursor, windsurf, copilot).
                  If specified, installs hooks so agents can communicate with daemon.

          Returns:
@@ -451,7 +465,7 @@
      async def list_worktrees(
          status: str | None = None,
          agent_session_id: str | None = None,
-         limit: int = 50,
+         limit: int | str = 50,
      ) -> dict[str, Any]:
          """
          List worktrees with optional filters.
@@ -464,6 +478,9 @@
          Returns:
              Dict with list of worktrees.
          """
+         # Handle string inputs from MCP
+         limit = int(limit) if isinstance(limit, str) else limit
+
          # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
          resolved_session_id = agent_session_id
          if agent_session_id:
@@ -569,7 +586,7 @@
      )
      async def delete_worktree(
          worktree_id: str,
-         force: bool = False,
+         force: bool | str = False,
          project_path: str | None = None,
      ) -> dict[str, Any]:
          """
@@ -591,6 +608,9 @@
          Returns:
              Dict with success status.
          """
+         # Handle string inputs from MCP
+         force = force in (True, "true", "True", "1") if isinstance(force, str) else force
+
          worktree = worktree_storage.get(worktree_id)

          if not worktree:
@@ -611,8 +631,11 @@
              # nosec B110 - if context resolution fails, continue without git manager
              pass

-         # Check for uncommitted changes if not forcing
-         if resolved_git_mgr and Path(worktree.worktree_path).exists():
+         # Check if worktree path exists
+         worktree_exists = Path(worktree.worktree_path).exists()
+
+         # Check for uncommitted changes if not forcing (only if path exists)
+         if resolved_git_mgr and worktree_exists:
              status = resolved_git_mgr.get_worktree_status(worktree.worktree_path)
              if status and status.has_uncommitted_changes and not force:
                  return {
@@ -621,8 +644,8 @@
                      "uncommitted_changes": True,
                  }

-         # Delete git worktree
-         if resolved_git_mgr:
+         # Delete git worktree (only if path exists - handles orphaned DB records)
+         if resolved_git_mgr and worktree_exists:
              result = resolved_git_mgr.delete_worktree(
                  worktree.worktree_path,
                  force=force,
@@ -634,6 +657,11 @@
                      "success": False,
                      "error": result.error or "Failed to delete git worktree",
                  }
+         elif not worktree_exists:
+             # Worktree path gone (manually deleted) - just clean up DB record
+             logger.info(
+                 f"Worktree path {worktree.worktree_path} doesn't exist, cleaning up DB record only"
+             )

          # Delete database record
          deleted = worktree_storage.delete(worktree_id)
@@ -744,8 +772,8 @@
      )
      async def detect_stale_worktrees(
          project_path: str | None = None,
-         hours: int = 24,
-         limit: int = 50,
+         hours: int | str = 24,
+         limit: int | str = 50,
      ) -> dict[str, Any]:
          """
          Find stale worktrees (no activity for N hours).
@@ -758,6 +786,10 @@
          Returns:
              Dict with list of stale worktrees.
          """
+         # Handle string inputs from MCP (JSON params come as strings)
+         hours = int(hours) if isinstance(hours, str) else hours
+         limit = int(limit) if isinstance(limit, str) else limit
+
          _, resolved_project_id, error = _resolve_project_context(
              project_path, git_manager, project_id
          )
@@ -794,9 +826,9 @@
      )
      async def cleanup_stale_worktrees(
          project_path: str | None = None,
-         hours: int = 24,
-         dry_run: bool = True,
-         delete_git: bool = False,
+         hours: int | str = 24,
+         dry_run: bool | str = True,
+         delete_git: bool | str = False,
      ) -> dict[str, Any]:
          """
          Cleanup stale worktrees.
@@ -810,6 +842,13 @@
          Returns:
              Dict with cleanup results.
          """
+         # Handle string inputs from MCP (JSON params come as strings)
+         hours = int(hours) if isinstance(hours, str) else hours
+         dry_run = dry_run in (True, "true", "True", "1") if isinstance(dry_run, str) else dry_run
+         delete_git = (
+             delete_git in (True, "true", "True", "1") if isinstance(delete_git, str) else delete_git
+         )
+
          resolved_git_manager, resolved_project_id, error = _resolve_project_context(
              project_path, git_manager, project_id
          )
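
The worktree hunks above repeat one coercion idiom for MCP parameters that can arrive as JSON strings. Shown purely as an illustrative sketch (these helpers do not exist in the package), the idiom amounts to:

# Illustrative sketch -- not part of the package. Restates the inline coercion
# pattern used in list_worktrees, delete_worktree, detect_stale_worktrees, and
# cleanup_stale_worktrees above.


def coerce_int(value: int | str) -> int:
    """Accept ints or numeric strings such as "50"."""
    return int(value) if isinstance(value, str) else value


def coerce_bool(value: bool | str) -> bool:
    """Treat the strings "true", "True", and "1" as True, as the diff does."""
    return value in ("true", "True", "1") if isinstance(value, str) else value


# e.g. limit = coerce_int(limit); force = coerce_bool(force)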