gobby-0.2.8-py3-none-any.whl → gobby-0.2.11-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (168)
  1. gobby/__init__.py +1 -1
  2. gobby/adapters/__init__.py +6 -0
  3. gobby/adapters/base.py +11 -2
  4. gobby/adapters/claude_code.py +5 -28
  5. gobby/adapters/codex_impl/adapter.py +38 -43
  6. gobby/adapters/copilot.py +324 -0
  7. gobby/adapters/cursor.py +373 -0
  8. gobby/adapters/gemini.py +2 -26
  9. gobby/adapters/windsurf.py +359 -0
  10. gobby/agents/definitions.py +162 -2
  11. gobby/agents/isolation.py +33 -1
  12. gobby/agents/pty_reader.py +192 -0
  13. gobby/agents/registry.py +10 -1
  14. gobby/agents/runner.py +24 -8
  15. gobby/agents/sandbox.py +8 -3
  16. gobby/agents/session.py +4 -0
  17. gobby/agents/spawn.py +9 -2
  18. gobby/agents/spawn_executor.py +49 -61
  19. gobby/agents/spawners/command_builder.py +4 -4
  20. gobby/app_context.py +64 -0
  21. gobby/cli/__init__.py +4 -0
  22. gobby/cli/install.py +259 -4
  23. gobby/cli/installers/__init__.py +12 -0
  24. gobby/cli/installers/copilot.py +242 -0
  25. gobby/cli/installers/cursor.py +244 -0
  26. gobby/cli/installers/shared.py +3 -0
  27. gobby/cli/installers/windsurf.py +242 -0
  28. gobby/cli/pipelines.py +639 -0
  29. gobby/cli/sessions.py +3 -1
  30. gobby/cli/skills.py +209 -0
  31. gobby/cli/tasks/crud.py +6 -5
  32. gobby/cli/tasks/search.py +1 -1
  33. gobby/cli/ui.py +116 -0
  34. gobby/cli/utils.py +5 -17
  35. gobby/cli/workflows.py +38 -17
  36. gobby/config/app.py +5 -0
  37. gobby/config/features.py +0 -20
  38. gobby/config/skills.py +23 -2
  39. gobby/config/tasks.py +4 -0
  40. gobby/hooks/broadcaster.py +9 -0
  41. gobby/hooks/event_handlers/__init__.py +155 -0
  42. gobby/hooks/event_handlers/_agent.py +175 -0
  43. gobby/hooks/event_handlers/_base.py +92 -0
  44. gobby/hooks/event_handlers/_misc.py +66 -0
  45. gobby/hooks/event_handlers/_session.py +487 -0
  46. gobby/hooks/event_handlers/_tool.py +196 -0
  47. gobby/hooks/events.py +48 -0
  48. gobby/hooks/hook_manager.py +27 -3
  49. gobby/install/copilot/hooks/hook_dispatcher.py +203 -0
  50. gobby/install/cursor/hooks/hook_dispatcher.py +203 -0
  51. gobby/install/gemini/hooks/hook_dispatcher.py +8 -0
  52. gobby/install/windsurf/hooks/hook_dispatcher.py +205 -0
  53. gobby/llm/__init__.py +14 -1
  54. gobby/llm/claude.py +594 -43
  55. gobby/llm/service.py +149 -0
  56. gobby/mcp_proxy/importer.py +4 -41
  57. gobby/mcp_proxy/instructions.py +9 -27
  58. gobby/mcp_proxy/manager.py +13 -3
  59. gobby/mcp_proxy/models.py +1 -0
  60. gobby/mcp_proxy/registries.py +66 -5
  61. gobby/mcp_proxy/server.py +6 -2
  62. gobby/mcp_proxy/services/recommendation.py +2 -28
  63. gobby/mcp_proxy/services/tool_filter.py +7 -0
  64. gobby/mcp_proxy/services/tool_proxy.py +19 -1
  65. gobby/mcp_proxy/stdio.py +37 -21
  66. gobby/mcp_proxy/tools/agents.py +7 -0
  67. gobby/mcp_proxy/tools/artifacts.py +3 -3
  68. gobby/mcp_proxy/tools/hub.py +30 -1
  69. gobby/mcp_proxy/tools/orchestration/cleanup.py +5 -5
  70. gobby/mcp_proxy/tools/orchestration/monitor.py +1 -1
  71. gobby/mcp_proxy/tools/orchestration/orchestrate.py +8 -3
  72. gobby/mcp_proxy/tools/orchestration/review.py +17 -4
  73. gobby/mcp_proxy/tools/orchestration/wait.py +7 -7
  74. gobby/mcp_proxy/tools/pipelines/__init__.py +254 -0
  75. gobby/mcp_proxy/tools/pipelines/_discovery.py +67 -0
  76. gobby/mcp_proxy/tools/pipelines/_execution.py +281 -0
  77. gobby/mcp_proxy/tools/sessions/_crud.py +4 -4
  78. gobby/mcp_proxy/tools/sessions/_handoff.py +1 -1
  79. gobby/mcp_proxy/tools/skills/__init__.py +184 -30
  80. gobby/mcp_proxy/tools/spawn_agent.py +229 -14
  81. gobby/mcp_proxy/tools/task_readiness.py +27 -4
  82. gobby/mcp_proxy/tools/tasks/_context.py +8 -0
  83. gobby/mcp_proxy/tools/tasks/_crud.py +27 -1
  84. gobby/mcp_proxy/tools/tasks/_helpers.py +1 -1
  85. gobby/mcp_proxy/tools/tasks/_lifecycle.py +125 -8
  86. gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +2 -1
  87. gobby/mcp_proxy/tools/tasks/_search.py +1 -1
  88. gobby/mcp_proxy/tools/workflows/__init__.py +273 -0
  89. gobby/mcp_proxy/tools/workflows/_artifacts.py +225 -0
  90. gobby/mcp_proxy/tools/workflows/_import.py +112 -0
  91. gobby/mcp_proxy/tools/workflows/_lifecycle.py +332 -0
  92. gobby/mcp_proxy/tools/workflows/_query.py +226 -0
  93. gobby/mcp_proxy/tools/workflows/_resolution.py +78 -0
  94. gobby/mcp_proxy/tools/workflows/_terminal.py +175 -0
  95. gobby/mcp_proxy/tools/worktrees.py +54 -15
  96. gobby/memory/components/__init__.py +0 -0
  97. gobby/memory/components/ingestion.py +98 -0
  98. gobby/memory/components/search.py +108 -0
  99. gobby/memory/context.py +5 -5
  100. gobby/memory/manager.py +16 -25
  101. gobby/paths.py +51 -0
  102. gobby/prompts/loader.py +1 -35
  103. gobby/runner.py +131 -16
  104. gobby/servers/http.py +193 -150
  105. gobby/servers/routes/__init__.py +2 -0
  106. gobby/servers/routes/admin.py +56 -0
  107. gobby/servers/routes/mcp/endpoints/execution.py +33 -32
  108. gobby/servers/routes/mcp/endpoints/registry.py +8 -8
  109. gobby/servers/routes/mcp/hooks.py +10 -1
  110. gobby/servers/routes/pipelines.py +227 -0
  111. gobby/servers/websocket.py +314 -1
  112. gobby/sessions/analyzer.py +89 -3
  113. gobby/sessions/manager.py +5 -5
  114. gobby/sessions/transcripts/__init__.py +3 -0
  115. gobby/sessions/transcripts/claude.py +5 -0
  116. gobby/sessions/transcripts/codex.py +5 -0
  117. gobby/sessions/transcripts/gemini.py +5 -0
  118. gobby/skills/hubs/__init__.py +25 -0
  119. gobby/skills/hubs/base.py +234 -0
  120. gobby/skills/hubs/claude_plugins.py +328 -0
  121. gobby/skills/hubs/clawdhub.py +289 -0
  122. gobby/skills/hubs/github_collection.py +465 -0
  123. gobby/skills/hubs/manager.py +263 -0
  124. gobby/skills/hubs/skillhub.py +342 -0
  125. gobby/skills/parser.py +23 -0
  126. gobby/skills/sync.py +5 -4
  127. gobby/storage/artifacts.py +19 -0
  128. gobby/storage/memories.py +4 -4
  129. gobby/storage/migrations.py +118 -3
  130. gobby/storage/pipelines.py +367 -0
  131. gobby/storage/sessions.py +23 -4
  132. gobby/storage/skills.py +48 -8
  133. gobby/storage/tasks/_aggregates.py +2 -2
  134. gobby/storage/tasks/_lifecycle.py +4 -4
  135. gobby/storage/tasks/_models.py +7 -1
  136. gobby/storage/tasks/_queries.py +3 -3
  137. gobby/sync/memories.py +4 -3
  138. gobby/tasks/commits.py +48 -17
  139. gobby/tasks/external_validator.py +4 -17
  140. gobby/tasks/validation.py +13 -87
  141. gobby/tools/summarizer.py +18 -51
  142. gobby/utils/status.py +13 -0
  143. gobby/workflows/actions.py +80 -0
  144. gobby/workflows/context_actions.py +265 -27
  145. gobby/workflows/definitions.py +119 -1
  146. gobby/workflows/detection_helpers.py +23 -11
  147. gobby/workflows/enforcement/__init__.py +11 -1
  148. gobby/workflows/enforcement/blocking.py +96 -0
  149. gobby/workflows/enforcement/handlers.py +35 -1
  150. gobby/workflows/enforcement/task_policy.py +18 -0
  151. gobby/workflows/engine.py +26 -4
  152. gobby/workflows/evaluator.py +8 -5
  153. gobby/workflows/lifecycle_evaluator.py +59 -27
  154. gobby/workflows/loader.py +567 -30
  155. gobby/workflows/lobster_compat.py +147 -0
  156. gobby/workflows/pipeline_executor.py +801 -0
  157. gobby/workflows/pipeline_state.py +172 -0
  158. gobby/workflows/pipeline_webhooks.py +206 -0
  159. gobby/workflows/premature_stop.py +5 -0
  160. gobby/worktrees/git.py +135 -20
  161. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/METADATA +56 -22
  162. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/RECORD +166 -122
  163. gobby/hooks/event_handlers.py +0 -1008
  164. gobby/mcp_proxy/tools/workflows.py +0 -1023
  165. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/WHEEL +0 -0
  166. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/entry_points.txt +0 -0
  167. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/licenses/LICENSE.md +0 -0
  168. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/top_level.txt +0 -0
gobby/mcp_proxy/stdio.py CHANGED
@@ -116,6 +116,16 @@ class DaemonProxy:
             "validate_task",
         ):
             timeout = 300.0
+        # Wait tools: use the requested timeout plus a buffer
+        elif tool_name in (
+            "wait_for_task",
+            "wait_for_any_task",
+            "wait_for_all_tasks",
+        ):
+            # Extract timeout from arguments, default to 300s if not specified
+            arg_timeout = float((arguments or {}).get("timeout", 300.0))
+            # Add 30s buffer for HTTP overhead
+            timeout = arg_timeout + 30.0
 
         return await self._request(
             "POST",
@@ -242,13 +252,15 @@ class DaemonProxy:
             },
         )
 
-    async def init_project(
-        self, name: str | None = None, github_url: str | None = None
-    ) -> dict[str, Any]:
-        """Initialize a project - use 'gobby init' CLI command instead."""
+    async def init_project(self, name: str, project_path: str | None = None) -> dict[str, Any]:
+        """Initialize a new Gobby project.
+
+        Note: Project initialization requires CLI access and cannot be done
+        via the MCP proxy. Use 'gobby init' command instead.
+        """
         return {
             "success": False,
-            "error": "init_project requires CLI access. Run 'gobby init' from your terminal.",
+            "error": "Project initialization requires CLI access. Use 'gobby init' command instead.",
         }
 
 
@@ -410,22 +422,6 @@ def register_proxy_tools(mcp: FastMCP, proxy: DaemonProxy) -> None:
             cwd=cwd,
         )
 
-    @mcp.tool()
-    async def init_project(
-        name: str | None = None, github_url: str | None = None
-    ) -> dict[str, Any]:
-        """
-        Initialize a new Gobby project in the current directory.
-
-        Args:
-            name: Optional project name (auto-detected from directory name if not provided)
-            github_url: Optional GitHub URL (auto-detected from git remote if not provided)
-
-        Returns:
-            Dict with success status and project details
-        """
-        return await proxy.init_project(name, github_url)
-
     @mcp.tool()
     async def add_mcp_server(
         name: str,
@@ -503,6 +499,26 @@ def register_proxy_tools(mcp: FastMCP, proxy: DaemonProxy) -> None:
             query=query,
         )
 
+    @mcp.tool()
+    async def init_project(
+        name: str,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        """
+        Initialize a new Gobby project.
+
+        Note: Project initialization requires CLI access and cannot be done
+        via the MCP proxy. Use 'gobby init' command instead.
+
+        Args:
+            name: Project name
+            project_path: Path to project directory (optional)
+
+        Returns:
+            Result dict with error (CLI access required)
+        """
+        return await proxy.init_project(name, project_path)
+
 
 async def ensure_daemon_running() -> None:
     """Ensure the Gobby daemon is running and healthy."""
gobby/mcp_proxy/tools/agents.py CHANGED
@@ -40,6 +40,9 @@ def create_agents_registry(
     git_manager: Any | None = None,
     clone_storage: Any | None = None,
     clone_manager: Any | None = None,
+    # For mode=self (workflow activation on caller session)
+    workflow_loader: Any | None = None,
+    db: Any | None = None,
 ) -> InternalToolRegistry:
     """
     Create an agent tool registry with all agent-related tools.
@@ -430,6 +433,10 @@ def create_agents_registry(
         clone_storage=clone_storage,
         clone_manager=clone_manager,
         session_manager=session_manager,
+        workflow_loader=workflow_loader,
+        # For mode=self (workflow activation on caller session)
+        state_manager=workflow_state_manager,
+        db=db,
     )
 
     # Merge spawn_agent tools into agents registry
gobby/mcp_proxy/tools/artifacts.py CHANGED
@@ -19,11 +19,11 @@ from gobby.mcp_proxy.tools.internal import InternalToolRegistry
 
 if TYPE_CHECKING:
     from gobby.storage.artifacts import LocalArtifactManager
-    from gobby.storage.database import LocalDatabase
+    from gobby.storage.database import DatabaseProtocol
 
 
 def create_artifacts_registry(
-    db: LocalDatabase | None = None,
+    db: DatabaseProtocol | None = None,
     artifact_manager: LocalArtifactManager | None = None,
     session_manager: Any | None = None,
 ) -> InternalToolRegistry:
@@ -31,7 +31,7 @@ def create_artifacts_registry(
     Create an artifacts tool registry with all artifact-related tools.
 
     Args:
-        db: LocalDatabase instance (used to create artifact_manager if not provided)
+        db: DatabaseProtocol instance (used to create artifact_manager if not provided)
         artifact_manager: LocalArtifactManager instance
         session_manager: Session manager for resolving session references
 
gobby/mcp_proxy/tools/hub.py CHANGED
@@ -45,7 +45,7 @@ def create_hub_registry(
     """
     registry = HubToolRegistry(
         name="gobby-hub",
-        description="Hub (cross-project) queries - list_all_projects, list_cross_project_tasks, list_cross_project_sessions, hub_stats",
+        description="Hub (cross-project) queries and system info - get_machine_id, list_all_projects, list_cross_project_tasks, list_cross_project_sessions, hub_stats",
     )
 
     def _get_hub_db() -> LocalDatabase | None:
@@ -54,6 +54,35 @@ def create_hub_registry(
             return None
         return LocalDatabase(hub_db_path)
 
+    @registry.tool(
+        name="get_machine_id",
+        description="Get the daemon's machine identifier. Use this from sandboxed agents that cannot read ~/.gobby/machine_id directly.",
+    )
+    async def get_machine_id() -> dict[str, Any]:
+        """
+        Get the machine identifier used by this Gobby daemon.
+
+        The machine_id is stored in ~/.gobby/machine_id and is generated
+        once on first daemon run. This tool provides read-only access to
+        the daemon's authoritative machine_id.
+
+        Returns:
+            Dict with machine_id or error if not found.
+        """
+        from gobby.utils.machine_id import get_machine_id as _get_machine_id
+
+        machine_id = _get_machine_id()
+        if machine_id:
+            return {
+                "success": True,
+                "machine_id": machine_id,
+            }
+
+        return {
+            "success": False,
+            "error": "machine_id not found - daemon may not have initialized properly",
+        }
+
     @registry.tool(
         name="list_all_projects",
         description="List all unique projects in the hub database.",
gobby/mcp_proxy/tools/orchestration/cleanup.py CHANGED
@@ -38,7 +38,7 @@ def register_cleanup(
         """
         Approve a reviewed task and clean up its worktree.
 
-        This tool transitions a task from "review" to "closed" status
+        This tool transitions a task from "needs_review" to "closed" status
         and optionally deletes the associated worktree.
 
         Args:
@@ -71,11 +71,11 @@ def register_cleanup(
                 "error": f"Task not found: {task_id}",
             }
 
-        # Verify task is in review status
-        if task.status != "review":
+        # Verify task is in needs_review status
+        if task.status != "needs_review":
             return {
                 "success": False,
-                "error": f"Task must be in 'review' status to approve. Current status: {task.status}",
+                "error": f"Task must be in 'needs_review' status to approve. Current status: {task.status}",
             }
 
         # Get associated worktree (if any)
@@ -148,7 +148,7 @@ def register_cleanup(
         name="approve_and_cleanup",
         description=(
             "Approve a reviewed task and clean up its worktree. "
-            "Transitions task from 'review' to 'closed' status and deletes worktree."
+            "Transitions task from 'needs_review' to 'closed' status and deletes worktree."
         ),
         input_schema={
             "type": "object",
gobby/mcp_proxy/tools/orchestration/monitor.py CHANGED
@@ -101,7 +101,7 @@ def register_monitor(
                 closed_tasks.append(task_info)
             elif task.status == "in_progress":
                 in_progress_tasks.append(task_info)
-            elif task.status == "review":
+            elif task.status == "needs_review":
                 review_tasks.append(task_info)
             else:
                 open_tasks.append(task_info)
gobby/mcp_proxy/tools/orchestration/orchestrate.py CHANGED
@@ -35,7 +35,9 @@ def register_orchestrator(
 
     async def orchestrate_ready_tasks(
         parent_task_id: str,
-        provider: Literal["claude", "gemini", "codex", "antigravity"] = "gemini",
+        provider: Literal[
+            "claude", "gemini", "codex", "antigravity", "cursor", "windsurf", "copilot"
+        ] = "gemini",
         model: str | None = None,
         terminal: str = "auto",
         mode: str = "terminal",
@@ -43,7 +45,10 @@ def register_orchestrator(
         max_concurrent: int = 3,
         parent_session_id: str | None = None,
         project_path: str | None = None,
-        coding_provider: Literal["claude", "gemini", "codex", "antigravity"] | None = None,
+        coding_provider: Literal[
+            "claude", "gemini", "codex", "antigravity", "cursor", "windsurf", "copilot"
+        ]
+        | None = None,
         coding_model: str | None = None,
         base_branch: str | None = None,
     ) -> dict[str, Any]:
@@ -652,7 +657,7 @@ def register_orchestrator(
             },
             "provider": {
                 "type": "string",
-                "description": "Fallback LLM provider (claude, gemini, codex, antigravity)",
+                "description": "Fallback LLM provider (claude, gemini, codex, antigravity, cursor, windsurf, copilot)",
                 "default": "gemini",
             },
             "model": {
gobby/mcp_proxy/tools/orchestration/review.py CHANGED
@@ -30,7 +30,9 @@ def register_reviewer(
 
     async def spawn_review_agent(
         task_id: str,
-        review_provider: Literal["claude", "gemini", "codex", "antigravity"] = "claude",
+        review_provider: Literal[
+            "claude", "gemini", "codex", "antigravity", "cursor", "windsurf", "copilot"
+        ] = "claude",
         review_model: str | None = "claude-opus-4-5",
         terminal: str = "auto",
         mode: str = "terminal",
@@ -61,7 +63,15 @@ def register_reviewer(
         """
         # Validate mode and review_provider
         allowed_modes = {"terminal", "embedded", "headless"}
-        allowed_providers = {"claude", "gemini", "codex", "antigravity"}
+        allowed_providers = {
+            "claude",
+            "gemini",
+            "codex",
+            "antigravity",
+            "cursor",
+            "windsurf",
+            "copilot",
+        }
 
         mode_lower = mode.lower() if mode else "terminal"
         if mode_lower not in allowed_modes:
@@ -314,7 +324,7 @@ def register_reviewer(
             },
             "review_provider": {
                 "type": "string",
-                "description": "LLM provider for review (claude, gemini, codex, antigravity)",
+                "description": "LLM provider for review (claude, gemini, codex, antigravity, cursor, windsurf, copilot)",
                 "default": "claude",
             },
             "review_model": {
@@ -350,7 +360,10 @@ def register_reviewer(
     async def process_completed_agents(
         parent_session_id: str,
         spawn_reviews: bool = True,
-        review_provider: Literal["claude", "gemini", "codex", "antigravity"] | None = None,
+        review_provider: Literal[
+            "claude", "gemini", "codex", "antigravity", "cursor", "windsurf", "copilot"
+        ]
+        | None = None,
         review_model: str | None = None,
         terminal: str = "auto",
         mode: str = "terminal",
gobby/mcp_proxy/tools/orchestration/wait.py CHANGED
@@ -62,7 +62,7 @@ def register_wait(
 
         # Consider task complete if status is "closed" or "review"
        # (review tasks have completed their work, just awaiting human approval)
-        is_complete = task.status in ("closed", "review")
+        is_complete = task.status in ("closed", "needs_review")
         return is_complete, task_info
 
     async def wait_for_task(
@@ -73,7 +73,7 @@ def register_wait(
         """
         Wait for a single task to complete.
 
-        Blocks until the task reaches "closed" or "review" status, or timeout expires.
+        Blocks until the task reaches "closed" or "needs_review" status, or timeout expires.
 
         Args:
             task_id: Task reference (#N, N, path, or UUID)
@@ -170,7 +170,7 @@ def register_wait(
         name="wait_for_task",
         description=(
             "Wait for a single task to complete. "
-            "Blocks until task reaches 'closed' or 'review' status, or timeout expires."
+            "Blocks until task reaches 'closed' or 'needs_review' status, or timeout expires."
         ),
         input_schema={
             "type": "object",
@@ -201,7 +201,7 @@ def register_wait(
         """
         Wait for any one of multiple tasks to complete.
 
-        Blocks until at least one task reaches "closed" or "review" status, or timeout expires.
+        Blocks until at least one task reaches "closed" or "needs_review" status, or timeout expires.
 
         Args:
             task_ids: List of task references (#N, N, path, or UUID)
@@ -290,7 +290,7 @@ def register_wait(
         name="wait_for_any_task",
         description=(
             "Wait for any one of multiple tasks to complete. "
-            "Returns as soon as the first task reaches 'closed' or 'review' status."
+            "Returns as soon as the first task reaches 'closed' or 'needs_review' status."
         ),
         input_schema={
             "type": "object",
@@ -322,7 +322,7 @@ def register_wait(
         """
         Wait for all tasks to complete.
 
-        Blocks until all tasks reach "closed" or "review" status, or timeout expires.
+        Blocks until all tasks reach "closed" or "needs_review" status, or timeout expires.
 
         Args:
             task_ids: List of task references (#N, N, path, or UUID)
@@ -442,7 +442,7 @@ def register_wait(
         name="wait_for_all_tasks",
         description=(
             "Wait for all tasks to complete. "
-            "Blocks until all tasks reach 'closed' or 'review' status, or timeout expires."
+            "Blocks until all tasks reach 'closed' or 'needs_review' status, or timeout expires."
        ),
        input_schema={
            "type": "object",
gobby/mcp_proxy/tools/pipelines/__init__.py ADDED
@@ -0,0 +1,254 @@
+"""
+Internal MCP tools for Gobby Pipeline System.
+
+Exposes functionality for:
+- list_pipelines: Discover available pipeline definitions
+- Dynamic pipeline tools: Pipelines with expose_as_tool=True are exposed as MCP tools
+
+These tools are registered with the InternalToolRegistry and accessed
+via the downstream proxy pattern (call_tool, list_tools, get_tool_schema).
+"""
+
+import logging
+from typing import Any
+
+from gobby.mcp_proxy.tools.internal import InternalToolRegistry
+from gobby.mcp_proxy.tools.pipelines._discovery import list_pipelines
+from gobby.mcp_proxy.tools.pipelines._execution import (
+    approve_pipeline,
+    get_pipeline_status,
+    reject_pipeline,
+    run_pipeline,
+)
+
+logger = logging.getLogger(__name__)
+
+__all__ = [
+    "create_pipelines_registry",
+]
+
+
+def create_pipelines_registry(
+    loader: Any | None = None,
+    executor: Any | None = None,
+    execution_manager: Any | None = None,
+) -> InternalToolRegistry:
+    """
+    Create a pipeline tool registry with all pipeline-related tools.
+
+    Args:
+        loader: WorkflowLoader instance for discovering pipelines
+        executor: PipelineExecutor instance for running pipelines
+        execution_manager: LocalPipelineExecutionManager for tracking executions
+
+    Returns:
+        InternalToolRegistry with pipeline tools registered
+    """
+    _loader = loader
+    _executor = executor
+    _execution_manager = execution_manager
+
+    registry = InternalToolRegistry(
+        name="gobby-pipelines",
+        description="Pipeline management - list, run, and monitor pipeline executions",
+    )
+
+    # Register dynamic tools for pipelines with expose_as_tool=True
+    _register_exposed_pipeline_tools(registry, _loader, _executor)
+
+    @registry.tool(
+        name="list_pipelines",
+        description="List available pipeline definitions from project and global directories.",
+    )
+    def _list_pipelines(
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        return list_pipelines(_loader, project_path)
+
+    @registry.tool(
+        name="run_pipeline",
+        description="Run a pipeline by name with given inputs.",
+    )
+    async def _run_pipeline(
+        name: str,
+        inputs: dict[str, Any] | None = None,
+        project_id: str | None = None,
+    ) -> dict[str, Any]:
+        return await run_pipeline(
+            loader=_loader,
+            executor=_executor,
+            name=name,
+            inputs=inputs or {},
+            project_id=project_id or "",
+        )
+
+    @registry.tool(
+        name="approve_pipeline",
+        description="Approve a pipeline execution that is waiting for approval.",
+    )
+    async def _approve_pipeline(
+        token: str,
+        approved_by: str | None = None,
+    ) -> dict[str, Any]:
+        return await approve_pipeline(
+            executor=_executor,
+            token=token,
+            approved_by=approved_by,
+        )
+
+    @registry.tool(
+        name="reject_pipeline",
+        description="Reject a pipeline execution that is waiting for approval.",
+    )
+    async def _reject_pipeline(
+        token: str,
+        rejected_by: str | None = None,
+    ) -> dict[str, Any]:
+        return await reject_pipeline(
+            executor=_executor,
+            token=token,
+            rejected_by=rejected_by,
+        )
+
+    @registry.tool(
+        name="get_pipeline_status",
+        description="Get the status of a pipeline execution including step details.",
+    )
+    def _get_pipeline_status(
+        execution_id: str,
+    ) -> dict[str, Any]:
+        return get_pipeline_status(
+            execution_manager=_execution_manager,
+            execution_id=execution_id,
+        )
+
+    return registry
+
+
+def _register_exposed_pipeline_tools(
+    registry: InternalToolRegistry,
+    loader: Any | None,
+    executor: Any | None,
+) -> None:
+    """
+    Register dynamic tools for pipelines with expose_as_tool=True.
+
+    Each exposed pipeline becomes an MCP tool named "pipeline:<pipeline_name>".
+
+    Args:
+        registry: The registry to add tools to
+        loader: WorkflowLoader for discovering pipelines
+        executor: PipelineExecutor for running pipelines
+    """
+    if loader is None:
+        logger.debug("Skipping dynamic pipeline tools: no loader")
+        return
+
+    try:
+        discovered = loader.discover_pipeline_workflows()
+    except Exception:
+        logger.warning("Failed to discover pipelines for dynamic tools", exc_info=True)
+        return
+
+    for workflow in discovered:
+        pipeline = workflow.definition
+
+        # Only expose pipelines with expose_as_tool=True
+        if not getattr(pipeline, "expose_as_tool", False):
+            continue
+
+        _create_pipeline_tool(registry, pipeline, loader, executor)
+
+
+def _create_pipeline_tool(
+    registry: InternalToolRegistry,
+    pipeline: Any,
+    loader: Any,
+    executor: Any | None,
+) -> None:
+    """
+    Create a dynamic tool for a single pipeline.
+
+    Args:
+        registry: The registry to add the tool to
+        pipeline: The PipelineDefinition to expose
+        loader: WorkflowLoader for loading pipelines
+        executor: PipelineExecutor for running pipelines
+    """
+    tool_name = f"pipeline:{pipeline.name}"
+    description = pipeline.description or f"Run the {pipeline.name} pipeline"
+
+    # Build input schema from pipeline inputs
+    input_schema = _build_input_schema(pipeline)
+
+    # Create closure to capture pipeline name
+    pipeline_name = pipeline.name
+
+    async def _execute_pipeline(**kwargs: Any) -> dict[str, Any]:
+        return await run_pipeline(
+            loader=loader,
+            executor=executor,
+            name=pipeline_name,
+            inputs=kwargs,
+            project_id="",
+        )
+
+    # Register the tool with the schema
+    registry.register(
+        name=tool_name,
+        description=description,
+        func=_execute_pipeline,
+        input_schema=input_schema,
+    )
+
+    logger.debug(f"Registered dynamic pipeline tool: {tool_name}")
+
+
+def _build_input_schema(pipeline: Any) -> dict[str, Any]:
+    """
+    Build JSON Schema for pipeline inputs.
+
+    Args:
+        pipeline: The PipelineDefinition
+
+    Returns:
+        JSON Schema dict for the pipeline's inputs
+    """
+    properties = {}
+    required = []
+
+    for name, input_def in pipeline.inputs.items():
+        if isinstance(input_def, dict):
+            # Input is already a schema-like dict
+            prop = {}
+            if "type" in input_def:
+                prop["type"] = input_def["type"]
+            else:
+                prop["type"] = "string"
+
+            if "description" in input_def:
+                prop["description"] = input_def["description"]
+
+            if "default" in input_def:
+                prop["default"] = input_def["default"]
+            else:
+                # No default means required
+                required.append(name)
+
+            properties[name] = prop
+        else:
+            # Input is a simple default value
+            properties[name] = {
+                "type": "string",
+                "default": input_def,
+            }
+
+    schema: dict[str, Any] = {
+        "type": "object",
+        "properties": properties,
+    }
+
+    if required:
+        schema["required"] = required
+
+    return schema
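
To make the schema mapping concrete, here is a worked example of what _build_input_schema would produce for a hypothetical pipeline whose inputs mix schema-style dicts and bare defaults (input names invented for illustration):

    inputs = {
        "branch": {"type": "string", "description": "Branch to build"},  # no default, so required
        "dry_run": {"type": "boolean", "default": False},                # has default, so optional
        "retries": "3",                                                  # bare value: string with default
    }

    # Expected result:
    # {
    #     "type": "object",
    #     "properties": {
    #         "branch": {"type": "string", "description": "Branch to build"},
    #         "dry_run": {"type": "boolean", "default": False},
    #         "retries": {"type": "string", "default": "3"},
    #     },
    #     "required": ["branch"],
    # }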
gobby/mcp_proxy/tools/pipelines/_discovery.py ADDED
@@ -0,0 +1,67 @@
+"""Pipeline discovery tools."""
+
+import logging
+from typing import Any
+
+logger = logging.getLogger(__name__)
+
+
+def list_pipelines(
+    loader: Any,
+    project_path: str | None = None,
+) -> dict[str, Any]:
+    """
+    List available pipeline definitions.
+
+    Args:
+        loader: WorkflowLoader instance
+        project_path: Optional project path for project-specific pipelines
+
+    Returns:
+        Dict with success status and list of pipeline info
+    """
+    if not loader:
+        return {
+            "success": False,
+            "error": "No loader configured",
+            "pipelines": [],
+        }
+
+    try:
+        discovered = loader.discover_pipeline_workflows(project_path=project_path)
+
+        pipelines = []
+        for workflow in discovered:
+            pipeline_info = {
+                "name": workflow.name,
+                "description": workflow.definition.description,
+                "is_project": workflow.is_project,
+                "path": str(workflow.path),
+                "priority": workflow.priority,
+            }
+
+            # Add step count if available
+            if hasattr(workflow.definition, "steps"):
+                pipeline_info["step_count"] = len(workflow.definition.steps)
+
+            pipelines.append(pipeline_info)
+
+        return {
+            "success": True,
+            "pipelines": pipelines,
+            "count": len(pipelines),
+        }
+
+    except (FileNotFoundError, ValueError) as e:
+        return {
+            "success": False,
+            "error": str(e),
+            "pipelines": [],
+        }
+    except Exception:
+        logger.exception("Unexpected error discovering pipelines")
+        return {
+            "success": False,
+            "error": "Internal error during pipeline discovery",
+            "pipelines": [],
+        }
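
For reference, a successful discovery call returns a payload shaped like the following (all values hypothetical, including the pipeline path):

    # list_pipelines(loader, project_path="/path/to/project")
    # {
    #     "success": True,
    #     "count": 1,
    #     "pipelines": [
    #         {
    #             "name": "release",
    #             "description": "Build and publish a release",
    #             "is_project": True,
    #             "path": "/path/to/project/.gobby/workflows/release.yaml",
    #             "priority": 10,
    #             "step_count": 4,
    #         }
    #     ],
    # }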