gobby 0.2.9__py3-none-any.whl → 0.2.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (134)
  1. gobby/__init__.py +1 -1
  2. gobby/adapters/__init__.py +6 -0
  3. gobby/adapters/base.py +11 -2
  4. gobby/adapters/claude_code.py +2 -2
  5. gobby/adapters/codex_impl/adapter.py +38 -43
  6. gobby/adapters/copilot.py +324 -0
  7. gobby/adapters/cursor.py +373 -0
  8. gobby/adapters/gemini.py +2 -26
  9. gobby/adapters/windsurf.py +359 -0
  10. gobby/agents/definitions.py +162 -2
  11. gobby/agents/isolation.py +33 -1
  12. gobby/agents/pty_reader.py +192 -0
  13. gobby/agents/registry.py +10 -1
  14. gobby/agents/runner.py +24 -8
  15. gobby/agents/sandbox.py +8 -3
  16. gobby/agents/session.py +4 -0
  17. gobby/agents/spawn.py +9 -2
  18. gobby/agents/spawn_executor.py +49 -61
  19. gobby/agents/spawners/command_builder.py +4 -4
  20. gobby/app_context.py +5 -0
  21. gobby/cli/__init__.py +4 -0
  22. gobby/cli/install.py +259 -4
  23. gobby/cli/installers/__init__.py +12 -0
  24. gobby/cli/installers/copilot.py +242 -0
  25. gobby/cli/installers/cursor.py +244 -0
  26. gobby/cli/installers/shared.py +3 -0
  27. gobby/cli/installers/windsurf.py +242 -0
  28. gobby/cli/pipelines.py +639 -0
  29. gobby/cli/sessions.py +3 -1
  30. gobby/cli/skills.py +209 -0
  31. gobby/cli/tasks/crud.py +6 -5
  32. gobby/cli/tasks/search.py +1 -1
  33. gobby/cli/ui.py +116 -0
  34. gobby/cli/workflows.py +38 -17
  35. gobby/config/app.py +5 -0
  36. gobby/config/skills.py +23 -2
  37. gobby/hooks/broadcaster.py +9 -0
  38. gobby/hooks/event_handlers/_base.py +6 -1
  39. gobby/hooks/event_handlers/_session.py +44 -130
  40. gobby/hooks/events.py +48 -0
  41. gobby/hooks/hook_manager.py +25 -3
  42. gobby/install/copilot/hooks/hook_dispatcher.py +203 -0
  43. gobby/install/cursor/hooks/hook_dispatcher.py +203 -0
  44. gobby/install/gemini/hooks/hook_dispatcher.py +8 -0
  45. gobby/install/windsurf/hooks/hook_dispatcher.py +205 -0
  46. gobby/llm/__init__.py +14 -1
  47. gobby/llm/claude.py +217 -1
  48. gobby/llm/service.py +149 -0
  49. gobby/mcp_proxy/instructions.py +9 -27
  50. gobby/mcp_proxy/models.py +1 -0
  51. gobby/mcp_proxy/registries.py +56 -9
  52. gobby/mcp_proxy/server.py +6 -2
  53. gobby/mcp_proxy/services/tool_filter.py +7 -0
  54. gobby/mcp_proxy/services/tool_proxy.py +19 -1
  55. gobby/mcp_proxy/stdio.py +37 -21
  56. gobby/mcp_proxy/tools/agents.py +7 -0
  57. gobby/mcp_proxy/tools/hub.py +30 -1
  58. gobby/mcp_proxy/tools/orchestration/cleanup.py +5 -5
  59. gobby/mcp_proxy/tools/orchestration/monitor.py +1 -1
  60. gobby/mcp_proxy/tools/orchestration/orchestrate.py +8 -3
  61. gobby/mcp_proxy/tools/orchestration/review.py +17 -4
  62. gobby/mcp_proxy/tools/orchestration/wait.py +7 -7
  63. gobby/mcp_proxy/tools/pipelines/__init__.py +254 -0
  64. gobby/mcp_proxy/tools/pipelines/_discovery.py +67 -0
  65. gobby/mcp_proxy/tools/pipelines/_execution.py +281 -0
  66. gobby/mcp_proxy/tools/sessions/_crud.py +4 -4
  67. gobby/mcp_proxy/tools/sessions/_handoff.py +1 -1
  68. gobby/mcp_proxy/tools/skills/__init__.py +184 -30
  69. gobby/mcp_proxy/tools/spawn_agent.py +229 -14
  70. gobby/mcp_proxy/tools/tasks/_context.py +8 -0
  71. gobby/mcp_proxy/tools/tasks/_crud.py +27 -1
  72. gobby/mcp_proxy/tools/tasks/_helpers.py +1 -1
  73. gobby/mcp_proxy/tools/tasks/_lifecycle.py +125 -8
  74. gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +2 -1
  75. gobby/mcp_proxy/tools/tasks/_search.py +1 -1
  76. gobby/mcp_proxy/tools/workflows/__init__.py +9 -2
  77. gobby/mcp_proxy/tools/workflows/_lifecycle.py +12 -1
  78. gobby/mcp_proxy/tools/workflows/_query.py +45 -26
  79. gobby/mcp_proxy/tools/workflows/_terminal.py +39 -3
  80. gobby/mcp_proxy/tools/worktrees.py +54 -15
  81. gobby/memory/context.py +5 -5
  82. gobby/runner.py +108 -6
  83. gobby/servers/http.py +7 -1
  84. gobby/servers/routes/__init__.py +2 -0
  85. gobby/servers/routes/admin.py +44 -0
  86. gobby/servers/routes/mcp/endpoints/execution.py +18 -25
  87. gobby/servers/routes/mcp/hooks.py +10 -1
  88. gobby/servers/routes/pipelines.py +227 -0
  89. gobby/servers/websocket.py +314 -1
  90. gobby/sessions/analyzer.py +87 -1
  91. gobby/sessions/manager.py +5 -5
  92. gobby/sessions/transcripts/__init__.py +3 -0
  93. gobby/sessions/transcripts/claude.py +5 -0
  94. gobby/sessions/transcripts/codex.py +5 -0
  95. gobby/sessions/transcripts/gemini.py +5 -0
  96. gobby/skills/hubs/__init__.py +25 -0
  97. gobby/skills/hubs/base.py +234 -0
  98. gobby/skills/hubs/claude_plugins.py +328 -0
  99. gobby/skills/hubs/clawdhub.py +289 -0
  100. gobby/skills/hubs/github_collection.py +465 -0
  101. gobby/skills/hubs/manager.py +263 -0
  102. gobby/skills/hubs/skillhub.py +342 -0
  103. gobby/storage/memories.py +4 -4
  104. gobby/storage/migrations.py +95 -3
  105. gobby/storage/pipelines.py +367 -0
  106. gobby/storage/sessions.py +23 -4
  107. gobby/storage/skills.py +1 -1
  108. gobby/storage/tasks/_aggregates.py +2 -2
  109. gobby/storage/tasks/_lifecycle.py +4 -4
  110. gobby/storage/tasks/_models.py +7 -1
  111. gobby/storage/tasks/_queries.py +3 -3
  112. gobby/sync/memories.py +4 -3
  113. gobby/tasks/commits.py +48 -17
  114. gobby/workflows/actions.py +75 -0
  115. gobby/workflows/context_actions.py +246 -5
  116. gobby/workflows/definitions.py +119 -1
  117. gobby/workflows/detection_helpers.py +23 -11
  118. gobby/workflows/enforcement/task_policy.py +18 -0
  119. gobby/workflows/engine.py +20 -1
  120. gobby/workflows/evaluator.py +8 -5
  121. gobby/workflows/lifecycle_evaluator.py +57 -26
  122. gobby/workflows/loader.py +567 -30
  123. gobby/workflows/lobster_compat.py +147 -0
  124. gobby/workflows/pipeline_executor.py +801 -0
  125. gobby/workflows/pipeline_state.py +172 -0
  126. gobby/workflows/pipeline_webhooks.py +206 -0
  127. gobby/workflows/premature_stop.py +5 -0
  128. gobby/worktrees/git.py +135 -20
  129. {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/METADATA +56 -22
  130. {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/RECORD +134 -106
  131. {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/WHEEL +0 -0
  132. {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/entry_points.txt +0 -0
  133. {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/licenses/LICENSE.md +0 -0
  134. {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/top_level.txt +0 -0
gobby/mcp_proxy/tools/workflows/__init__.py CHANGED
@@ -254,13 +254,20 @@ def create_workflows_registry(
  description=(
  "Close the current terminal window/pane (agent self-termination). "
  "Launches ~/.gobby/scripts/agent_shutdown.sh which handles "
- "terminal-specific shutdown (tmux, iTerm, etc.). Rebuilds script if missing."
+ "terminal-specific shutdown (tmux, iTerm, etc.). Rebuilds script if missing. "
+ "Pass session_id to reliably target the correct terminal PID."
  ),
  )
  async def _close_terminal(
+ session_id: str | None = None,
  signal: str = "TERM",
  delay_ms: int = 0,
  ) -> dict[str, Any]:
- return await close_terminal(signal, delay_ms)
+ return await close_terminal(
+ session_id=session_id,
+ session_manager=_session_manager,
+ signal=signal,
+ delay_ms=delay_ms,
+ )
 
  return registry
gobby/mcp_proxy/tools/workflows/_lifecycle.py CHANGED
@@ -13,7 +13,7 @@ from gobby.mcp_proxy.tools.workflows._resolution import (
  from gobby.storage.database import DatabaseProtocol
  from gobby.storage.sessions import LocalSessionManager
  from gobby.utils.project_context import get_workflow_project_path
- from gobby.workflows.definitions import WorkflowState
+ from gobby.workflows.definitions import WorkflowDefinition, WorkflowState
  from gobby.workflows.loader import WorkflowLoader
  from gobby.workflows.state_manager import WorkflowStateManager
 
@@ -65,6 +65,13 @@ def activate_workflow(
  "error": f"Workflow '{name}' is lifecycle type (auto-runs on events, not manually activated)",
  }
 
+ # This function only supports step-based workflows (WorkflowDefinition)
+ if not isinstance(definition, WorkflowDefinition):
+ return {
+ "success": False,
+ "error": f"'{name}' is a pipeline. Use pipeline execution tools instead.",
+ }
+
  # Require explicit session_id to prevent cross-session bleed
  if not session_id:
  return {
@@ -280,6 +287,10 @@ def request_step_transition(
  if not definition:
  return {"success": False, "error": f"Workflow '{state.workflow_name}' not found"}
 
+ # Transitions only apply to step-based workflows
+ if not isinstance(definition, WorkflowDefinition):
+ return {"success": False, "error": "Transitions are not supported for pipelines"}
+
  if not any(s.name == to_step for s in definition.steps):
  return {
  "success": False,
gobby/mcp_proxy/tools/workflows/_query.py CHANGED
@@ -11,6 +11,7 @@ import yaml
  from gobby.mcp_proxy.tools.workflows._resolution import resolve_session_id
  from gobby.storage.sessions import LocalSessionManager
  from gobby.utils.project_context import get_workflow_project_path
+ from gobby.workflows.definitions import WorkflowDefinition
  from gobby.workflows.loader import WorkflowLoader
  from gobby.workflows.state_manager import WorkflowStateManager
 
@@ -45,32 +46,50 @@ def get_workflow(
  if not definition:
  return {"success": False, "error": f"Workflow '{name}' not found"}
 
- return {
- "success": True,
- "name": definition.name,
- "type": definition.type,
- "description": definition.description,
- "version": definition.version,
- "steps": (
- [
- {
- "name": s.name,
- "description": s.description,
- "allowed_tools": s.allowed_tools,
- "blocked_tools": s.blocked_tools,
- }
- for s in definition.steps
- ]
- if definition.steps
- else []
- ),
- "triggers": (
- {name: len(actions) for name, actions in definition.triggers.items()}
- if definition.triggers
- else {}
- ),
- "settings": definition.settings,
- }
+ # Handle WorkflowDefinition vs PipelineDefinition
+ if isinstance(definition, WorkflowDefinition):
+ return {
+ "success": True,
+ "name": definition.name,
+ "type": definition.type,
+ "description": definition.description,
+ "version": definition.version,
+ "steps": (
+ [
+ {
+ "name": s.name,
+ "description": s.description,
+ "allowed_tools": s.allowed_tools,
+ "blocked_tools": s.blocked_tools,
+ }
+ for s in definition.steps
+ ]
+ if definition.steps
+ else []
+ ),
+ "triggers": (
+ {name: len(actions) for name, actions in definition.triggers.items()}
+ if definition.triggers
+ else {}
+ ),
+ "settings": definition.settings,
+ }
+ else:
+ # PipelineDefinition
+ return {
+ "success": True,
+ "name": definition.name,
+ "type": definition.type,
+ "description": definition.description,
+ "version": definition.version,
+ "steps": (
+ [{"id": s.id, "exec": s.exec, "prompt": s.prompt} for s in definition.steps]
+ if definition.steps
+ else []
+ ),
+ "triggers": {},
+ "settings": {},
+ }
 
 
  def list_workflows(
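The query and lifecycle tools above now branch on the class of the loaded definition instead of assuming every definition is step-based. A minimal sketch of that dispatch, assuming the loader can return either a WorkflowDefinition or a pipeline definition whose steps expose id/exec/prompt as in the diff; the helper name summarize_definition is illustrative, not part of gobby's API.

```python
# Hedged sketch of the shared dispatch pattern, not gobby's actual helper.
from typing import Any

from gobby.workflows.definitions import WorkflowDefinition


def summarize_definition(definition: Any) -> dict[str, Any]:
    """Summarize either a step-based workflow or a pipeline definition."""
    if isinstance(definition, WorkflowDefinition):
        # Step-based workflows expose named steps with tool restrictions.
        return {"kind": "workflow", "steps": [s.name for s in definition.steps or []]}
    # Anything else is treated as a pipeline; its steps carry id/exec/prompt.
    return {"kind": "pipeline", "steps": [s.id for s in definition.steps or []]}
```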
gobby/mcp_proxy/tools/workflows/_terminal.py CHANGED
@@ -2,20 +2,27 @@
  Terminal tools for workflows.
  """
 
+ from __future__ import annotations
+
  import asyncio
  import logging
  import os
  import stat
  import subprocess # nosec B404
  from pathlib import Path
- from typing import Any
+ from typing import TYPE_CHECKING, Any
 
  from gobby.paths import get_install_dir
 
+ if TYPE_CHECKING:
+ from gobby.storage.sessions import LocalSessionManager
+
  logger = logging.getLogger(__name__)
 
 
  async def close_terminal(
+ session_id: str | None = None,
+ session_manager: LocalSessionManager | None = None,
  signal: str = "TERM",
  delay_ms: int = 0,
  ) -> dict[str, Any]:
@@ -30,6 +37,10 @@ async def close_terminal(
  (tmux, iTerm, Terminal.app, Ghostty, Kitty, WezTerm, etc.).
 
  Args:
+ session_id: Session ID to look up terminal PID from. Accepts #N, N,
+ UUID, or prefix. If provided with session_manager, the terminal
+ PID is resolved from session.terminal_context.parent_pid.
+ session_manager: LocalSessionManager for session lookups.
  signal: Signal to use for shutdown (TERM, KILL, INT). Default: TERM.
  delay_ms: Optional delay in milliseconds before shutdown. Default: 0.
 
@@ -112,26 +123,51 @@ async def close_terminal(
  if delay_ms > 0:
  await asyncio.sleep(delay_ms / 1000.0)
 
+ # Resolve terminal PID from session context if available
+ target_pid: int | None = None
+ if session_id and session_manager:
+ from gobby.mcp_proxy.tools.workflows._resolution import resolve_session_id
+
+ try:
+ resolved_id = resolve_session_id(session_manager, session_id)
+ if resolved_id:
+ session = session_manager.get(resolved_id)
+ if session and session.terminal_context:
+ pid_value = session.terminal_context.get("parent_pid")
+ if pid_value is not None:
+ target_pid = int(pid_value)
+ logger.info(f"Resolved terminal PID {target_pid} from session {session_id}")
+ except Exception as e:
+ logger.warning(f"Failed to resolve terminal PID from session: {e}")
+
  # Launch the script
  try:
  # Run in background - we don't wait for it since it kills our process
  env = os.environ.copy()
 
+ # Pass PID as first arg if available, otherwise script will discover via PPID
+ pid_arg = str(target_pid) if target_pid else ""
  subprocess.Popen( # nosec B603 - script path is from gobby scripts directory
- [str(script_path), signal.upper(), "0"], # Delay already applied
+ [str(script_path), pid_arg, signal.upper(), "0"], # Delay already applied
  env=env,
  start_new_session=True, # Detach from parent
  stdout=subprocess.DEVNULL,
  stderr=subprocess.DEVNULL,
  )
 
- return {
+ result: dict[str, Any] = {
  "success": True,
  "message": "Shutdown script launched",
  "script_path": str(script_path),
  "script_rebuilt": script_rebuilt,
  "signal": signal.upper(),
  }
+ if target_pid:
+ result["target_pid"] = target_pid
+ result["pid_source"] = "session_terminal_context"
+ else:
+ result["pid_source"] = "ppid_discovery"
+ return result
  except OSError as e:
  return {
  "success": False,
gobby/mcp_proxy/tools/worktrees.py CHANGED
@@ -169,14 +169,15 @@ def _copy_project_json_to_worktree(
 
 
  def _install_provider_hooks(
- provider: Literal["claude", "gemini", "codex", "antigravity"] | None,
+ provider: Literal["claude", "gemini", "codex", "antigravity", "cursor", "windsurf", "copilot"]
+ | None,
  worktree_path: str | Path,
  ) -> bool:
  """
  Install CLI hooks for the specified provider in the worktree.
 
  Args:
- provider: Provider name ('claude', 'gemini', 'antigravity', or None)
+ provider: Provider name ('claude', 'gemini', 'antigravity', 'cursor', 'windsurf', 'copilot', or None)
  worktree_path: Path to worktree directory
 
  Returns:
@@ -196,6 +197,16 @@ def _install_provider_hooks(
  return True
  else:
  logger.warning(f"Failed to install Claude hooks: {result.get('error')}")
+ elif provider in ("cursor", "windsurf", "copilot"):
+ # These editors use Claude hooks format
+ from gobby.cli.installers.claude import install_claude
+
+ result = install_claude(worktree_path_obj)
+ if result["success"]:
+ logger.info(f"Installed {provider} hooks in worktree: {worktree_path}")
+ return True
+ else:
+ logger.warning(f"Failed to install {provider} hooks: {result.get('error')}")
  elif provider == "gemini":
  from gobby.cli.installers.gemini import install_gemini
 
@@ -326,7 +337,10 @@ def create_worktrees_registry(
  worktree_path: str | None = None,
  create_branch: bool = True,
  project_path: str | None = None,
- provider: Literal["claude", "gemini", "codex", "antigravity"] | None = None,
+ provider: Literal[
+ "claude", "gemini", "codex", "antigravity", "cursor", "windsurf", "copilot"
+ ]
+ | None = None,
  ) -> dict[str, Any]:
  """
  Create a new git worktree.
@@ -338,7 +352,7 @@
  worktree_path: Optional custom path (defaults to ../{branch_name}).
  create_branch: Whether to create a new branch (default: True).
  project_path: Path to project directory (pass cwd from CLI).
- provider: CLI provider to install hooks for (claude, gemini, codex, antigravity).
+ provider: CLI provider to install hooks for (claude, gemini, codex, antigravity, cursor, windsurf, copilot).
  If specified, installs hooks so agents can communicate with daemon.
 
  Returns:
@@ -451,7 +465,7 @@
  async def list_worktrees(
  status: str | None = None,
  agent_session_id: str | None = None,
- limit: int = 50,
+ limit: int | str = 50,
  ) -> dict[str, Any]:
  """
  List worktrees with optional filters.
@@ -464,6 +478,9 @@
  Returns:
  Dict with list of worktrees.
  """
+ # Handle string inputs from MCP
+ limit = int(limit) if isinstance(limit, str) else limit
+
  # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
  resolved_session_id = agent_session_id
  if agent_session_id:
@@ -569,7 +586,7 @@
  )
  async def delete_worktree(
  worktree_id: str,
- force: bool = False,
+ force: bool | str = False,
  project_path: str | None = None,
  ) -> dict[str, Any]:
  """
@@ -591,6 +608,9 @@
  Returns:
  Dict with success status.
  """
+ # Handle string inputs from MCP
+ force = force in (True, "true", "True", "1") if isinstance(force, str) else force
+
  worktree = worktree_storage.get(worktree_id)
 
  if not worktree:
@@ -611,8 +631,11 @@
  # nosec B110 - if context resolution fails, continue without git manager
  pass
 
- # Check for uncommitted changes if not forcing
- if resolved_git_mgr and Path(worktree.worktree_path).exists():
+ # Check if worktree path exists
+ worktree_exists = Path(worktree.worktree_path).exists()
+
+ # Check for uncommitted changes if not forcing (only if path exists)
+ if resolved_git_mgr and worktree_exists:
  status = resolved_git_mgr.get_worktree_status(worktree.worktree_path)
  if status and status.has_uncommitted_changes and not force:
  return {
@@ -621,8 +644,8 @@
  "uncommitted_changes": True,
  }
 
- # Delete git worktree
- if resolved_git_mgr:
+ # Delete git worktree (only if path exists - handles orphaned DB records)
+ if resolved_git_mgr and worktree_exists:
  result = resolved_git_mgr.delete_worktree(
  worktree.worktree_path,
  force=force,
@@ -634,6 +657,11 @@
  "success": False,
  "error": result.error or "Failed to delete git worktree",
  }
+ elif not worktree_exists:
+ # Worktree path gone (manually deleted) - just clean up DB record
+ logger.info(
+ f"Worktree path {worktree.worktree_path} doesn't exist, cleaning up DB record only"
+ )
 
  # Delete database record
  deleted = worktree_storage.delete(worktree_id)
@@ -744,8 +772,8 @@
  )
  async def detect_stale_worktrees(
  project_path: str | None = None,
- hours: int = 24,
- limit: int = 50,
+ hours: int | str = 24,
+ limit: int | str = 50,
  ) -> dict[str, Any]:
  """
  Find stale worktrees (no activity for N hours).
@@ -758,6 +786,10 @@
  Returns:
  Dict with list of stale worktrees.
  """
+ # Handle string inputs from MCP (JSON params come as strings)
+ hours = int(hours) if isinstance(hours, str) else hours
+ limit = int(limit) if isinstance(limit, str) else limit
+
  _, resolved_project_id, error = _resolve_project_context(
  project_path, git_manager, project_id
  )
@@ -794,9 +826,9 @@
  )
  async def cleanup_stale_worktrees(
  project_path: str | None = None,
- hours: int = 24,
- dry_run: bool = True,
- delete_git: bool = False,
+ hours: int | str = 24,
+ dry_run: bool | str = True,
+ delete_git: bool | str = False,
  ) -> dict[str, Any]:
  """
  Cleanup stale worktrees.
@@ -810,6 +842,13 @@
  Returns:
  Dict with cleanup results.
  """
+ # Handle string inputs from MCP (JSON params come as strings)
+ hours = int(hours) if isinstance(hours, str) else hours
+ dry_run = dry_run in (True, "true", "True", "1") if isinstance(dry_run, str) else dry_run
+ delete_git = (
+ delete_git in (True, "true", "True", "1") if isinstance(delete_git, str) else delete_git
+ )
+
  resolved_git_manager, resolved_project_id, error = _resolve_project_context(
  project_path, git_manager, project_id
  )
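Several worktree tools above widen their parameters to int | str and bool | str because MCP clients may pass JSON values through as strings. A minimal sketch of that coercion pattern factored into helpers; the helper names are illustrative, not part of gobby's API.

```python
# Hedged sketch of the coercion the worktree tools apply inline to MCP inputs.
def _coerce_int(value: int | str) -> int:
    return int(value) if isinstance(value, str) else value


def _coerce_bool(value: bool | str) -> bool:
    return value in (True, "true", "True", "1") if isinstance(value, str) else value


# Mirrors cleanup_stale_worktrees' handling of stringly-typed parameters:
hours = _coerce_int("24")          # -> 24
dry_run = _coerce_bool("true")     # -> True
delete_git = _coerce_bool(False)   # -> False
```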
gobby/memory/context.py CHANGED
@@ -50,14 +50,14 @@ def build_memory_context(memories: list[Memory]) -> str:
 
  # 1. Project Context
  if context_memories:
- parts.append("## Project Context\n")
+ parts.append("## Project Context")
  for mem in context_memories:
- parts.append(f"{mem.content}\n")
+ parts.append(mem.content)
  parts.append("")
 
  # 2. Preferences
  if pref_memories:
- parts.append("## Preferences\n")
+ parts.append("## Preferences")
  for mem in pref_memories:
  content = _strip_leading_bullet(mem.content)
  if content: # Skip empty content
@@ -66,7 +66,7 @@ def build_memory_context(memories: list[Memory]) -> str:
 
  # 3. Patterns
  if pattern_memories:
- parts.append("## Patterns\n")
+ parts.append("## Patterns")
  for mem in pattern_memories:
  content = _strip_leading_bullet(mem.content)
  if content: # Skip empty content
@@ -75,7 +75,7 @@ def build_memory_context(memories: list[Memory]) -> str:
 
  # 4. Facts/Other
  if fact_memories:
- parts.append("## Facts\n")
+ parts.append("## Facts")
  for mem in fact_memories:
  content = _strip_leading_bullet(mem.content)
  if content: # Skip empty content
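This change drops the embedded newlines from the section headers and memory content. A small illustration of the effect, assuming the parts list is ultimately joined with "\n" (the join itself is outside this hunk): the trailing "\n" previously produced doubled blank lines after each header.

```python
# Hedged illustration, assuming the final render is "\n".join(parts)
# (the join is not shown in this hunk).
old_parts = ["## Preferences\n", "- prefer pytest", ""]
new_parts = ["## Preferences", "- prefer pytest", ""]

print("\n".join(old_parts))  # header followed by an extra blank line
print("\n".join(new_parts))  # header immediately followed by the entry
```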
gobby/runner.py CHANGED
@@ -1,10 +1,12 @@
+ from __future__ import annotations
+
  import asyncio
  import logging
  import os
  import signal
  import sys
  from pathlib import Path
- from typing import Any
+ from typing import TYPE_CHECKING, Any
 
  import uvicorn
 
@@ -37,6 +39,12 @@ from gobby.worktrees.git import WorktreeGitManager
 
  os.environ["TOKENIZERS_PARALLELISM"] = "false"
 
+ # Type hints for pipeline components (imported lazily at runtime)
+ if TYPE_CHECKING:
+ from gobby.storage.pipelines import LocalPipelineExecutionManager
+ from gobby.workflows.loader import WorkflowLoader
+ from gobby.workflows.pipeline_executor import PipelineExecutor
+
  logger = logging.getLogger(__name__)
 
 
@@ -182,6 +190,36 @@ class GobbyRunner:
  except Exception as e:
  logger.debug(f"Could not initialize git manager: {e}")
 
+ # Initialize Pipeline Components
+ self.workflow_loader: WorkflowLoader | None = None
+ self.pipeline_execution_manager: LocalPipelineExecutionManager | None = None
+ self.pipeline_executor: PipelineExecutor | None = None
+ try:
+ from gobby.storage.pipelines import LocalPipelineExecutionManager
+ from gobby.workflows.loader import WorkflowLoader
+ from gobby.workflows.pipeline_executor import PipelineExecutor
+
+ self.workflow_loader = WorkflowLoader()
+ if self.project_id:
+ self.pipeline_execution_manager = LocalPipelineExecutionManager(
+ db=self.database,
+ project_id=self.project_id,
+ )
+ if self.llm_service:
+ self.pipeline_executor = PipelineExecutor(
+ db=self.database,
+ execution_manager=self.pipeline_execution_manager,
+ llm_service=self.llm_service,
+ loader=self.workflow_loader,
+ )
+ logger.debug("Pipeline executor initialized")
+ else:
+ logger.debug("Pipeline executor not initialized: LLM service not available")
+ else:
+ logger.debug("Pipeline execution manager not initialized: no project context")
+ except Exception as e:
+ logger.warning(f"Failed to initialize pipeline components: {e}")
+
  # Initialize Agent Runner (Phase 7 - Subagents)
  # Create executor registry for lazy executor creation
  self.executor_registry = ExecutorRegistry(config=self.config)
@@ -234,6 +272,9 @@ class GobbyRunner:
  clone_storage=self.clone_storage,
  git_manager=self.git_manager,
  project_id=self.project_id,
+ pipeline_executor=self.pipeline_executor,
+ workflow_loader=self.workflow_loader,
+ pipeline_execution_manager=self.pipeline_execution_manager,
  )
 
  self.http_server = HTTPServer(
@@ -252,7 +293,7 @@ class GobbyRunner:
  self.websocket_server: WebSocketServer | None = None
  if self.config.websocket and getattr(self.config.websocket, "enabled", True):
  websocket_config = WebSocketConfig(
- host="localhost",
+ host=self.config.bind_host,
  port=self.config.websocket.port,
  ping_interval=self.config.websocket.ping_interval,
  ping_timeout=self.config.websocket.ping_timeout,
@@ -260,6 +301,7 @@ class GobbyRunner:
  self.websocket_server = WebSocketServer(
  config=websocket_config,
  mcp_manager=self.mcp_proxy,
+ llm_service=self.llm_service,
  )
  # Pass WebSocket server reference to HTTP server for broadcasting
  self.http_server.websocket_server = self.websocket_server
@@ -273,6 +315,9 @@ class GobbyRunner:
  # Register agent event callback for WebSocket broadcasting
  self._setup_agent_event_broadcasting()
 
+ # Register pipeline event callback for WebSocket broadcasting
+ self._setup_pipeline_event_broadcasting()
+
  def _init_database(self) -> DatabaseProtocol:
  """Initialize hub database."""
  hub_db_path = Path(self.config.database_path).expanduser()
@@ -287,13 +332,23 @@ class GobbyRunner:
  return hub_db
 
  def _setup_agent_event_broadcasting(self) -> None:
- """Set up WebSocket broadcasting for agent lifecycle events."""
+ """Set up WebSocket broadcasting for agent lifecycle events and PTY reading."""
+ from gobby.agents.pty_reader import get_pty_reader_manager
  from gobby.agents.registry import get_running_agent_registry
 
  if not self.websocket_server:
  return
 
  registry = get_running_agent_registry()
+ pty_manager = get_pty_reader_manager()
+
+ # Set up PTY output callback to broadcast via WebSocket
+ async def broadcast_terminal_output(run_id: str, data: str) -> None:
+ """Broadcast terminal output via WebSocket."""
+ if self.websocket_server:
+ await self.websocket_server.broadcast_terminal_output(run_id, data)
+
+ pty_manager.set_output_callback(broadcast_terminal_output)
 
  def broadcast_agent_event(event_type: str, run_id: str, data: dict[str, Any]) -> None:
  """Broadcast agent events via WebSocket (non-blocking)."""
@@ -309,6 +364,32 @@ class GobbyRunner:
  except Exception as e:
  logger.warning(f"Failed to broadcast agent event {event_type}: {e}")
 
+ # Handle PTY reader start/stop for embedded agents
+ if event_type == "agent_started" and data.get("mode") == "embedded":
+ # Start PTY reader for embedded agents
+ agent = registry.get(run_id)
+ if agent and agent.master_fd is not None:
+
+ async def start_pty_reader() -> None:
+ await pty_manager.start_reader(agent)
+
+ task = asyncio.create_task(start_pty_reader())
+ task.add_done_callback(_log_broadcast_exception)
+
+ elif event_type in (
+ "agent_completed",
+ "agent_failed",
+ "agent_cancelled",
+ "agent_timeout",
+ ):
+ # Stop PTY reader when agent finishes
+
+ async def stop_pty_reader() -> None:
+ await pty_manager.stop_reader(run_id)
+
+ task = asyncio.create_task(stop_pty_reader())
+ task.add_done_callback(_log_broadcast_exception)
+
  # Create async task to broadcast and attach exception callback
  task = asyncio.create_task(
  self.websocket_server.broadcast_agent_event(
@@ -324,7 +405,29 @@ class GobbyRunner:
  task.add_done_callback(_log_broadcast_exception)
 
  registry.add_event_callback(broadcast_agent_event)
- logger.debug("Agent event broadcasting enabled")
+ logger.debug("Agent event broadcasting and PTY reading enabled")
+
+ def _setup_pipeline_event_broadcasting(self) -> None:
+ """Set up WebSocket broadcasting for pipeline execution events."""
+ if not self.websocket_server:
+ return
+
+ if not self.pipeline_executor:
+ logger.debug("Pipeline event broadcasting skipped: no pipeline executor")
+ return
+
+ async def broadcast_pipeline_event(event: str, execution_id: str, **kwargs: Any) -> None:
+ """Broadcast pipeline events via WebSocket."""
+ if self.websocket_server:
+ await self.websocket_server.broadcast_pipeline_event(
+ event=event,
+ execution_id=execution_id,
+ **kwargs,
+ )
+
+ # Set the callback on the pipeline executor
+ self.pipeline_executor.event_callback = broadcast_pipeline_event
+ logger.debug("Pipeline event broadcasting enabled")
 
  async def _metrics_cleanup_loop(self) -> None:
  """Background loop for periodic metrics cleanup (every 24 hours)."""
@@ -433,11 +536,10 @@ class GobbyRunner:
  websocket_task = asyncio.create_task(self.websocket_server.start())
 
  # Start HTTP server
- # nosec B104: 0.0.0.0 binding is intentional - daemon serves local network
  graceful_shutdown_timeout = 15
  config = uvicorn.Config(
  self.http_server.app,
- host="0.0.0.0", # nosec B104 - local daemon needs network access
+ host=self.config.bind_host,
  port=self.http_server.port,
  log_level="warning",
  access_log=False,
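The runner now forwards embedded agents' PTY output to WebSocket clients through a callback on the PTY reader manager. A minimal sketch of attaching a different consumer to the same hook; get_pty_reader_manager and set_output_callback are taken from the diff, while the log-file consumer is an illustrative stand-in for the daemon's WebSocket broadcast.

```python
# Hedged sketch: attaching a custom consumer to the PTY reader manager.
# Writing output to a per-run log file substitutes for broadcast_terminal_output.
from pathlib import Path

from gobby.agents.pty_reader import get_pty_reader_manager


async def log_terminal_output(run_id: str, data: str) -> None:
    """Append raw PTY output to a per-run log file."""
    log_path = Path(f"/tmp/gobby-agent-{run_id}.log")
    with log_path.open("a", encoding="utf-8") as fh:
        fh.write(data)


pty_manager = get_pty_reader_manager()
pty_manager.set_output_callback(log_terminal_output)
```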