gobby 0.2.8__py3-none-any.whl → 0.2.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (168)
  1. gobby/__init__.py +1 -1
  2. gobby/adapters/__init__.py +6 -0
  3. gobby/adapters/base.py +11 -2
  4. gobby/adapters/claude_code.py +5 -28
  5. gobby/adapters/codex_impl/adapter.py +38 -43
  6. gobby/adapters/copilot.py +324 -0
  7. gobby/adapters/cursor.py +373 -0
  8. gobby/adapters/gemini.py +2 -26
  9. gobby/adapters/windsurf.py +359 -0
  10. gobby/agents/definitions.py +162 -2
  11. gobby/agents/isolation.py +33 -1
  12. gobby/agents/pty_reader.py +192 -0
  13. gobby/agents/registry.py +10 -1
  14. gobby/agents/runner.py +24 -8
  15. gobby/agents/sandbox.py +8 -3
  16. gobby/agents/session.py +4 -0
  17. gobby/agents/spawn.py +9 -2
  18. gobby/agents/spawn_executor.py +49 -61
  19. gobby/agents/spawners/command_builder.py +4 -4
  20. gobby/app_context.py +64 -0
  21. gobby/cli/__init__.py +4 -0
  22. gobby/cli/install.py +259 -4
  23. gobby/cli/installers/__init__.py +12 -0
  24. gobby/cli/installers/copilot.py +242 -0
  25. gobby/cli/installers/cursor.py +244 -0
  26. gobby/cli/installers/shared.py +3 -0
  27. gobby/cli/installers/windsurf.py +242 -0
  28. gobby/cli/pipelines.py +639 -0
  29. gobby/cli/sessions.py +3 -1
  30. gobby/cli/skills.py +209 -0
  31. gobby/cli/tasks/crud.py +6 -5
  32. gobby/cli/tasks/search.py +1 -1
  33. gobby/cli/ui.py +116 -0
  34. gobby/cli/utils.py +5 -17
  35. gobby/cli/workflows.py +38 -17
  36. gobby/config/app.py +5 -0
  37. gobby/config/features.py +0 -20
  38. gobby/config/skills.py +23 -2
  39. gobby/config/tasks.py +4 -0
  40. gobby/hooks/broadcaster.py +9 -0
  41. gobby/hooks/event_handlers/__init__.py +155 -0
  42. gobby/hooks/event_handlers/_agent.py +175 -0
  43. gobby/hooks/event_handlers/_base.py +92 -0
  44. gobby/hooks/event_handlers/_misc.py +66 -0
  45. gobby/hooks/event_handlers/_session.py +487 -0
  46. gobby/hooks/event_handlers/_tool.py +196 -0
  47. gobby/hooks/events.py +48 -0
  48. gobby/hooks/hook_manager.py +27 -3
  49. gobby/install/copilot/hooks/hook_dispatcher.py +203 -0
  50. gobby/install/cursor/hooks/hook_dispatcher.py +203 -0
  51. gobby/install/gemini/hooks/hook_dispatcher.py +8 -0
  52. gobby/install/windsurf/hooks/hook_dispatcher.py +205 -0
  53. gobby/llm/__init__.py +14 -1
  54. gobby/llm/claude.py +594 -43
  55. gobby/llm/service.py +149 -0
  56. gobby/mcp_proxy/importer.py +4 -41
  57. gobby/mcp_proxy/instructions.py +9 -27
  58. gobby/mcp_proxy/manager.py +13 -3
  59. gobby/mcp_proxy/models.py +1 -0
  60. gobby/mcp_proxy/registries.py +66 -5
  61. gobby/mcp_proxy/server.py +6 -2
  62. gobby/mcp_proxy/services/recommendation.py +2 -28
  63. gobby/mcp_proxy/services/tool_filter.py +7 -0
  64. gobby/mcp_proxy/services/tool_proxy.py +19 -1
  65. gobby/mcp_proxy/stdio.py +37 -21
  66. gobby/mcp_proxy/tools/agents.py +7 -0
  67. gobby/mcp_proxy/tools/artifacts.py +3 -3
  68. gobby/mcp_proxy/tools/hub.py +30 -1
  69. gobby/mcp_proxy/tools/orchestration/cleanup.py +5 -5
  70. gobby/mcp_proxy/tools/orchestration/monitor.py +1 -1
  71. gobby/mcp_proxy/tools/orchestration/orchestrate.py +8 -3
  72. gobby/mcp_proxy/tools/orchestration/review.py +17 -4
  73. gobby/mcp_proxy/tools/orchestration/wait.py +7 -7
  74. gobby/mcp_proxy/tools/pipelines/__init__.py +254 -0
  75. gobby/mcp_proxy/tools/pipelines/_discovery.py +67 -0
  76. gobby/mcp_proxy/tools/pipelines/_execution.py +281 -0
  77. gobby/mcp_proxy/tools/sessions/_crud.py +4 -4
  78. gobby/mcp_proxy/tools/sessions/_handoff.py +1 -1
  79. gobby/mcp_proxy/tools/skills/__init__.py +184 -30
  80. gobby/mcp_proxy/tools/spawn_agent.py +229 -14
  81. gobby/mcp_proxy/tools/task_readiness.py +27 -4
  82. gobby/mcp_proxy/tools/tasks/_context.py +8 -0
  83. gobby/mcp_proxy/tools/tasks/_crud.py +27 -1
  84. gobby/mcp_proxy/tools/tasks/_helpers.py +1 -1
  85. gobby/mcp_proxy/tools/tasks/_lifecycle.py +125 -8
  86. gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +2 -1
  87. gobby/mcp_proxy/tools/tasks/_search.py +1 -1
  88. gobby/mcp_proxy/tools/workflows/__init__.py +273 -0
  89. gobby/mcp_proxy/tools/workflows/_artifacts.py +225 -0
  90. gobby/mcp_proxy/tools/workflows/_import.py +112 -0
  91. gobby/mcp_proxy/tools/workflows/_lifecycle.py +332 -0
  92. gobby/mcp_proxy/tools/workflows/_query.py +226 -0
  93. gobby/mcp_proxy/tools/workflows/_resolution.py +78 -0
  94. gobby/mcp_proxy/tools/workflows/_terminal.py +175 -0
  95. gobby/mcp_proxy/tools/worktrees.py +54 -15
  96. gobby/memory/components/__init__.py +0 -0
  97. gobby/memory/components/ingestion.py +98 -0
  98. gobby/memory/components/search.py +108 -0
  99. gobby/memory/context.py +5 -5
  100. gobby/memory/manager.py +16 -25
  101. gobby/paths.py +51 -0
  102. gobby/prompts/loader.py +1 -35
  103. gobby/runner.py +131 -16
  104. gobby/servers/http.py +193 -150
  105. gobby/servers/routes/__init__.py +2 -0
  106. gobby/servers/routes/admin.py +56 -0
  107. gobby/servers/routes/mcp/endpoints/execution.py +33 -32
  108. gobby/servers/routes/mcp/endpoints/registry.py +8 -8
  109. gobby/servers/routes/mcp/hooks.py +10 -1
  110. gobby/servers/routes/pipelines.py +227 -0
  111. gobby/servers/websocket.py +314 -1
  112. gobby/sessions/analyzer.py +89 -3
  113. gobby/sessions/manager.py +5 -5
  114. gobby/sessions/transcripts/__init__.py +3 -0
  115. gobby/sessions/transcripts/claude.py +5 -0
  116. gobby/sessions/transcripts/codex.py +5 -0
  117. gobby/sessions/transcripts/gemini.py +5 -0
  118. gobby/skills/hubs/__init__.py +25 -0
  119. gobby/skills/hubs/base.py +234 -0
  120. gobby/skills/hubs/claude_plugins.py +328 -0
  121. gobby/skills/hubs/clawdhub.py +289 -0
  122. gobby/skills/hubs/github_collection.py +465 -0
  123. gobby/skills/hubs/manager.py +263 -0
  124. gobby/skills/hubs/skillhub.py +342 -0
  125. gobby/skills/parser.py +23 -0
  126. gobby/skills/sync.py +5 -4
  127. gobby/storage/artifacts.py +19 -0
  128. gobby/storage/memories.py +4 -4
  129. gobby/storage/migrations.py +118 -3
  130. gobby/storage/pipelines.py +367 -0
  131. gobby/storage/sessions.py +23 -4
  132. gobby/storage/skills.py +48 -8
  133. gobby/storage/tasks/_aggregates.py +2 -2
  134. gobby/storage/tasks/_lifecycle.py +4 -4
  135. gobby/storage/tasks/_models.py +7 -1
  136. gobby/storage/tasks/_queries.py +3 -3
  137. gobby/sync/memories.py +4 -3
  138. gobby/tasks/commits.py +48 -17
  139. gobby/tasks/external_validator.py +4 -17
  140. gobby/tasks/validation.py +13 -87
  141. gobby/tools/summarizer.py +18 -51
  142. gobby/utils/status.py +13 -0
  143. gobby/workflows/actions.py +80 -0
  144. gobby/workflows/context_actions.py +265 -27
  145. gobby/workflows/definitions.py +119 -1
  146. gobby/workflows/detection_helpers.py +23 -11
  147. gobby/workflows/enforcement/__init__.py +11 -1
  148. gobby/workflows/enforcement/blocking.py +96 -0
  149. gobby/workflows/enforcement/handlers.py +35 -1
  150. gobby/workflows/enforcement/task_policy.py +18 -0
  151. gobby/workflows/engine.py +26 -4
  152. gobby/workflows/evaluator.py +8 -5
  153. gobby/workflows/lifecycle_evaluator.py +59 -27
  154. gobby/workflows/loader.py +567 -30
  155. gobby/workflows/lobster_compat.py +147 -0
  156. gobby/workflows/pipeline_executor.py +801 -0
  157. gobby/workflows/pipeline_state.py +172 -0
  158. gobby/workflows/pipeline_webhooks.py +206 -0
  159. gobby/workflows/premature_stop.py +5 -0
  160. gobby/worktrees/git.py +135 -20
  161. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/METADATA +56 -22
  162. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/RECORD +166 -122
  163. gobby/hooks/event_handlers.py +0 -1008
  164. gobby/mcp_proxy/tools/workflows.py +0 -1023
  165. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/WHEEL +0 -0
  166. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/entry_points.txt +0 -0
  167. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/licenses/LICENSE.md +0 -0
  168. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/top_level.txt +0 -0
@@ -1,1023 +0,0 @@
1
- """
2
- Internal MCP tools for Gobby Workflow System.
3
-
4
- Exposes functionality for:
5
- - get_workflow: Get details about a specific workflow definition
6
- - list_workflows: Discover available workflow definitions
7
- - activate_workflow: Start a step-based workflow (supports initial variables)
8
- - end_workflow: Complete/terminate active workflow
9
- - get_workflow_status: Get current workflow state
10
- - request_step_transition: Request transition to a different step
11
- - mark_artifact_complete: Register an artifact as complete
12
- - set_variable: Set a workflow variable for the session
13
- - get_variable: Get workflow variable(s) for the session
14
- - get_workflow_status: Get current workflow state
15
- - request_step_transition: Request transition to a different step
16
- - mark_artifact_complete: Register an artifact as complete
17
- - set_variable: Set a workflow variable for the session
18
- - get_variable: Get workflow variable(s) for the session
19
- - import_workflow: Import a workflow from a file path
20
- - reload_cache: Clear the workflow loader cache to pick up file changes
21
-
22
- These tools are registered with the InternalToolRegistry and accessed
23
- via the downstream proxy pattern (call_tool, list_tools, get_tool_schema).
24
- """
25
-
26
- import logging
27
- from datetime import UTC, datetime
28
- from pathlib import Path
29
- from typing import Any
30
-
31
- from gobby.mcp_proxy.tools.internal import InternalToolRegistry
32
- from gobby.storage.database import LocalDatabase
33
- from gobby.storage.sessions import LocalSessionManager
34
- from gobby.storage.tasks._id import resolve_task_reference
35
- from gobby.storage.tasks._models import TaskNotFoundError
36
- from gobby.utils.project_context import get_workflow_project_path
37
- from gobby.workflows.definitions import WorkflowState
38
- from gobby.workflows.loader import WorkflowLoader
39
- from gobby.workflows.state_manager import WorkflowStateManager
40
-
41
- logger = logging.getLogger(__name__)
42
-
43
-
44
- def _resolve_session_task_value(
45
- value: str,
46
- session_id: str | None,
47
- session_manager: LocalSessionManager,
48
- db: LocalDatabase,
49
- ) -> str:
50
- """
51
- Resolve a session_task value from seq_num reference (#N or N) to UUID.
52
-
53
- This prevents repeated resolution failures in condition evaluation when
54
- task_tree_complete() is called with a seq_num that requires project_id.
55
-
56
- Args:
57
- value: The value to potentially resolve (e.g., "#4424", "47", or a UUID)
58
- session_id: Session ID to look up project_id
59
- session_manager: Session manager for lookups
60
- db: Database for task resolution
61
-
62
- Returns:
63
- Resolved UUID if value was a seq_num reference, otherwise original value
64
- """
65
- # Only process string values that look like seq_num references
66
- if not isinstance(value, str):
67
- return value
68
-
69
- # Check if it's a seq_num reference (#N or plain N)
70
- is_seq_ref = value.startswith("#") or value.isdigit()
71
- if not is_seq_ref:
72
- return value
73
-
74
- # Need session to get project_id
75
- if not session_id:
76
- logger.warning(f"Cannot resolve task reference '{value}': no session_id provided")
77
- return value
78
-
79
- # Get project_id from session
80
- session = session_manager.get(session_id)
81
- if not session or not session.project_id:
82
- logger.warning(f"Cannot resolve task reference '{value}': session has no project_id")
83
- return value
84
-
85
- # Resolve the reference
86
- try:
87
- resolved = resolve_task_reference(db, value, session.project_id)
88
- logger.debug(f"Resolved session_task '{value}' to UUID '{resolved}'")
89
- return resolved
90
- except TaskNotFoundError as e:
91
- logger.warning(f"Could not resolve task reference '{value}': {e}")
92
- return value
93
- except Exception as e:
94
- logger.warning(f"Unexpected error resolving task reference '{value}': {e}")
95
- return value
96
-
97
-
98
- def create_workflows_registry(
99
- loader: WorkflowLoader | None = None,
100
- state_manager: WorkflowStateManager | None = None,
101
- session_manager: LocalSessionManager | None = None,
102
- db: LocalDatabase | None = None,
103
- ) -> InternalToolRegistry:
104
- """
105
- Create a workflow tool registry with all workflow-related tools.
106
-
107
- Args:
108
- loader: WorkflowLoader instance
109
- state_manager: WorkflowStateManager instance
110
- session_manager: LocalSessionManager instance
111
- db: LocalDatabase instance
112
-
113
- Returns:
114
- InternalToolRegistry with workflow tools registered
115
- """
116
- from gobby.utils.project_context import get_project_context
117
-
118
- # Create defaults if not provided
119
- _db = db or LocalDatabase()
120
- _loader = loader or WorkflowLoader()
121
- _state_manager = state_manager or WorkflowStateManager(_db)
122
- _session_manager = session_manager or LocalSessionManager(_db)
123
-
124
- def _resolve_session_id(ref: str) -> str:
125
- """Resolve session reference (#N, N, UUID, or prefix) to UUID."""
126
- project_ctx = get_project_context()
127
- project_id = project_ctx.get("id") if project_ctx else None
128
- return _session_manager.resolve_session_reference(ref, project_id)
129
-
130
- registry = InternalToolRegistry(
131
- name="gobby-workflows",
132
- description="Workflow management - list, activate, status, transition, end",
133
- )
134
-
135
- @registry.tool(
136
- name="get_workflow",
137
- description="Get details about a specific workflow definition.",
138
- )
139
- def get_workflow(
140
- name: str,
141
- project_path: str | None = None,
142
- ) -> dict[str, Any]:
143
- """
144
- Get workflow details including steps, triggers, and settings.
145
-
146
- Args:
147
- name: Workflow name (without .yaml extension)
148
- project_path: Project directory path. Auto-discovered from cwd if not provided.
149
-
150
- Returns:
151
- Workflow definition details
152
- """
153
- # Auto-discover project path if not provided
154
- if not project_path:
155
- discovered = get_workflow_project_path()
156
- if discovered:
157
- project_path = str(discovered)
158
-
159
- proj = Path(project_path) if project_path else None
160
- definition = _loader.load_workflow(name, proj)
161
-
162
- if not definition:
163
- return {"success": False, "error": f"Workflow '{name}' not found"}
164
-
165
- return {
166
- "success": True,
167
- "name": definition.name,
168
- "type": definition.type,
169
- "description": definition.description,
170
- "version": definition.version,
171
- "steps": (
172
- [
173
- {
174
- "name": s.name,
175
- "description": s.description,
176
- "allowed_tools": s.allowed_tools,
177
- "blocked_tools": s.blocked_tools,
178
- }
179
- for s in definition.steps
180
- ]
181
- if definition.steps
182
- else []
183
- ),
184
- "triggers": (
185
- {name: len(actions) for name, actions in definition.triggers.items()}
186
- if definition.triggers
187
- else {}
188
- ),
189
- "settings": definition.settings,
190
- }
191
-
192
- @registry.tool(
193
- name="list_workflows",
194
- description="List available workflow definitions from project and global directories.",
195
- )
196
- def list_workflows(
197
- project_path: str | None = None,
198
- workflow_type: str | None = None,
199
- global_only: bool = False,
200
- ) -> dict[str, Any]:
201
- """
202
- List available workflows.
203
-
204
- Lists workflows from both project (.gobby/workflows) and global (~/.gobby/workflows)
205
- directories. Project workflows shadow global ones with the same name.
206
-
207
- Args:
208
- project_path: Project directory path. Auto-discovered from cwd if not provided.
209
- workflow_type: Filter by type ("step" or "lifecycle")
210
- global_only: If True, only show global workflows (ignore project)
211
-
212
- Returns:
213
- List of workflows with name, type, description, and source
214
- """
215
- import yaml
216
-
217
- # Auto-discover project path if not provided
218
- if not project_path:
219
- discovered = get_workflow_project_path()
220
- if discovered:
221
- project_path = str(discovered)
222
-
223
- search_dirs = list(_loader.global_dirs)
224
- proj = Path(project_path) if project_path else None
225
-
226
- # Include project workflows unless global_only (project searched first to shadow global)
227
- if not global_only and proj:
228
- project_dir = proj / ".gobby" / "workflows"
229
- if project_dir.exists():
230
- search_dirs.insert(0, project_dir)
231
-
232
- workflows = []
233
- seen_names = set()
234
-
235
- for search_dir in search_dirs:
236
- if not search_dir.exists():
237
- continue
238
-
239
- is_project = proj and search_dir == (proj / ".gobby" / "workflows")
240
-
241
- for yaml_path in search_dir.glob("*.yaml"):
242
- name = yaml_path.stem
243
- if name in seen_names:
244
- continue
245
-
246
- try:
247
- with open(yaml_path) as f:
248
- data = yaml.safe_load(f)
249
-
250
- if not data:
251
- continue
252
-
253
- wf_type = data.get("type", "step")
254
-
255
- if workflow_type and wf_type != workflow_type:
256
- continue
257
-
258
- workflows.append(
259
- {
260
- "name": name,
261
- "type": wf_type,
262
- "description": data.get("description", ""),
263
- "source": "project" if is_project else "global",
264
- }
265
- )
266
- seen_names.add(name)
267
-
268
- except Exception:
269
- pass # nosec B110 - skip invalid workflow files
270
-
271
- return {"workflows": workflows, "count": len(workflows)}
272
-
273
- @registry.tool(
274
- name="activate_workflow",
275
- description="Activate a step-based workflow for the current session. Accepts #N, N, UUID, or prefix for session_id.",
276
- )
277
- def activate_workflow(
278
- name: str,
279
- session_id: str | None = None,
280
- initial_step: str | None = None,
281
- variables: dict[str, Any] | None = None,
282
- project_path: str | None = None,
283
- ) -> dict[str, Any]:
284
- """
285
- Activate a step-based workflow for the current session.
286
-
287
- Args:
288
- name: Workflow name (e.g., "plan-act-reflect", "auto-task")
289
- session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
290
- initial_step: Optional starting step (defaults to first step)
291
- variables: Optional initial variables to set (merged with workflow defaults)
292
- project_path: Project directory path. Auto-discovered from cwd if not provided.
293
-
294
- Returns:
295
- Success status, workflow info, and current step.
296
-
297
- Example:
298
- activate_workflow(
299
- name="auto-task",
300
- variables={"session_task": "#47"},
301
- session_id="#5"
302
- )
303
-
304
- Errors if:
305
- - session_id not provided
306
- - Another step-based workflow is currently active
307
- - Workflow not found
308
- - Workflow is lifecycle type (those auto-run, not manually activated)
309
- """
310
- # Auto-discover project path if not provided
311
- if not project_path:
312
- discovered = get_workflow_project_path()
313
- if discovered:
314
- project_path = str(discovered)
315
-
316
- proj = Path(project_path) if project_path else None
317
-
318
- # Load workflow
319
- definition = _loader.load_workflow(name, proj)
320
- if not definition:
321
- return {"success": False, "error": f"Workflow '{name}' not found"}
322
-
323
- if definition.type == "lifecycle":
324
- return {
325
- "success": False,
326
- "error": f"Workflow '{name}' is lifecycle type (auto-runs on events, not manually activated)",
327
- }
328
-
329
- # Require explicit session_id to prevent cross-session bleed
330
- if not session_id:
331
- return {
332
- "success": False,
333
- "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
334
- }
335
-
336
- # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
337
- try:
338
- resolved_session_id = _resolve_session_id(session_id)
339
- except ValueError as e:
340
- return {"success": False, "error": str(e)}
341
-
342
- # Check for existing workflow
343
- # Allow if:
344
- # - No existing state
345
- # - Existing is __lifecycle__ placeholder
346
- # - Existing is a lifecycle-type workflow (they run concurrently with step workflows)
347
- existing = _state_manager.get_state(resolved_session_id)
348
- if existing and existing.workflow_name != "__lifecycle__":
349
- # Check if existing workflow is a lifecycle type
350
- existing_def = _loader.load_workflow(existing.workflow_name, proj)
351
- # Only allow if we can confirm it's a lifecycle workflow
352
- # If definition not found or it's a step workflow, block activation
353
- if not existing_def or existing_def.type != "lifecycle":
354
- # It's a step workflow (or unknown) - can only have one active
355
- return {
356
- "success": False,
357
- "error": f"Session already has step workflow '{existing.workflow_name}' active. Use end_workflow first.",
358
- }
359
- # Existing is a lifecycle workflow - allow step workflow to activate alongside it
360
-
361
- # Determine initial step
362
- if initial_step:
363
- if not any(s.name == initial_step for s in definition.steps):
364
- return {
365
- "success": False,
366
- "error": f"Step '{initial_step}' not found. Available: {[s.name for s in definition.steps]}",
367
- }
368
- step = initial_step
369
- else:
370
- step = definition.steps[0].name if definition.steps else "default"
371
-
372
- # Merge workflow default variables with passed-in variables
373
- merged_variables = dict(definition.variables) # Start with workflow defaults
374
- if variables:
375
- merged_variables.update(variables) # Override with passed-in values
376
-
377
- # Resolve session_task references (#N or N) to UUIDs upfront
378
- # This prevents repeated resolution failures in condition evaluation
379
- if "session_task" in merged_variables:
380
- session_task_val = merged_variables["session_task"]
381
- if isinstance(session_task_val, str):
382
- merged_variables["session_task"] = _resolve_session_task_value(
383
- session_task_val, resolved_session_id, _session_manager, _db
384
- )
385
-
386
- # Create state
387
- state = WorkflowState(
388
- session_id=resolved_session_id,
389
- workflow_name=name,
390
- step=step,
391
- step_entered_at=datetime.now(UTC),
392
- step_action_count=0,
393
- total_action_count=0,
394
- artifacts={},
395
- observations=[],
396
- reflection_pending=False,
397
- context_injected=False,
398
- variables=merged_variables,
399
- task_list=None,
400
- current_task_index=0,
401
- files_modified_this_task=0,
402
- )
403
-
404
- _state_manager.save_state(state)
405
-
406
- return {
407
- "success": True,
408
- "session_id": resolved_session_id,
409
- "workflow": name,
410
- "step": step,
411
- "steps": [s.name for s in definition.steps],
412
- "variables": merged_variables,
413
- }
414
-
415
- @registry.tool(
416
- name="end_workflow",
417
- description="End the currently active step-based workflow. Accepts #N, N, UUID, or prefix for session_id.",
418
- )
419
- def end_workflow(
420
- session_id: str | None = None,
421
- reason: str | None = None,
422
- ) -> dict[str, Any]:
423
- """
424
- End the currently active step-based workflow.
425
-
426
- Allows starting a different workflow afterward.
427
- Does not affect lifecycle workflows (they continue running).
428
-
429
- Args:
430
- session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
431
- reason: Optional reason for ending
432
-
433
- Returns:
434
- Success status
435
- """
436
- # Require explicit session_id to prevent cross-session bleed
437
- if not session_id:
438
- return {
439
- "success": False,
440
- "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
441
- }
442
-
443
- # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
444
- try:
445
- resolved_session_id = _resolve_session_id(session_id)
446
- except ValueError as e:
447
- return {"success": False, "error": str(e)}
448
-
449
- state = _state_manager.get_state(resolved_session_id)
450
- if not state:
451
- return {"error": "No workflow active for session"}
452
-
453
- _state_manager.delete_state(resolved_session_id)
454
-
455
- return {}
456
-
457
- @registry.tool(
458
- name="get_workflow_status",
459
- description="Get current workflow step and state. Accepts #N, N, UUID, or prefix for session_id.",
460
- )
461
- def get_workflow_status(session_id: str | None = None) -> dict[str, Any]:
462
- """
463
- Get current workflow step and state.
464
-
465
- Args:
466
- session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
467
-
468
- Returns:
469
- Workflow state including step, action counts, artifacts
470
- """
471
- # Require explicit session_id to prevent cross-session bleed
472
- if not session_id:
473
- return {
474
- "has_workflow": False,
475
- "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
476
- }
477
-
478
- # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
479
- try:
480
- resolved_session_id = _resolve_session_id(session_id)
481
- except ValueError as e:
482
- return {"has_workflow": False, "error": str(e)}
483
-
484
- state = _state_manager.get_state(resolved_session_id)
485
- if not state:
486
- return {"has_workflow": False, "session_id": resolved_session_id}
487
-
488
- return {
489
- "has_workflow": True,
490
- "session_id": resolved_session_id,
491
- "workflow_name": state.workflow_name,
492
- "step": state.step,
493
- "step_action_count": state.step_action_count,
494
- "total_action_count": state.total_action_count,
495
- "reflection_pending": state.reflection_pending,
496
- "artifacts": list(state.artifacts.keys()) if state.artifacts else [],
497
- "variables": state.variables,
498
- "task_progress": (
499
- f"{state.current_task_index + 1}/{len(state.task_list)}"
500
- if state.task_list
501
- else None
502
- ),
503
- "updated_at": state.updated_at.isoformat() if state.updated_at else None,
504
- }
505
-
506
- @registry.tool(
507
- name="request_step_transition",
508
- description="Request transition to a different step. Accepts #N, N, UUID, or prefix for session_id.",
509
- )
510
- def request_step_transition(
511
- to_step: str,
512
- reason: str | None = None,
513
- session_id: str | None = None,
514
- force: bool = False,
515
- project_path: str | None = None,
516
- ) -> dict[str, Any]:
517
- """
518
- Request transition to a different step. May require approval.
519
-
520
- Args:
521
- to_step: Target step name
522
- reason: Reason for transition
523
- session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
524
- force: Skip exit condition checks
525
- project_path: Project directory path. Auto-discovered from cwd if not provided.
526
-
527
- Returns:
528
- Success status and new step info
529
- """
530
- # Auto-discover project path if not provided
531
- if not project_path:
532
- discovered = get_workflow_project_path()
533
- if discovered:
534
- project_path = str(discovered)
535
-
536
- proj = Path(project_path) if project_path else None
537
-
538
- # Require explicit session_id to prevent cross-session bleed
539
- if not session_id:
540
- return {
541
- "success": False,
542
- "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
543
- }
544
-
545
- # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
546
- try:
547
- resolved_session_id = _resolve_session_id(session_id)
548
- except ValueError as e:
549
- return {"success": False, "error": str(e)}
550
-
551
- state = _state_manager.get_state(resolved_session_id)
552
- if not state:
553
- return {"success": False, "error": "No workflow active for session"}
554
-
555
- # Load workflow to validate step
556
- definition = _loader.load_workflow(state.workflow_name, proj)
557
- if not definition:
558
- return {"success": False, "error": f"Workflow '{state.workflow_name}' not found"}
559
-
560
- if not any(s.name == to_step for s in definition.steps):
561
- return {
562
- "success": False,
563
- "error": f"Step '{to_step}' not found. Available: {[s.name for s in definition.steps]}",
564
- }
565
-
566
- # Block manual transitions to steps that have conditional auto-transitions
567
- # These steps should only be reached when their conditions are met
568
- current_step_def = next((s for s in definition.steps if s.name == state.step), None)
569
- if current_step_def and current_step_def.transitions:
570
- for transition in current_step_def.transitions:
571
- if transition.to == to_step and transition.when:
572
- # This step has a conditional transition - block manual transition
573
- return {
574
- "success": False,
575
- "error": (
576
- f"Step '{to_step}' has a conditional auto-transition "
577
- f"(when: {transition.when}). Manual transitions to this step "
578
- f"are blocked to prevent workflow circumvention. "
579
- f"The transition will occur automatically when the condition is met."
580
- ),
581
- }
582
-
583
- old_step = state.step
584
- state.step = to_step
585
- state.step_entered_at = datetime.now(UTC)
586
- state.step_action_count = 0
587
-
588
- _state_manager.save_state(state)
589
-
590
- return {
591
- "success": True,
592
- "from_step": old_step,
593
- "to_step": to_step,
594
- "reason": reason,
595
- "forced": force,
596
- }
597
-
598
- @registry.tool(
599
- name="mark_artifact_complete",
600
- description="Register an artifact as complete (plan, spec, etc.). Accepts #N, N, UUID, or prefix for session_id.",
601
- )
602
- def mark_artifact_complete(
603
- artifact_type: str,
604
- file_path: str,
605
- session_id: str | None = None,
606
- ) -> dict[str, Any]:
607
- """
608
- Register an artifact as complete.
609
-
610
- Args:
611
- artifact_type: Type of artifact (e.g., "plan", "spec", "test")
612
- file_path: Path to the artifact file
613
- session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
614
-
615
- Returns:
616
- Success status
617
- """
618
- # Require explicit session_id to prevent cross-session bleed
619
- if not session_id:
620
- return {
621
- "success": False,
622
- "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
623
- }
624
-
625
- # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
626
- try:
627
- resolved_session_id = _resolve_session_id(session_id)
628
- except ValueError as e:
629
- return {"success": False, "error": str(e)}
630
-
631
- state = _state_manager.get_state(resolved_session_id)
632
- if not state:
633
- return {"error": "No workflow active for session"}
634
-
635
- # Update artifacts
636
- state.artifacts[artifact_type] = file_path
637
- _state_manager.save_state(state)
638
-
639
- return {}
640
-
641
- @registry.tool(
642
- name="set_variable",
643
- description="Set a workflow variable for the current session (session-scoped, not persisted to YAML). Accepts #N, N, UUID, or prefix for session_id.",
644
- )
645
- def set_variable(
646
- name: str,
647
- value: str | int | float | bool | None,
648
- session_id: str | None = None,
649
- ) -> dict[str, Any]:
650
- """
651
- Set a workflow variable for the current session.
652
-
653
- Variables set this way are session-scoped - they persist in the database
654
- for the duration of the session but do not modify the workflow YAML file.
655
-
656
- This is useful for:
657
- - Setting session_epic to enforce epic completion before stopping
658
- - Setting is_worktree to mark a session as a worktree agent
659
- - Dynamic configuration without modifying workflow definitions
660
-
661
- Args:
662
- name: Variable name (e.g., "session_epic", "is_worktree")
663
- value: Variable value (string, number, boolean, or null)
664
- session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
665
-
666
- Returns:
667
- Success status and updated variables
668
- """
669
- # Require explicit session_id to prevent cross-session bleed
670
- if not session_id:
671
- return {
672
- "success": False,
673
- "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
674
- }
675
-
676
- # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
677
- try:
678
- resolved_session_id = _resolve_session_id(session_id)
679
- except ValueError as e:
680
- return {"success": False, "error": str(e)}
681
-
682
- # Get or create state
683
- state = _state_manager.get_state(resolved_session_id)
684
- if not state:
685
- # Create a minimal lifecycle state for variable storage
686
- state = WorkflowState(
687
- session_id=resolved_session_id,
688
- workflow_name="__lifecycle__",
689
- step="",
690
- step_entered_at=datetime.now(UTC),
691
- variables={},
692
- )
693
-
694
- # Block modification of session_task when a real workflow is active
695
- # This prevents circumventing workflows by changing the tracked task
696
- if name == "session_task" and state.workflow_name != "__lifecycle__":
697
- current_value = state.variables.get("session_task")
698
- if current_value is not None and value != current_value:
699
- return {
700
- "success": False,
701
- "error": (
702
- f"Cannot modify session_task while workflow '{state.workflow_name}' is active. "
703
- f"Current value: {current_value}. "
704
- f"Use end_workflow() first if you need to change the tracked task."
705
- ),
706
- }
707
-
708
- # Resolve session_task references (#N or N) to UUIDs upfront
709
- # This prevents repeated resolution failures in condition evaluation
710
- if name == "session_task" and isinstance(value, str):
711
- value = _resolve_session_task_value(value, resolved_session_id, _session_manager, _db)
712
-
713
- # Set the variable
714
- state.variables[name] = value
715
- _state_manager.save_state(state)
716
-
717
- # Add deprecation warning for session_task variable (when no workflow active)
718
- if name == "session_task" and value and state.workflow_name == "__lifecycle__":
719
- return {
720
- "warning": (
721
- "DEPRECATED: Setting session_task directly is deprecated. "
722
- "Use activate_workflow(name='auto-task', variables={'session_task': ...}) instead "
723
- "for proper state machine semantics and on_premature_stop handling."
724
- ),
725
- }
726
-
727
- return {}
728
-
729
- @registry.tool(
730
- name="get_variable",
731
- description="Get workflow variable(s) for the current session. Accepts #N, N, UUID, or prefix for session_id.",
732
- )
733
- def get_variable(
734
- name: str | None = None,
735
- session_id: str | None = None,
736
- ) -> dict[str, Any]:
737
- """
738
- Get workflow variable(s) for the current session.
739
-
740
- Args:
741
- name: Variable name to get (if None, returns all variables)
742
- session_id: Session reference (accepts #N, N, UUID, or prefix) - required to prevent cross-session bleed
743
-
744
- Returns:
745
- Variable value(s) and session info
746
- """
747
- # Require explicit session_id to prevent cross-session bleed
748
- if not session_id:
749
- return {
750
- "success": False,
751
- "error": "session_id is required. Pass the session ID explicitly to prevent cross-session variable bleed.",
752
- }
753
-
754
- # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
755
- try:
756
- resolved_session_id = _resolve_session_id(session_id)
757
- except ValueError as e:
758
- return {"success": False, "error": str(e)}
759
-
760
- state = _state_manager.get_state(resolved_session_id)
761
- if not state:
762
- if name:
763
- return {
764
- "success": True,
765
- "session_id": resolved_session_id,
766
- "variable": name,
767
- "value": None,
768
- "exists": False,
769
- }
770
- return {
771
- "success": True,
772
- "session_id": resolved_session_id,
773
- "variables": {},
774
- }
775
-
776
- if name:
777
- value = state.variables.get(name)
778
- return {
779
- "success": True,
780
- "session_id": resolved_session_id,
781
- "variable": name,
782
- "value": value,
783
- "exists": name in state.variables,
784
- }
785
-
786
- return {
787
- "success": True,
788
- "session_id": resolved_session_id,
789
- "variables": state.variables,
790
- }
791
-
792
- @registry.tool(
793
- name="import_workflow",
794
- description="Import a workflow from a file path into the project or global directory.",
795
- )
796
- def import_workflow(
797
- source_path: str,
798
- workflow_name: str | None = None,
799
- is_global: bool = False,
800
- project_path: str | None = None,
801
- ) -> dict[str, Any]:
802
- """
803
- Import a workflow from a file.
804
-
805
- Args:
806
- source_path: Path to the workflow YAML file
807
- workflow_name: Override the workflow name (defaults to name in file)
808
- is_global: Install to global ~/.gobby/workflows instead of project
809
- project_path: Project directory path. Auto-discovered from cwd if not provided.
810
-
811
- Returns:
812
- Success status and destination path
813
- """
814
- import shutil
815
-
816
- import yaml
817
-
818
- source = Path(source_path)
819
- if not source.exists():
820
- return {"success": False, "error": f"File not found: {source_path}"}
821
-
822
- if source.suffix != ".yaml":
823
- return {"success": False, "error": "Workflow file must have .yaml extension"}
824
-
825
- try:
826
- with open(source) as f:
827
- data = yaml.safe_load(f)
828
-
829
- if not data or "name" not in data:
830
- return {"success": False, "error": "Invalid workflow: missing 'name' field"}
831
-
832
- except yaml.YAMLError as e:
833
- return {"success": False, "error": f"Invalid YAML: {e}"}
834
-
835
- name = workflow_name or data.get("name", source.stem)
836
- filename = f"{name}.yaml"
837
-
838
- if is_global:
839
- dest_dir = Path.home() / ".gobby" / "workflows"
840
- else:
841
- # Auto-discover project path if not provided
842
- if not project_path:
843
- discovered = get_workflow_project_path()
844
- if discovered:
845
- project_path = str(discovered)
846
-
847
- proj = Path(project_path) if project_path else None
848
- if not proj:
849
- return {
850
- "success": False,
851
- "error": "project_path required when not using is_global (could not auto-discover)",
852
- }
853
- dest_dir = proj / ".gobby" / "workflows"
854
-
855
- dest_dir.mkdir(parents=True, exist_ok=True)
856
- dest_path = dest_dir / filename
857
-
858
- shutil.copy(source, dest_path)
859
-
860
- # Clear loader cache so new workflow is discoverable
861
- _loader.clear_cache()
862
-
863
- return {
864
- "success": True,
865
- "workflow_name": name,
866
- "destination": str(dest_path),
867
- "is_global": is_global,
868
- }
869
-
870
- @registry.tool(
871
- name="reload_cache",
872
- description="Clear the workflow cache. Use this after modifying workflow YAML files.",
873
- )
874
- def reload_cache() -> dict[str, Any]:
875
- """
876
- Clear the workflow loader cache.
877
-
878
- This forces the daemon to re-read workflow YAML files from disk
879
- on the next access. Use this when you've modified workflow files
880
- and want the changes to take effect immediately without restarting
881
- the daemon.
882
-
883
- Returns:
884
- Success status
885
- """
886
- _loader.clear_cache()
887
- logger.info("Workflow cache cleared via reload_cache tool")
888
- return {"message": "Workflow cache cleared"}
889
-
890
- @registry.tool(
891
- name="close_terminal",
892
- description=(
893
- "Close the current terminal window/pane (agent self-termination). "
894
- "Launches ~/.gobby/scripts/agent_shutdown.sh which handles "
895
- "terminal-specific shutdown (tmux, iTerm, etc.). Rebuilds script if missing."
896
- ),
897
- )
898
- async def close_terminal(
899
- signal: str = "TERM",
900
- delay_ms: int = 0,
901
- ) -> dict[str, Any]:
902
- """
903
- Close the current terminal by running the agent shutdown script.
904
-
905
- This is for agent self-termination (meeseeks-style). The agent calls
906
- this to close its own terminal window when done with its workflow.
907
-
908
- The script is located at ~/.gobby/scripts/agent_shutdown.sh and is
909
- automatically rebuilt if missing. It handles different terminal types
910
- (tmux, iTerm, Terminal.app, Ghostty, Kitty, WezTerm, etc.).
911
-
912
- Args:
913
- signal: Signal to use for shutdown (TERM, KILL, INT). Default: TERM.
914
- delay_ms: Optional delay in milliseconds before shutdown. Default: 0.
915
-
916
- Returns:
917
- Dict with success status and message.
918
- """
919
- import asyncio
920
- import os
921
- import stat
922
- import subprocess # nosec B404 - subprocess needed for agent shutdown script
923
-
924
- # Script location
925
- gobby_dir = Path.home() / ".gobby"
926
- scripts_dir = gobby_dir / "scripts"
927
- script_path = scripts_dir / "agent_shutdown.sh"
928
-
929
- # Source script from the install directory (single source of truth)
930
- source_script_path = (
931
- Path(__file__).parent.parent.parent
932
- / "install"
933
- / "shared"
934
- / "scripts"
935
- / "agent_shutdown.sh"
936
- )
937
-
938
- def get_script_version(script_content: str) -> str | None:
939
- """Extract VERSION marker from script content."""
940
- import re
941
-
942
- match = re.search(r"^# VERSION:\s*(.+)$", script_content, re.MULTILINE)
943
- return match.group(1).strip() if match else None
944
-
945
- # Ensure directories exist and script is present/up-to-date
946
- script_rebuilt = False
947
- try:
948
- scripts_dir.mkdir(parents=True, exist_ok=True)
949
-
950
- # Read source script content
951
- if source_script_path.exists():
952
- source_content = source_script_path.read_text()
953
- source_version = get_script_version(source_content)
954
- else:
955
- logger.warning(f"Source shutdown script not found at {source_script_path}")
956
- source_content = None
957
- source_version = None
958
-
959
- # Check if installed script exists and compare versions
960
- needs_rebuild = False
961
- if not script_path.exists():
962
- needs_rebuild = True
963
- elif source_content:
964
- installed_content = script_path.read_text()
965
- installed_version = get_script_version(installed_content)
966
- # Rebuild if versions differ or installed has no version marker
967
- if installed_version != source_version:
968
- needs_rebuild = True
969
- logger.info(
970
- f"Shutdown script version mismatch: installed={installed_version}, source={source_version}"
971
- )
972
-
973
- if needs_rebuild and source_content:
974
- script_path.write_text(source_content)
975
- # Make executable
976
- script_path.chmod(script_path.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP)
977
- script_rebuilt = True
978
- logger.info(f"Created/updated agent shutdown script at {script_path}")
979
- except OSError as e:
980
- return {
981
- "success": False,
982
- "error": f"Failed to create shutdown script: {e}",
983
- }
984
-
985
- # Validate signal
986
- valid_signals = {"TERM", "KILL", "INT", "HUP", "QUIT"}
987
- if signal.upper() not in valid_signals:
988
- return {
989
- "success": False,
990
- "error": f"Invalid signal '{signal}'. Valid: {valid_signals}",
991
- }
992
-
993
- # Apply delay before launching script (non-blocking)
994
- if delay_ms > 0:
995
- await asyncio.sleep(delay_ms / 1000.0)
996
-
997
- # Launch the script
998
- try:
999
- # Run in background - we don't wait for it since it kills our process
1000
- env = os.environ.copy()
1001
-
1002
- subprocess.Popen( # nosec B603 - script path is from gobby scripts directory
1003
- [str(script_path), signal.upper(), "0"], # Delay already applied
1004
- env=env,
1005
- start_new_session=True, # Detach from parent
1006
- stdout=subprocess.DEVNULL,
1007
- stderr=subprocess.DEVNULL,
1008
- )
1009
-
1010
- return {
1011
- "success": True,
1012
- "message": "Shutdown script launched",
1013
- "script_path": str(script_path),
1014
- "script_rebuilt": script_rebuilt,
1015
- "signal": signal.upper(),
1016
- }
1017
- except Exception as e:
1018
- return {
1019
- "success": False,
1020
- "error": f"Failed to launch shutdown script: {e}",
1021
- }
1022
-
1023
- return registry