gobby-0.2.8-py3-none-any.whl → gobby-0.2.11-py3-none-any.whl
This diff compares publicly available package versions as released to their public registry. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in that registry.
- gobby/__init__.py +1 -1
- gobby/adapters/__init__.py +6 -0
- gobby/adapters/base.py +11 -2
- gobby/adapters/claude_code.py +5 -28
- gobby/adapters/codex_impl/adapter.py +38 -43
- gobby/adapters/copilot.py +324 -0
- gobby/adapters/cursor.py +373 -0
- gobby/adapters/gemini.py +2 -26
- gobby/adapters/windsurf.py +359 -0
- gobby/agents/definitions.py +162 -2
- gobby/agents/isolation.py +33 -1
- gobby/agents/pty_reader.py +192 -0
- gobby/agents/registry.py +10 -1
- gobby/agents/runner.py +24 -8
- gobby/agents/sandbox.py +8 -3
- gobby/agents/session.py +4 -0
- gobby/agents/spawn.py +9 -2
- gobby/agents/spawn_executor.py +49 -61
- gobby/agents/spawners/command_builder.py +4 -4
- gobby/app_context.py +64 -0
- gobby/cli/__init__.py +4 -0
- gobby/cli/install.py +259 -4
- gobby/cli/installers/__init__.py +12 -0
- gobby/cli/installers/copilot.py +242 -0
- gobby/cli/installers/cursor.py +244 -0
- gobby/cli/installers/shared.py +3 -0
- gobby/cli/installers/windsurf.py +242 -0
- gobby/cli/pipelines.py +639 -0
- gobby/cli/sessions.py +3 -1
- gobby/cli/skills.py +209 -0
- gobby/cli/tasks/crud.py +6 -5
- gobby/cli/tasks/search.py +1 -1
- gobby/cli/ui.py +116 -0
- gobby/cli/utils.py +5 -17
- gobby/cli/workflows.py +38 -17
- gobby/config/app.py +5 -0
- gobby/config/features.py +0 -20
- gobby/config/skills.py +23 -2
- gobby/config/tasks.py +4 -0
- gobby/hooks/broadcaster.py +9 -0
- gobby/hooks/event_handlers/__init__.py +155 -0
- gobby/hooks/event_handlers/_agent.py +175 -0
- gobby/hooks/event_handlers/_base.py +92 -0
- gobby/hooks/event_handlers/_misc.py +66 -0
- gobby/hooks/event_handlers/_session.py +487 -0
- gobby/hooks/event_handlers/_tool.py +196 -0
- gobby/hooks/events.py +48 -0
- gobby/hooks/hook_manager.py +27 -3
- gobby/install/copilot/hooks/hook_dispatcher.py +203 -0
- gobby/install/cursor/hooks/hook_dispatcher.py +203 -0
- gobby/install/gemini/hooks/hook_dispatcher.py +8 -0
- gobby/install/windsurf/hooks/hook_dispatcher.py +205 -0
- gobby/llm/__init__.py +14 -1
- gobby/llm/claude.py +594 -43
- gobby/llm/service.py +149 -0
- gobby/mcp_proxy/importer.py +4 -41
- gobby/mcp_proxy/instructions.py +9 -27
- gobby/mcp_proxy/manager.py +13 -3
- gobby/mcp_proxy/models.py +1 -0
- gobby/mcp_proxy/registries.py +66 -5
- gobby/mcp_proxy/server.py +6 -2
- gobby/mcp_proxy/services/recommendation.py +2 -28
- gobby/mcp_proxy/services/tool_filter.py +7 -0
- gobby/mcp_proxy/services/tool_proxy.py +19 -1
- gobby/mcp_proxy/stdio.py +37 -21
- gobby/mcp_proxy/tools/agents.py +7 -0
- gobby/mcp_proxy/tools/artifacts.py +3 -3
- gobby/mcp_proxy/tools/hub.py +30 -1
- gobby/mcp_proxy/tools/orchestration/cleanup.py +5 -5
- gobby/mcp_proxy/tools/orchestration/monitor.py +1 -1
- gobby/mcp_proxy/tools/orchestration/orchestrate.py +8 -3
- gobby/mcp_proxy/tools/orchestration/review.py +17 -4
- gobby/mcp_proxy/tools/orchestration/wait.py +7 -7
- gobby/mcp_proxy/tools/pipelines/__init__.py +254 -0
- gobby/mcp_proxy/tools/pipelines/_discovery.py +67 -0
- gobby/mcp_proxy/tools/pipelines/_execution.py +281 -0
- gobby/mcp_proxy/tools/sessions/_crud.py +4 -4
- gobby/mcp_proxy/tools/sessions/_handoff.py +1 -1
- gobby/mcp_proxy/tools/skills/__init__.py +184 -30
- gobby/mcp_proxy/tools/spawn_agent.py +229 -14
- gobby/mcp_proxy/tools/task_readiness.py +27 -4
- gobby/mcp_proxy/tools/tasks/_context.py +8 -0
- gobby/mcp_proxy/tools/tasks/_crud.py +27 -1
- gobby/mcp_proxy/tools/tasks/_helpers.py +1 -1
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +125 -8
- gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +2 -1
- gobby/mcp_proxy/tools/tasks/_search.py +1 -1
- gobby/mcp_proxy/tools/workflows/__init__.py +273 -0
- gobby/mcp_proxy/tools/workflows/_artifacts.py +225 -0
- gobby/mcp_proxy/tools/workflows/_import.py +112 -0
- gobby/mcp_proxy/tools/workflows/_lifecycle.py +332 -0
- gobby/mcp_proxy/tools/workflows/_query.py +226 -0
- gobby/mcp_proxy/tools/workflows/_resolution.py +78 -0
- gobby/mcp_proxy/tools/workflows/_terminal.py +175 -0
- gobby/mcp_proxy/tools/worktrees.py +54 -15
- gobby/memory/components/__init__.py +0 -0
- gobby/memory/components/ingestion.py +98 -0
- gobby/memory/components/search.py +108 -0
- gobby/memory/context.py +5 -5
- gobby/memory/manager.py +16 -25
- gobby/paths.py +51 -0
- gobby/prompts/loader.py +1 -35
- gobby/runner.py +131 -16
- gobby/servers/http.py +193 -150
- gobby/servers/routes/__init__.py +2 -0
- gobby/servers/routes/admin.py +56 -0
- gobby/servers/routes/mcp/endpoints/execution.py +33 -32
- gobby/servers/routes/mcp/endpoints/registry.py +8 -8
- gobby/servers/routes/mcp/hooks.py +10 -1
- gobby/servers/routes/pipelines.py +227 -0
- gobby/servers/websocket.py +314 -1
- gobby/sessions/analyzer.py +89 -3
- gobby/sessions/manager.py +5 -5
- gobby/sessions/transcripts/__init__.py +3 -0
- gobby/sessions/transcripts/claude.py +5 -0
- gobby/sessions/transcripts/codex.py +5 -0
- gobby/sessions/transcripts/gemini.py +5 -0
- gobby/skills/hubs/__init__.py +25 -0
- gobby/skills/hubs/base.py +234 -0
- gobby/skills/hubs/claude_plugins.py +328 -0
- gobby/skills/hubs/clawdhub.py +289 -0
- gobby/skills/hubs/github_collection.py +465 -0
- gobby/skills/hubs/manager.py +263 -0
- gobby/skills/hubs/skillhub.py +342 -0
- gobby/skills/parser.py +23 -0
- gobby/skills/sync.py +5 -4
- gobby/storage/artifacts.py +19 -0
- gobby/storage/memories.py +4 -4
- gobby/storage/migrations.py +118 -3
- gobby/storage/pipelines.py +367 -0
- gobby/storage/sessions.py +23 -4
- gobby/storage/skills.py +48 -8
- gobby/storage/tasks/_aggregates.py +2 -2
- gobby/storage/tasks/_lifecycle.py +4 -4
- gobby/storage/tasks/_models.py +7 -1
- gobby/storage/tasks/_queries.py +3 -3
- gobby/sync/memories.py +4 -3
- gobby/tasks/commits.py +48 -17
- gobby/tasks/external_validator.py +4 -17
- gobby/tasks/validation.py +13 -87
- gobby/tools/summarizer.py +18 -51
- gobby/utils/status.py +13 -0
- gobby/workflows/actions.py +80 -0
- gobby/workflows/context_actions.py +265 -27
- gobby/workflows/definitions.py +119 -1
- gobby/workflows/detection_helpers.py +23 -11
- gobby/workflows/enforcement/__init__.py +11 -1
- gobby/workflows/enforcement/blocking.py +96 -0
- gobby/workflows/enforcement/handlers.py +35 -1
- gobby/workflows/enforcement/task_policy.py +18 -0
- gobby/workflows/engine.py +26 -4
- gobby/workflows/evaluator.py +8 -5
- gobby/workflows/lifecycle_evaluator.py +59 -27
- gobby/workflows/loader.py +567 -30
- gobby/workflows/lobster_compat.py +147 -0
- gobby/workflows/pipeline_executor.py +801 -0
- gobby/workflows/pipeline_state.py +172 -0
- gobby/workflows/pipeline_webhooks.py +206 -0
- gobby/workflows/premature_stop.py +5 -0
- gobby/worktrees/git.py +135 -20
- {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/METADATA +56 -22
- {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/RECORD +166 -122
- gobby/hooks/event_handlers.py +0 -1008
- gobby/mcp_proxy/tools/workflows.py +0 -1023
- {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/WHEEL +0 -0
- {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/entry_points.txt +0 -0
- {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/licenses/LICENSE.md +0 -0
- {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/top_level.txt +0 -0
gobby/mcp_proxy/tools/tasks/_lifecycle.py

@@ -3,6 +3,7 @@
 Provides task lifecycle tools: close, reopen, delete, and label management.
 """
 
+import uuid
 from typing import Any
 
 from gobby.mcp_proxy.tools.internal import InternalToolRegistry
@@ -20,6 +21,15 @@ from gobby.storage.tasks import TaskNotFoundError
 from gobby.storage.worktrees import LocalWorktreeManager
 
 
+def _is_uuid(value: str) -> bool:
+    """Check if a string is a valid UUID (not a ref like #123)."""
+    try:
+        uuid.UUID(value)
+        return True
+    except (ValueError, TypeError):
+        return False
+
+
 def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
     """Create a registry with task lifecycle tools.
 
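For reference, a minimal standalone sketch of how the added `_is_uuid` helper distinguishes UUIDs from task refs; the sample values below are arbitrary illustrations, not taken from the package.

```python
import uuid


def _is_uuid(value: str) -> bool:
    """Same logic as the helper added above: True only for UUID-parsable strings."""
    try:
        uuid.UUID(value)
        return True
    except (ValueError, TypeError):
        return False


print(_is_uuid("2f9b4a8c-1d3e-4f5a-9b6c-7d8e9f0a1b2c"))  # True  -> treated as a UUID
print(_is_uuid("#123"))                                   # False -> short task ref
print(_is_uuid("1.2.3"))                                  # False -> path-style ref
```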
@@ -121,7 +131,7 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
                     "You must commit your changes and link them to the task before closing."
                 ),
                 "suggestion": (
-                    "Commit your changes with `[…
+                    f"Commit your changes with `[{ctx.get_current_project_name() or 'project'}-#task_id]` in the message, "
                     "or pass `commit_sha` to `close_task`."
                 ),
             }
@@ -184,18 +194,20 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
         current_commit_sha = run_git_command(["git", "rev-parse", "--short", "HEAD"], cwd=cwd)
 
         if route_to_review:
-            # Route to …
-            # Task stays in …
+            # Route to needs_review status instead of closing
+            # Task stays in needs_review until user explicitly closes
             ctx.task_manager.update_task(
                 resolved_id,
-                status="…
+                status="needs_review",
                 validation_override_reason=override_justification if store_override else None,
             )
 
             # Auto-link session if provided
             if resolved_session_id:
                 try:
-                    ctx.session_task_manager.link_task(…
+                    ctx.session_task_manager.link_task(
+                        resolved_session_id, resolved_id, "needs_review"
+                    )
                 except Exception:
                     pass  # nosec B110 - best-effort linking
@@ -235,7 +247,17 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
         if resolved_session_id:
             try:
                 state = ctx.workflow_state_manager.get_state(resolved_session_id)
-                if state …
+                if state:
+                    # Resolve claimed_task_id to UUID if it's a ref (backward compat)
+                    claimed_task_id = state.variables.get("claimed_task_id")
+                    if claimed_task_id and not _is_uuid(claimed_task_id):
+                        try:
+                            claimed_task = ctx.task_manager.get_task(claimed_task_id)
+                            if claimed_task:
+                                claimed_task_id = claimed_task.id
+                        except Exception:  # nosec B110 - keep original ID if resolution fails
+                            claimed_task_id = claimed_task_id  # explicit no-op
+                if state and claimed_task_id == resolved_id:
                     # Check if clear_task_on_close is enabled (default: True)
                     clear_on_close = state.variables.get("clear_task_on_close", True)
                     if clear_on_close:
@@ -267,7 +289,7 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
 
     registry.register(
         name="close_task",
-        description="Close a task. Pass commit_sha to link and close in one call: close_task(task_id, commit_sha='abc123'). Or include […
+        description="Close a task. Pass commit_sha to link and close in one call: close_task(task_id, commit_sha='abc123'). Or include [project-#N] in commit message for auto-linking. Parent tasks require all children closed. Validation auto-skipped for: duplicate, already_implemented, wont_fix, obsolete, out_of_repo.",
         input_schema={
             "type": "object",
             "properties": {
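For orientation only, here is the argument shape implied by the updated `close_task` description and schema; the MCP client object and `call_tool` invocation shown below are assumptions, not part of this diff.

```python
# Hypothetical client-side call; only the tool name and argument names
# ("task_id", "commit_sha", "reason") come from the schema in this diff.
arguments = {
    "task_id": "#47",        # task reference: #N, path, or UUID
    "commit_sha": "abc123",  # link the commit and close in one call
    "reason": "completed",   # "duplicate"/"wont_fix"/"out_of_repo" etc. skip validation
}
# result = await client.call_tool("close_task", arguments)
```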
@@ -277,7 +299,7 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
                 },
                 "reason": {
                     "type": "string",
-                    "description": 'Reason for closing. Use "duplicate", "already_implemented", "wont_fix", or "…
+                    "description": 'Reason for closing. Use "duplicate", "already_implemented", "wont_fix", "obsolete", or "out_of_repo" to auto-skip validation and commit check.',
                     "default": "completed",
                 },
                 "changes_summary": {
@@ -567,6 +589,17 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
             except Exception:
                 pass  # nosec B110 - best-effort linking
 
+        # Set task_claimed workflow variable (enables Edit/Write hooks)
+        # This mirrors create_task behavior in _crud.py
+        try:
+            state = ctx.workflow_state_manager.get_state(resolved_session_id)
+            if state:
+                state.variables["task_claimed"] = True
+                state.variables["claimed_task_id"] = resolved_id  # Always use UUID
+                ctx.workflow_state_manager.save_state(state)
+        except Exception:
+            pass  # nosec B110 - best-effort variable setting
+
         return {}
 
     registry.register(
@@ -594,4 +627,88 @@ def create_lifecycle_registry(ctx: RegistryContext) -> InternalToolRegistry:
         func=claim_task,
     )
 
+    def mark_task_for_review(
+        task_id: str,
+        session_id: str,
+        review_notes: str | None = None,
+    ) -> dict[str, Any]:
+        """Mark a task as ready for review.
+
+        Sets status to 'needs_review'. Use this when work is complete
+        but needs human verification before closing.
+
+        Args:
+            task_id: Task reference (#N, path, or UUID)
+            session_id: Session ID marking the task for review
+            review_notes: Optional notes for the reviewer
+
+        Returns:
+            Empty dict on success, or error dict with details.
+        """
+        # Resolve task reference (supports #N, path, UUID formats)
+        try:
+            resolved_id = resolve_task_id_for_mcp(ctx.task_manager, task_id)
+        except TaskNotFoundError as e:
+            return {"success": False, "error": str(e)}
+        except ValueError as e:
+            return {"success": False, "error": str(e)}
+
+        task = ctx.task_manager.get_task(resolved_id)
+        if not task:
+            return {"success": False, "error": f"Task {task_id} not found"}
+
+        # Resolve session_id to UUID (accepts #N, N, UUID, or prefix)
+        resolved_session_id = session_id
+        try:
+            resolved_session_id = ctx.resolve_session_id(session_id)
+        except ValueError:
+            pass  # Fall back to raw value if resolution fails
+
+        # Build update kwargs
+        update_kwargs: dict[str, Any] = {"status": "needs_review"}
+
+        # Append review notes to description if provided
+        if review_notes:
+            current_desc = task.description or ""
+            review_section = f"\n\n[Review Notes]\n{review_notes}"
+            update_kwargs["description"] = current_desc + review_section
+
+        # Update task status to needs_review
+        updated = ctx.task_manager.update_task(resolved_id, **update_kwargs)
+        if not updated:
+            return {"success": False, "error": f"Failed to mark task {task_id} for review"}
+
+        # Link task to session (best-effort, don't fail if this fails)
+        try:
+            ctx.session_task_manager.link_task(resolved_session_id, resolved_id, "needs_review")
+        except Exception:
+            pass  # nosec B110 - best-effort linking
+
+        return {}
+
+    registry.register(
+        name="mark_task_for_review",
+        description="Mark a task as ready for review. Sets status to 'needs_review'. Use this when work is complete but needs human verification before closing.",
+        input_schema={
+            "type": "object",
+            "properties": {
+                "task_id": {
+                    "type": "string",
+                    "description": "Task reference: #N (e.g., #1, #47), path (e.g., 1.2.3), or UUID",
+                },
+                "session_id": {
+                    "type": "string",
+                    "description": "Your session ID (accepts #N, N, UUID, or prefix). The session marking the task for review.",
+                },
+                "review_notes": {
+                    "type": "string",
+                    "description": "Optional notes for the reviewer explaining what was done and what to verify.",
+                    "default": None,
+                },
+            },
+            "required": ["task_id", "session_id"],
+        },
+        func=mark_task_for_review,
+    )
+
     return registry
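A hedged call sketch for the new `mark_task_for_review` tool, using only the fields declared in its `input_schema` above; the surrounding client wrapper is an assumption.

```python
# Required fields per the schema: task_id and session_id; review_notes is optional.
arguments = {
    "task_id": "#12",    # #N, path (e.g., 1.2.3), or UUID
    "session_id": "#3",  # the session marking the task for review
    "review_notes": "Work is done; please verify against the task description.",
}
# result = await client.call_tool("mark_task_for_review", arguments)
```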
gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py

@@ -61,7 +61,8 @@ def validate_commit_requirements(
            '- Task was already done: reason="already_implemented"\n'
            '- Task is no longer needed: reason="obsolete"\n'
            '- Task duplicates another: reason="duplicate"\n'
-           '- Decided not to do it: reason="wont_fix"'
+           '- Decided not to do it: reason="wont_fix"\n'
+           '- Changes outside repo (e.g., ~/.gobby/config.yaml): reason="out_of_repo"'
        ),
    )
 
gobby/mcp_proxy/tools/tasks/_search.py

@@ -43,7 +43,7 @@ def create_search_registry(ctx: RegistryContext) -> InternalToolRegistry:
 
         Args:
             query: Search query text (required). Natural language query.
-            status: Filter by status (open, in_progress, …
+            status: Filter by status (open, in_progress, needs_review, closed).
                 Can be a single status or comma-separated list.
             task_type: Filter by task type (task, bug, feature, epic)
             priority: Filter by priority (1=High, 2=Medium, 3=Low)
gobby/mcp_proxy/tools/workflows/__init__.py (new file)

@@ -0,0 +1,273 @@
+"""
+Internal MCP tools for Gobby Workflow System.
+
+Exposes functionality for:
+- get_workflow: Get details about a specific workflow definition
+- list_workflows: Discover available workflow definitions
+- activate_workflow: Start a step-based workflow (supports initial variables)
+- end_workflow: Complete/terminate active workflow
+- get_workflow_status: Get current workflow state
+- request_step_transition: Request transition to a different step
+- mark_artifact_complete: Register an artifact as complete
+- set_variable: Set a workflow variable for the session
+- get_variable: Get workflow variable(s) for the session
+- import_workflow: Import a workflow from a file path
+- reload_cache: Clear the workflow loader cache to pick up file changes
+- close_terminal: Agent self-termination
+
+These tools are registered with the InternalToolRegistry and accessed
+via the downstream proxy pattern (call_tool, list_tools, get_tool_schema).
+"""
+
+from typing import Any
+
+from gobby.mcp_proxy.tools.internal import InternalToolRegistry
+from gobby.mcp_proxy.tools.workflows._artifacts import (
+    get_variable,
+    mark_artifact_complete,
+    set_variable,
+)
+from gobby.mcp_proxy.tools.workflows._import import import_workflow, reload_cache
+from gobby.mcp_proxy.tools.workflows._lifecycle import (
+    activate_workflow,
+    end_workflow,
+    request_step_transition,
+)
+from gobby.mcp_proxy.tools.workflows._query import (
+    get_workflow,
+    get_workflow_status,
+    list_workflows,
+)
+from gobby.mcp_proxy.tools.workflows._terminal import close_terminal
+from gobby.storage.database import DatabaseProtocol
+from gobby.storage.sessions import LocalSessionManager
+from gobby.utils.project_context import get_workflow_project_path
+from gobby.workflows.loader import WorkflowLoader
+from gobby.workflows.state_manager import WorkflowStateManager
+
+__all__ = [
+    "create_workflows_registry",
+    "get_workflow_project_path",
+]
+
+
+def create_workflows_registry(
+    loader: WorkflowLoader | None = None,
+    state_manager: WorkflowStateManager | None = None,
+    session_manager: LocalSessionManager | None = None,
+    db: DatabaseProtocol | None = None,
+) -> InternalToolRegistry:
+    """
+    Create a workflow tool registry with all workflow-related tools.
+
+    Args:
+        loader: WorkflowLoader instance
+        state_manager: WorkflowStateManager instance (created from db if not provided)
+        session_manager: LocalSessionManager instance (created from db if not provided)
+        db: Database instance for creating default managers
+
+    Returns:
+        InternalToolRegistry with workflow tools registered
+
+    Note:
+        If db is None and state_manager/session_manager are not provided,
+        tools requiring database access will return errors when called.
+    """
+    _db = db
+    _loader = loader or WorkflowLoader()
+
+    # Create default managers only if db is provided
+    if state_manager is not None:
+        _state_manager = state_manager
+    elif _db is not None:
+        _state_manager = WorkflowStateManager(_db)
+    else:
+        _state_manager = None
+
+    if session_manager is not None:
+        _session_manager = session_manager
+    elif _db is not None:
+        _session_manager = LocalSessionManager(_db)
+    else:
+        _session_manager = None
+
+    registry = InternalToolRegistry(
+        name="gobby-workflows",
+        description="Workflow management - list, activate, status, transition, end",
+    )
+
+    @registry.tool(
+        name="get_workflow",
+        description="Get details about a specific workflow definition.",
+    )
+    def _get_workflow(
+        name: str,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        return get_workflow(_loader, name, project_path)
+
+    @registry.tool(
+        name="list_workflows",
+        description="List available workflow definitions from project and global directories.",
+    )
+    def _list_workflows(
+        project_path: str | None = None,
+        workflow_type: str | None = None,
+        global_only: bool = False,
+    ) -> dict[str, Any]:
+        return list_workflows(_loader, project_path, workflow_type, global_only)
+
+    @registry.tool(
+        name="activate_workflow",
+        description="Activate a step-based workflow for the current session. Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _activate_workflow(
+        name: str,
+        session_id: str | None = None,
+        initial_step: str | None = None,
+        variables: dict[str, Any] | None = None,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None or _db is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return activate_workflow(
+            _loader,
+            _state_manager,
+            _session_manager,
+            _db,
+            name,
+            session_id,
+            initial_step,
+            variables,
+            project_path,
+        )
+
+    @registry.tool(
+        name="end_workflow",
+        description="End the currently active step-based workflow. Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _end_workflow(
+        session_id: str | None = None,
+        reason: str | None = None,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return end_workflow(
+            _loader, _state_manager, _session_manager, session_id, reason, project_path
+        )
+
+    @registry.tool(
+        name="get_workflow_status",
+        description="Get current workflow step and state. Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _get_workflow_status(session_id: str | None = None) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return get_workflow_status(_state_manager, _session_manager, session_id)
+
+    @registry.tool(
+        name="request_step_transition",
+        description="Request transition to a different step. Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _request_step_transition(
+        to_step: str,
+        reason: str | None = None,
+        session_id: str | None = None,
+        force: bool = False,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return request_step_transition(
+            _loader,
+            _state_manager,
+            _session_manager,
+            to_step,
+            reason,
+            session_id,
+            force,
+            project_path,
+        )
+
+    @registry.tool(
+        name="mark_artifact_complete",
+        description="Register an artifact as complete (plan, spec, etc.). Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _mark_artifact_complete(
+        artifact_type: str,
+        file_path: str,
+        session_id: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return mark_artifact_complete(
+            _state_manager, _session_manager, artifact_type, file_path, session_id
+        )
+
+    @registry.tool(
+        name="set_variable",
+        description="Set a workflow variable for the current session (session-scoped, not persisted to YAML). Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _set_variable(
+        name: str,
+        value: str | int | float | bool | None,
+        session_id: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None or _db is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return set_variable(_state_manager, _session_manager, _db, name, value, session_id)
+
+    @registry.tool(
+        name="get_variable",
+        description="Get workflow variable(s) for the current session. Accepts #N, N, UUID, or prefix for session_id.",
+    )
+    def _get_variable(
+        name: str | None = None,
+        session_id: str | None = None,
+    ) -> dict[str, Any]:
+        if _state_manager is None or _session_manager is None:
+            return {"success": False, "error": "Workflow tools require database connection"}
+        return get_variable(_state_manager, _session_manager, name, session_id)
+
+    @registry.tool(
+        name="import_workflow",
+        description="Import a workflow from a file path into the project or global directory.",
+    )
+    def _import_workflow(
+        source_path: str,
+        workflow_name: str | None = None,
+        is_global: bool = False,
+        project_path: str | None = None,
+    ) -> dict[str, Any]:
+        return import_workflow(_loader, source_path, workflow_name, is_global, project_path)
+
+    @registry.tool(
+        name="reload_cache",
+        description="Clear the workflow cache. Use this after modifying workflow YAML files.",
+    )
+    def _reload_cache() -> dict[str, Any]:
+        return reload_cache(_loader)
+
+    @registry.tool(
+        name="close_terminal",
+        description=(
+            "Close the current terminal window/pane (agent self-termination). "
+            "Launches ~/.gobby/scripts/agent_shutdown.sh which handles "
+            "terminal-specific shutdown (tmux, iTerm, etc.). Rebuilds script if missing. "
+            "Pass session_id to reliably target the correct terminal PID."
+        ),
+    )
+    async def _close_terminal(
+        session_id: str | None = None,
+        signal: str = "TERM",
+        delay_ms: int = 0,
+    ) -> dict[str, Any]:
+        return await close_terminal(
+            session_id=session_id,
+            session_manager=_session_manager,
+            signal=signal,
+            delay_ms=delay_ms,
+        )
+
+    return registry
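A minimal usage sketch for the new `gobby/mcp_proxy/tools/workflows/__init__.py` module, assuming only what its own docstring states; the commented call uses the "downstream proxy pattern (call_tool, list_tools, get_tool_schema)" named there, whose exact signatures are assumptions.

```python
from gobby.mcp_proxy.tools.workflows import create_workflows_registry

# Without a db, the docstring above says tools that need the database return
# errors when called, while loader-only tools (get_workflow, list_workflows,
# import_workflow, reload_cache) still work.
registry = create_workflows_registry()

# Assumed invocation via the downstream proxy pattern described in the docstring:
# result = registry.call_tool("list_workflows", {"global_only": True})
```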