gobby-0.2.5-py3-none-any.whl → gobby-0.2.7-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gobby/__init__.py +1 -1
- gobby/adapters/__init__.py +2 -1
- gobby/adapters/claude_code.py +13 -4
- gobby/adapters/codex_impl/__init__.py +28 -0
- gobby/adapters/codex_impl/adapter.py +722 -0
- gobby/adapters/codex_impl/client.py +679 -0
- gobby/adapters/codex_impl/protocol.py +20 -0
- gobby/adapters/codex_impl/types.py +68 -0
- gobby/agents/definitions.py +11 -1
- gobby/agents/isolation.py +395 -0
- gobby/agents/runner.py +8 -0
- gobby/agents/sandbox.py +261 -0
- gobby/agents/spawn.py +42 -287
- gobby/agents/spawn_executor.py +385 -0
- gobby/agents/spawners/__init__.py +24 -0
- gobby/agents/spawners/command_builder.py +189 -0
- gobby/agents/spawners/embedded.py +21 -2
- gobby/agents/spawners/headless.py +21 -2
- gobby/agents/spawners/prompt_manager.py +125 -0
- gobby/cli/__init__.py +6 -0
- gobby/cli/clones.py +419 -0
- gobby/cli/conductor.py +266 -0
- gobby/cli/install.py +4 -4
- gobby/cli/installers/antigravity.py +3 -9
- gobby/cli/installers/claude.py +15 -9
- gobby/cli/installers/codex.py +2 -8
- gobby/cli/installers/gemini.py +8 -8
- gobby/cli/installers/shared.py +175 -13
- gobby/cli/sessions.py +1 -1
- gobby/cli/skills.py +858 -0
- gobby/cli/tasks/ai.py +0 -440
- gobby/cli/tasks/crud.py +44 -6
- gobby/cli/tasks/main.py +0 -4
- gobby/cli/tui.py +2 -2
- gobby/cli/utils.py +12 -5
- gobby/clones/__init__.py +13 -0
- gobby/clones/git.py +547 -0
- gobby/conductor/__init__.py +16 -0
- gobby/conductor/alerts.py +135 -0
- gobby/conductor/loop.py +164 -0
- gobby/conductor/monitors/__init__.py +11 -0
- gobby/conductor/monitors/agents.py +116 -0
- gobby/conductor/monitors/tasks.py +155 -0
- gobby/conductor/pricing.py +234 -0
- gobby/conductor/token_tracker.py +160 -0
- gobby/config/__init__.py +12 -97
- gobby/config/app.py +69 -91
- gobby/config/extensions.py +2 -2
- gobby/config/features.py +7 -130
- gobby/config/search.py +110 -0
- gobby/config/servers.py +1 -1
- gobby/config/skills.py +43 -0
- gobby/config/tasks.py +9 -41
- gobby/hooks/__init__.py +0 -13
- gobby/hooks/event_handlers.py +188 -2
- gobby/hooks/hook_manager.py +50 -4
- gobby/hooks/plugins.py +1 -1
- gobby/hooks/skill_manager.py +130 -0
- gobby/hooks/webhooks.py +1 -1
- gobby/install/claude/hooks/hook_dispatcher.py +4 -4
- gobby/install/codex/hooks/hook_dispatcher.py +1 -1
- gobby/install/gemini/hooks/hook_dispatcher.py +87 -12
- gobby/llm/claude.py +22 -34
- gobby/llm/claude_executor.py +46 -256
- gobby/llm/codex_executor.py +59 -291
- gobby/llm/executor.py +21 -0
- gobby/llm/gemini.py +134 -110
- gobby/llm/litellm_executor.py +143 -6
- gobby/llm/resolver.py +98 -35
- gobby/mcp_proxy/importer.py +62 -4
- gobby/mcp_proxy/instructions.py +56 -0
- gobby/mcp_proxy/models.py +15 -0
- gobby/mcp_proxy/registries.py +68 -8
- gobby/mcp_proxy/server.py +33 -3
- gobby/mcp_proxy/services/recommendation.py +43 -11
- gobby/mcp_proxy/services/tool_proxy.py +81 -1
- gobby/mcp_proxy/stdio.py +2 -1
- gobby/mcp_proxy/tools/__init__.py +0 -2
- gobby/mcp_proxy/tools/agent_messaging.py +317 -0
- gobby/mcp_proxy/tools/agents.py +31 -731
- gobby/mcp_proxy/tools/clones.py +518 -0
- gobby/mcp_proxy/tools/memory.py +3 -26
- gobby/mcp_proxy/tools/metrics.py +65 -1
- gobby/mcp_proxy/tools/orchestration/__init__.py +3 -0
- gobby/mcp_proxy/tools/orchestration/cleanup.py +151 -0
- gobby/mcp_proxy/tools/orchestration/wait.py +467 -0
- gobby/mcp_proxy/tools/sessions/__init__.py +14 -0
- gobby/mcp_proxy/tools/sessions/_commits.py +232 -0
- gobby/mcp_proxy/tools/sessions/_crud.py +253 -0
- gobby/mcp_proxy/tools/sessions/_factory.py +63 -0
- gobby/mcp_proxy/tools/sessions/_handoff.py +499 -0
- gobby/mcp_proxy/tools/sessions/_messages.py +138 -0
- gobby/mcp_proxy/tools/skills/__init__.py +616 -0
- gobby/mcp_proxy/tools/spawn_agent.py +417 -0
- gobby/mcp_proxy/tools/task_orchestration.py +7 -0
- gobby/mcp_proxy/tools/task_readiness.py +14 -0
- gobby/mcp_proxy/tools/task_sync.py +1 -1
- gobby/mcp_proxy/tools/tasks/_context.py +0 -20
- gobby/mcp_proxy/tools/tasks/_crud.py +91 -4
- gobby/mcp_proxy/tools/tasks/_expansion.py +348 -0
- gobby/mcp_proxy/tools/tasks/_factory.py +6 -16
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +110 -45
- gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +18 -29
- gobby/mcp_proxy/tools/workflows.py +1 -1
- gobby/mcp_proxy/tools/worktrees.py +0 -338
- gobby/memory/backends/__init__.py +6 -1
- gobby/memory/backends/mem0.py +6 -1
- gobby/memory/extractor.py +477 -0
- gobby/memory/ingestion/__init__.py +5 -0
- gobby/memory/ingestion/multimodal.py +221 -0
- gobby/memory/manager.py +73 -285
- gobby/memory/search/__init__.py +10 -0
- gobby/memory/search/coordinator.py +248 -0
- gobby/memory/services/__init__.py +5 -0
- gobby/memory/services/crossref.py +142 -0
- gobby/prompts/loader.py +5 -2
- gobby/runner.py +37 -16
- gobby/search/__init__.py +48 -6
- gobby/search/backends/__init__.py +159 -0
- gobby/search/backends/embedding.py +225 -0
- gobby/search/embeddings.py +238 -0
- gobby/search/models.py +148 -0
- gobby/search/unified.py +496 -0
- gobby/servers/http.py +24 -12
- gobby/servers/routes/admin.py +294 -0
- gobby/servers/routes/mcp/endpoints/__init__.py +61 -0
- gobby/servers/routes/mcp/endpoints/discovery.py +405 -0
- gobby/servers/routes/mcp/endpoints/execution.py +568 -0
- gobby/servers/routes/mcp/endpoints/registry.py +378 -0
- gobby/servers/routes/mcp/endpoints/server.py +304 -0
- gobby/servers/routes/mcp/hooks.py +1 -1
- gobby/servers/routes/mcp/tools.py +48 -1317
- gobby/servers/websocket.py +2 -2
- gobby/sessions/analyzer.py +2 -0
- gobby/sessions/lifecycle.py +1 -1
- gobby/sessions/processor.py +10 -0
- gobby/sessions/transcripts/base.py +2 -0
- gobby/sessions/transcripts/claude.py +79 -10
- gobby/skills/__init__.py +91 -0
- gobby/skills/loader.py +685 -0
- gobby/skills/manager.py +384 -0
- gobby/skills/parser.py +286 -0
- gobby/skills/search.py +463 -0
- gobby/skills/sync.py +119 -0
- gobby/skills/updater.py +385 -0
- gobby/skills/validator.py +368 -0
- gobby/storage/clones.py +378 -0
- gobby/storage/database.py +1 -1
- gobby/storage/memories.py +43 -13
- gobby/storage/migrations.py +162 -201
- gobby/storage/sessions.py +116 -7
- gobby/storage/skills.py +782 -0
- gobby/storage/tasks/_crud.py +4 -4
- gobby/storage/tasks/_lifecycle.py +57 -7
- gobby/storage/tasks/_manager.py +14 -5
- gobby/storage/tasks/_models.py +8 -3
- gobby/sync/memories.py +40 -5
- gobby/sync/tasks.py +83 -6
- gobby/tasks/__init__.py +1 -2
- gobby/tasks/external_validator.py +1 -1
- gobby/tasks/validation.py +46 -35
- gobby/tools/summarizer.py +91 -10
- gobby/tui/api_client.py +4 -7
- gobby/tui/app.py +5 -3
- gobby/tui/screens/orchestrator.py +1 -2
- gobby/tui/screens/tasks.py +2 -4
- gobby/tui/ws_client.py +1 -1
- gobby/utils/daemon_client.py +2 -2
- gobby/utils/project_context.py +2 -3
- gobby/utils/status.py +13 -0
- gobby/workflows/actions.py +221 -1135
- gobby/workflows/artifact_actions.py +31 -0
- gobby/workflows/autonomous_actions.py +11 -0
- gobby/workflows/context_actions.py +93 -1
- gobby/workflows/detection_helpers.py +115 -31
- gobby/workflows/enforcement/__init__.py +47 -0
- gobby/workflows/enforcement/blocking.py +269 -0
- gobby/workflows/enforcement/commit_policy.py +283 -0
- gobby/workflows/enforcement/handlers.py +269 -0
- gobby/workflows/{task_enforcement_actions.py → enforcement/task_policy.py} +29 -388
- gobby/workflows/engine.py +13 -2
- gobby/workflows/git_utils.py +106 -0
- gobby/workflows/lifecycle_evaluator.py +29 -1
- gobby/workflows/llm_actions.py +30 -0
- gobby/workflows/loader.py +19 -6
- gobby/workflows/mcp_actions.py +20 -1
- gobby/workflows/memory_actions.py +154 -0
- gobby/workflows/safe_evaluator.py +183 -0
- gobby/workflows/session_actions.py +44 -0
- gobby/workflows/state_actions.py +60 -1
- gobby/workflows/stop_signal_actions.py +55 -0
- gobby/workflows/summary_actions.py +111 -1
- gobby/workflows/task_sync_actions.py +347 -0
- gobby/workflows/todo_actions.py +34 -1
- gobby/workflows/webhook_actions.py +185 -0
- {gobby-0.2.5.dist-info → gobby-0.2.7.dist-info}/METADATA +87 -21
- {gobby-0.2.5.dist-info → gobby-0.2.7.dist-info}/RECORD +201 -172
- {gobby-0.2.5.dist-info → gobby-0.2.7.dist-info}/WHEEL +1 -1
- gobby/adapters/codex.py +0 -1292
- gobby/install/claude/commands/gobby/bug.md +0 -51
- gobby/install/claude/commands/gobby/chore.md +0 -51
- gobby/install/claude/commands/gobby/epic.md +0 -52
- gobby/install/claude/commands/gobby/eval.md +0 -235
- gobby/install/claude/commands/gobby/feat.md +0 -49
- gobby/install/claude/commands/gobby/nit.md +0 -52
- gobby/install/claude/commands/gobby/ref.md +0 -52
- gobby/install/codex/prompts/forget.md +0 -7
- gobby/install/codex/prompts/memories.md +0 -7
- gobby/install/codex/prompts/recall.md +0 -7
- gobby/install/codex/prompts/remember.md +0 -13
- gobby/llm/gemini_executor.py +0 -339
- gobby/mcp_proxy/tools/session_messages.py +0 -1056
- gobby/mcp_proxy/tools/task_expansion.py +0 -591
- gobby/prompts/defaults/expansion/system.md +0 -119
- gobby/prompts/defaults/expansion/user.md +0 -48
- gobby/prompts/defaults/external_validation/agent.md +0 -72
- gobby/prompts/defaults/external_validation/external.md +0 -63
- gobby/prompts/defaults/external_validation/spawn.md +0 -83
- gobby/prompts/defaults/external_validation/system.md +0 -6
- gobby/prompts/defaults/features/import_mcp.md +0 -22
- gobby/prompts/defaults/features/import_mcp_github.md +0 -17
- gobby/prompts/defaults/features/import_mcp_search.md +0 -16
- gobby/prompts/defaults/features/recommend_tools.md +0 -32
- gobby/prompts/defaults/features/recommend_tools_hybrid.md +0 -35
- gobby/prompts/defaults/features/recommend_tools_llm.md +0 -30
- gobby/prompts/defaults/features/server_description.md +0 -20
- gobby/prompts/defaults/features/server_description_system.md +0 -6
- gobby/prompts/defaults/features/task_description.md +0 -31
- gobby/prompts/defaults/features/task_description_system.md +0 -6
- gobby/prompts/defaults/features/tool_summary.md +0 -17
- gobby/prompts/defaults/features/tool_summary_system.md +0 -6
- gobby/prompts/defaults/research/step.md +0 -58
- gobby/prompts/defaults/validation/criteria.md +0 -47
- gobby/prompts/defaults/validation/validate.md +0 -38
- gobby/storage/migrations_legacy.py +0 -1359
- gobby/tasks/context.py +0 -747
- gobby/tasks/criteria.py +0 -342
- gobby/tasks/expansion.py +0 -626
- gobby/tasks/prompts/expand.py +0 -327
- gobby/tasks/research.py +0 -421
- gobby/tasks/tdd.py +0 -352
- {gobby-0.2.5.dist-info → gobby-0.2.7.dist-info}/entry_points.txt +0 -0
- {gobby-0.2.5.dist-info → gobby-0.2.7.dist-info}/licenses/LICENSE.md +0 -0
- {gobby-0.2.5.dist-info → gobby-0.2.7.dist-info}/top_level.txt +0 -0

@@ -26,6 +26,157 @@ def register_cleanup(
     default_project_id: str | None = None,
 ) -> None:
     """Register cleanup tools."""
+    from gobby.mcp_proxy.tools.tasks import resolve_task_id_for_mcp
+    from gobby.storage.tasks import TaskNotFoundError
+
+    async def approve_and_cleanup(
+        task_id: str,
+        push_branch: bool = False,
+        delete_worktree: bool = True,
+        force: bool = False,
+    ) -> dict[str, Any]:
+        """
+        Approve a reviewed task and clean up its worktree.
+
+        This tool transitions a task from "review" to "closed" status
+        and optionally deletes the associated worktree.
+
+        Args:
+            task_id: Task reference (#N, N, path, or UUID)
+            push_branch: Whether to push the branch to remote before cleanup
+            delete_worktree: Whether to delete the git worktree (default: True)
+            force: Force deletion even if worktree is dirty
+
+        Returns:
+            Dict with:
+            - success: Whether the operation succeeded
+            - task_status: New task status
+            - worktree_deleted: Whether worktree was deleted
+            - branch_pushed: Whether branch was pushed
+        """
+        # Resolve task ID
+        try:
+            resolved_task_id = resolve_task_id_for_mcp(task_manager, task_id)
+        except (TaskNotFoundError, ValueError) as e:
+            return {
+                "success": False,
+                "error": f"Task not found: {task_id} ({e})",
+            }
+
+        # Get the task
+        task = task_manager.get_task(resolved_task_id)
+        if task is None:
+            return {
+                "success": False,
+                "error": f"Task not found: {task_id}",
+            }
+
+        # Verify task is in review status
+        if task.status != "review":
+            return {
+                "success": False,
+                "error": f"Task must be in 'review' status to approve. Current status: {task.status}",
+            }
+
+        # Get associated worktree (if any)
+        worktree = worktree_storage.get_by_task(resolved_task_id)
+        branch_pushed = False
+        worktree_deleted = False
+
+        # Push branch to remote if requested
+        if push_branch and worktree and git_manager:
+            try:
+                push_result = git_manager._run_git(
+                    ["push", "origin", worktree.branch_name],
+                    timeout=60,
+                )
+                branch_pushed = push_result.returncode == 0
+                if not branch_pushed:
+                    logger.warning(f"Failed to push branch: {push_result.stderr}")
+            except Exception as e:
+                logger.warning(f"Error pushing branch: {e}")
+
+        # Update task status FIRST - before worktree deletion
+        try:
+            task_manager.update_task(
+                resolved_task_id,
+                status="closed",
+            )
+        except Exception as e:
+            return {
+                "success": False,
+                "error": f"Failed to update task status: {e}",
+                "task_id": resolved_task_id,
+                "worktree_deleted": False,
+            }
+
+        # Delete worktree if requested and available (after task is closed)
+        if delete_worktree and worktree:
+            if git_manager is None:
+                # No git manager - can't delete worktree, but continue
+                logger.warning("Git manager not available, skipping worktree deletion")
+            else:
+                try:
+                    delete_result = git_manager.delete_worktree(
+                        worktree_path=worktree.worktree_path,
+                        force=force,
+                        delete_branch=False,  # Keep branch for history
+                    )
+
+                    if delete_result.success:
+                        worktree_deleted = True
+                        # Mark worktree as merged and delete record
+                        worktree_storage.mark_merged(worktree.id)
+                        worktree_storage.delete(worktree.id)
+                    else:
+                        # Task is closed but worktree deletion failed
+                        logger.warning(f"Failed to delete worktree: {delete_result.message}")
+                except Exception as e:
+                    # Task is closed but worktree deletion failed
+                    logger.warning(f"Error deleting worktree: {e}")
+
+        return {
+            "success": True,
+            "task_id": resolved_task_id,
+            "task_status": "closed",
+            "worktree_deleted": worktree_deleted,
+            "branch_pushed": branch_pushed,
+            "message": f"Task {task_id} approved and marked as closed",
+        }
+
+    registry.register(
+        name="approve_and_cleanup",
+        description=(
+            "Approve a reviewed task and clean up its worktree. "
+            "Transitions task from 'review' to 'closed' status and deletes worktree."
+        ),
+        input_schema={
+            "type": "object",
+            "properties": {
+                "task_id": {
+                    "type": "string",
+                    "description": "Task reference: #N, N (seq_num), path (1.2.3), or UUID",
+                },
+                "push_branch": {
+                    "type": "boolean",
+                    "description": "Whether to push branch to remote before cleanup",
+                    "default": False,
+                },
+                "delete_worktree": {
+                    "type": "boolean",
+                    "description": "Whether to delete the git worktree",
+                    "default": True,
+                },
+                "force": {
+                    "type": "boolean",
+                    "description": "Force deletion even if worktree is dirty",
+                    "default": False,
+                },
+            },
+            "required": ["task_id"],
+        },
+        func=approve_and_cleanup,
+    )

 async def cleanup_reviewed_worktrees(
     parent_session_id: str,
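The hunk above extends `register_cleanup` (the file list attributes it to `gobby/mcp_proxy/tools/orchestration/cleanup.py`, the entry gaining exactly 151 lines) with a new `approve_and_cleanup` closure registered as an MCP tool. For review, here is a minimal sketch of the calling contract using a hypothetical stand-in registry; only the `register(name=..., description=..., input_schema=..., func=...)` keyword shape and the documented return keys come from the diff.

```python
# Hypothetical review harness: DictRegistry is NOT gobby's InternalToolRegistry,
# it only records closures by name so a registered tool can be awaited directly.
from typing import Any, Callable


class DictRegistry:
    def __init__(self) -> None:
        self.tools: dict[str, Callable[..., Any]] = {}

    def register(self, *, name: str, description: str, input_schema: dict,
                 func: Callable[..., Any]) -> None:
        # Mirrors the keyword shape of the registry.register(...) calls in the diff.
        self.tools[name] = func


async def review_demo(registry: DictRegistry) -> None:
    # Assumes register_cleanup(...) has already been called against `registry`.
    approve = registry.tools["approve_and_cleanup"]
    result = await approve(task_id="#42", push_branch=True)
    # Per the docstring above: success, task_status, worktree_deleted, branch_pushed.
    print(result["success"], result.get("task_status"), result.get("worktree_deleted"))
```

Note the ordering the implementation commits to: the task is moved to "closed" before the worktree is removed, so a failed worktree deletion leaves a closed task and only a warning in the logs.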
@@ -0,0 +1,467 @@
+"""Task orchestration tools: wait (wait_for_task, wait_for_any_task, wait_for_all_tasks).
+
+Provides blocking wait operations for task completion with timeout support.
+"""
+
+from __future__ import annotations
+
+import asyncio
+import logging
+import time
+from typing import TYPE_CHECKING, Any
+
+from gobby.mcp_proxy.tools.internal import InternalToolRegistry
+from gobby.storage.tasks import TaskNotFoundError
+
+if TYPE_CHECKING:
+    from gobby.storage.tasks import LocalTaskManager
+
+logger = logging.getLogger(__name__)
+
+# Default timeout and poll interval
+DEFAULT_TIMEOUT = 300.0  # 5 minutes
+DEFAULT_POLL_INTERVAL = 5.0  # 5 seconds
+
+
+def register_wait(
+    registry: InternalToolRegistry,
+    task_manager: LocalTaskManager,
+) -> None:
+    """
+    Register wait tools for task completion.
+
+    Args:
+        registry: The tool registry to add tools to
+        task_manager: Task manager for checking task status
+    """
+
+    def _resolve_task_id(task_ref: str) -> str:
+        """Resolve a task reference to its UUID."""
+        from gobby.mcp_proxy.tools.tasks import resolve_task_id_for_mcp
+
+        return resolve_task_id_for_mcp(task_manager, task_ref)
+
+    def _is_task_complete(task_id: str) -> tuple[bool, dict[str, Any] | None]:
+        """
+        Check if a task is complete.
+
+        Returns:
+            Tuple of (is_complete, task_info_dict or None)
+        """
+        task = task_manager.get_task(task_id)
+        if task is None:
+            return False, None
+
+        task_info = {
+            "id": task.id,
+            "seq_num": task.seq_num,
+            "title": task.title,
+            "status": task.status,
+            "closed_at": task.closed_at,
+        }
+
+        # Consider task complete if status is "closed" or "review"
+        # (review tasks have completed their work, just awaiting human approval)
+        is_complete = task.status in ("closed", "review")
+        return is_complete, task_info
+
+    async def wait_for_task(
+        task_id: str,
+        timeout: float = DEFAULT_TIMEOUT,
+        poll_interval: float = DEFAULT_POLL_INTERVAL,
+    ) -> dict[str, Any]:
+        """
+        Wait for a single task to complete.
+
+        Blocks until the task reaches "closed" or "review" status, or timeout expires.
+
+        Args:
+            task_id: Task reference (#N, N, path, or UUID)
+            timeout: Maximum wait time in seconds (default: 300)
+            poll_interval: Time between status checks in seconds (default: 5)
+
+        Returns:
+            Dict with:
+            - success: Whether the operation succeeded
+            - completed: Whether the task completed
+            - timed_out: Whether timeout was reached
+            - task: Task info dict (if found)
+            - wait_time: How long we waited
+        """
+        # Validate poll_interval
+        if poll_interval <= 0:
+            poll_interval = DEFAULT_POLL_INTERVAL
+
+        start_time = time.monotonic()
+
+        try:
+            resolved_id = _resolve_task_id(task_id)
+        except (TaskNotFoundError, ValueError) as e:
+            return {
+                "success": False,
+                "error": f"Task not found: {task_id} ({e})",
+            }
+        except Exception as e:
+            return {
+                "success": False,
+                "error": f"Failed to resolve task: {task_id} ({e})",
+            }
+
+        # Check initial state
+        try:
+            is_complete, task_info = _is_task_complete(resolved_id)
+        except Exception as e:
+            return {
+                "success": False,
+                "error": f"Failed to check task status: {e}",
+            }
+
+        if task_info is None:
+            return {
+                "success": False,
+                "error": f"Task not found: {task_id}",
+            }
+
+        if is_complete:
+            return {
+                "success": True,
+                "completed": True,
+                "timed_out": False,
+                "task": task_info,
+                "wait_time": 0.0,
+            }
+
+        # Poll until complete or timeout
+        while True:
+            elapsed = time.monotonic() - start_time
+
+            if elapsed >= timeout:
+                # Re-fetch latest task state before returning timeout
+                try:
+                    _, task_info = _is_task_complete(resolved_id)
+                except Exception as e:
+                    logger.warning(f"Error fetching final task status on timeout: {e}")
+                return {
+                    "success": True,
+                    "completed": False,
+                    "timed_out": True,
+                    "task": task_info,
+                    "wait_time": elapsed,
+                }
+
+            await asyncio.sleep(poll_interval)
+
+            try:
+                is_complete, task_info = _is_task_complete(resolved_id)
+            except Exception as e:
+                logger.warning(f"Error checking task status: {e}")
+                continue
+
+            if is_complete:
+                return {
+                    "success": True,
+                    "completed": True,
+                    "timed_out": False,
+                    "task": task_info,
+                    "wait_time": time.monotonic() - start_time,
+                }
+
+    registry.register(
+        name="wait_for_task",
+        description=(
+            "Wait for a single task to complete. "
+            "Blocks until task reaches 'closed' or 'review' status, or timeout expires."
+        ),
+        input_schema={
+            "type": "object",
+            "properties": {
+                "task_id": {
+                    "type": "string",
+                    "description": "Task reference: #N, N (seq_num), path (1.2.3), or UUID",
+                },
+                "timeout": {
+                    "type": "number",
+                    "description": f"Maximum wait time in seconds (default: {DEFAULT_TIMEOUT})",
+                },
+                "poll_interval": {
+                    "type": "number",
+                    "description": f"Time between status checks in seconds (default: {DEFAULT_POLL_INTERVAL})",
+                },
+            },
+            "required": ["task_id"],
+        },
+        func=wait_for_task,
+    )
+
+    async def wait_for_any_task(
+        task_ids: list[str],
+        timeout: float = DEFAULT_TIMEOUT,
+        poll_interval: float = DEFAULT_POLL_INTERVAL,
+    ) -> dict[str, Any]:
+        """
+        Wait for any one of multiple tasks to complete.
+
+        Blocks until at least one task reaches "closed" or "review" status, or timeout expires.
+
+        Args:
+            task_ids: List of task references (#N, N, path, or UUID)
+            timeout: Maximum wait time in seconds (default: 300)
+            poll_interval: Time between status checks in seconds (default: 5)
+
+        Returns:
+            Dict with:
+            - success: Whether the operation succeeded
+            - completed_task_id: ID of the first completed task (or None)
+            - timed_out: Whether timeout was reached
+            - wait_time: How long we waited
+        """
+        if not task_ids:
+            return {
+                "success": False,
+                "error": "No task IDs provided - task_ids list is empty",
+            }
+
+        # Validate poll_interval
+        if poll_interval <= 0:
+            poll_interval = DEFAULT_POLL_INTERVAL
+
+        start_time = time.monotonic()
+
+        # Resolve all task IDs upfront
+        resolved_ids = []
+        for task_ref in task_ids:
+            try:
+                resolved_id = _resolve_task_id(task_ref)
+                resolved_ids.append(resolved_id)
+            except (TaskNotFoundError, ValueError) as e:
+                logger.warning(f"Could not resolve task {task_ref}: {e}")
+                # Continue with other tasks
+
+        if not resolved_ids:
+            return {
+                "success": False,
+                "error": "None of the provided task IDs could be resolved",
+            }
+
+        # Check if any are already complete
+        for resolved_id in resolved_ids:
+            try:
+                is_complete, task_info = _is_task_complete(resolved_id)
+                if is_complete:
+                    return {
+                        "success": True,
+                        "completed_task_id": resolved_id,
+                        "task": task_info,
+                        "timed_out": False,
+                        "wait_time": 0.0,
+                    }
+            except Exception as e:
+                logger.warning(f"Error checking task {resolved_id}: {e}")
+
+        # Poll until one completes or timeout
+        while True:
+            elapsed = time.monotonic() - start_time
+
+            if elapsed >= timeout:
+                return {
+                    "success": True,
+                    "completed_task_id": None,
+                    "timed_out": True,
+                    "wait_time": elapsed,
+                }
+
+            await asyncio.sleep(poll_interval)
+
+            for resolved_id in resolved_ids:
+                try:
+                    is_complete, task_info = _is_task_complete(resolved_id)
+                    if is_complete:
+                        return {
+                            "success": True,
+                            "completed_task_id": resolved_id,
+                            "task": task_info,
+                            "timed_out": False,
+                            "wait_time": time.monotonic() - start_time,
+                        }
+                except Exception as e:
+                    logger.warning(f"Error checking task {resolved_id}: {e}")
+
+    registry.register(
+        name="wait_for_any_task",
+        description=(
+            "Wait for any one of multiple tasks to complete. "
+            "Returns as soon as the first task reaches 'closed' or 'review' status."
+        ),
+        input_schema={
+            "type": "object",
+            "properties": {
+                "task_ids": {
+                    "type": "array",
+                    "items": {"type": "string"},
+                    "description": "List of task references",
+                },
+                "timeout": {
+                    "type": "number",
+                    "description": f"Maximum wait time in seconds (default: {DEFAULT_TIMEOUT})",
+                },
+                "poll_interval": {
+                    "type": "number",
+                    "description": f"Time between status checks in seconds (default: {DEFAULT_POLL_INTERVAL})",
+                },
+            },
+            "required": ["task_ids"],
+        },
+        func=wait_for_any_task,
+    )
+
+    async def wait_for_all_tasks(
+        task_ids: list[str],
+        timeout: float = DEFAULT_TIMEOUT,
+        poll_interval: float = DEFAULT_POLL_INTERVAL,
+    ) -> dict[str, Any]:
+        """
+        Wait for all tasks to complete.
+
+        Blocks until all tasks reach "closed" or "review" status, or timeout expires.
+
+        Args:
+            task_ids: List of task references (#N, N, path, or UUID)
+            timeout: Maximum wait time in seconds (default: 300)
+            poll_interval: Time between status checks in seconds (default: 5)
+
+        Returns:
+            Dict with:
+            - success: Whether the operation succeeded
+            - all_completed: Whether all tasks completed
+            - completed_count: Number of completed tasks
+            - pending_count: Number of still-pending tasks
+            - timed_out: Whether timeout was reached
+            - completed_tasks: List of completed task IDs
+            - pending_tasks: List of pending task IDs
+            - wait_time: How long we waited
+        """
+        if not task_ids:
+            # Empty list is vacuously true - all (zero) tasks are complete
+            return {
+                "success": True,
+                "all_completed": True,
+                "completed_count": 0,
+                "pending_count": 0,
+                "timed_out": False,
+                "completed_tasks": [],
+                "pending_tasks": [],
+                "wait_time": 0.0,
+            }
+
+        # Validate poll_interval
+        if poll_interval <= 0:
+            poll_interval = DEFAULT_POLL_INTERVAL
+
+        start_time = time.monotonic()
+
+        # Resolve all task IDs upfront
+        resolved_ids = []
+        for task_ref in task_ids:
+            try:
+                resolved_id = _resolve_task_id(task_ref)
+                resolved_ids.append(resolved_id)
+            except (TaskNotFoundError, ValueError) as e:
+                logger.warning(f"Could not resolve task {task_ref}: {e}")
+
+        if not resolved_ids:
+            return {
+                "success": False,
+                "error": "None of the provided task IDs could be resolved",
+            }
+
+        def check_all_complete() -> tuple[list[str], list[str]]:
+            """Check which tasks are complete. Returns (completed, pending)."""
+            completed = []
+            pending = []
+            for resolved_id in resolved_ids:
+                try:
+                    is_complete, _ = _is_task_complete(resolved_id)
+                    if is_complete:
+                        completed.append(resolved_id)
+                    else:
+                        pending.append(resolved_id)
+                except Exception as e:
+                    logger.warning(f"Error checking task {resolved_id}: {e}")
+                    pending.append(resolved_id)  # Assume not complete on error
+            return completed, pending
+
+        # Check initial state
+        completed, pending = check_all_complete()
+
+        if not pending:
+            return {
+                "success": True,
+                "all_completed": True,
+                "completed_count": len(completed),
+                "pending_count": 0,
+                "timed_out": False,
+                "completed_tasks": completed,
+                "pending_tasks": [],
+                "wait_time": 0.0,
+            }
+
+        # Poll until all complete or timeout
+        while True:
+            elapsed = time.monotonic() - start_time
+
+            if elapsed >= timeout:
+                completed, pending = check_all_complete()
+                return {
+                    "success": True,
+                    "all_completed": False,
+                    "completed_count": len(completed),
+                    "pending_count": len(pending),
+                    "timed_out": True,
+                    "completed_tasks": completed,
+                    "pending_tasks": pending,
+                    "wait_time": elapsed,
+                }
+
+            await asyncio.sleep(poll_interval)
+
+            completed, pending = check_all_complete()
+
+            if not pending:
+                return {
+                    "success": True,
+                    "all_completed": True,
+                    "completed_count": len(completed),
+                    "pending_count": 0,
+                    "timed_out": False,
+                    "completed_tasks": completed,
+                    "pending_tasks": [],
+                    "wait_time": time.monotonic() - start_time,
+                }
+
+    registry.register(
+        name="wait_for_all_tasks",
+        description=(
+            "Wait for all tasks to complete. "
+            "Blocks until all tasks reach 'closed' or 'review' status, or timeout expires."
+        ),
+        input_schema={
+            "type": "object",
+            "properties": {
+                "task_ids": {
+                    "type": "array",
+                    "items": {"type": "string"},
+                    "description": "List of task references",
+                },
+                "timeout": {
+                    "type": "number",
+                    "description": f"Maximum wait time in seconds (default: {DEFAULT_TIMEOUT})",
+                },
+                "poll_interval": {
+                    "type": "number",
+                    "description": f"Time between status checks in seconds (default: {DEFAULT_POLL_INTERVAL})",
+                },
+            },
+            "required": ["task_ids"],
+        },
+        func=wait_for_all_tasks,
+    )
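This hunk is the new `gobby/mcp_proxy/tools/orchestration/wait.py` (the only +467 entry in the file list). All three tools poll `task_manager.get_task` and treat both "closed" and "review" as terminal. Below is a sketch of how an orchestrating caller might consume `wait_for_all_tasks`; `call_tool` is a hypothetical MCP-client helper, not a gobby API, while the argument names and result keys are taken from the schemas and docstrings above.

```python
# Illustrative consumer of wait_for_all_tasks. `call_tool(name, arguments)` stands in
# for whatever MCP client actually invokes gobby's registered tools (assumption).
async def finish_fanout(call_tool, spawned_task_ids: list[str]) -> None:
    result = await call_tool(
        "wait_for_all_tasks",
        {"task_ids": spawned_task_ids, "timeout": 600, "poll_interval": 10},
    )
    if not result.get("success"):
        raise RuntimeError(result.get("error", "wait_for_all_tasks failed"))
    if result["timed_out"]:
        # Partial progress: completed_tasks / pending_tasks hold resolved task UUIDs.
        print(f"{result['completed_count']} done, {result['pending_count']} still pending")
    else:
        print(f"all {result['completed_count']} tasks reached closed/review "
              f"in {result['wait_time']:.1f}s")
```

Worth noting for reviewers: a timeout is reported as `success=True` with `timed_out=True` rather than as an error, and unresolvable task references are only logged and skipped, so callers must inspect `timed_out` and the counts instead of relying on an exception.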
@@ -0,0 +1,14 @@
+"""Session tools package.
+
+This package provides MCP tools for session management. Re-exports maintain
+backwards compatibility with the original session_messages.py module.
+
+Public API:
+- create_session_messages_registry: Factory function to create the session tool registry
+"""
+
+from gobby.mcp_proxy.tools.sessions._factory import create_session_messages_registry
+
+__all__ = [
+    "create_session_messages_registry",
+]
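The final hunk is the new package initializer `gobby/mcp_proxy/tools/sessions/__init__.py`, which re-exports the factory now defined in `sessions/_factory.py`; the monolithic `session_messages.py` is removed in this release (the -1056 entry in the file list). A minimal sketch of the resulting import, assuming the factory name is unchanged from the old module:

```python
# The removed session_messages.py presumably exposed the same factory (assumption);
# in 0.2.7 the import goes through the sessions package re-export instead.
from gobby.mcp_proxy.tools.sessions import create_session_messages_registry
```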