gobby 0.2.5 (gobby-0.2.5-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gobby/__init__.py +3 -0
- gobby/adapters/__init__.py +30 -0
- gobby/adapters/base.py +93 -0
- gobby/adapters/claude_code.py +276 -0
- gobby/adapters/codex.py +1292 -0
- gobby/adapters/gemini.py +343 -0
- gobby/agents/__init__.py +37 -0
- gobby/agents/codex_session.py +120 -0
- gobby/agents/constants.py +112 -0
- gobby/agents/context.py +362 -0
- gobby/agents/definitions.py +133 -0
- gobby/agents/gemini_session.py +111 -0
- gobby/agents/registry.py +618 -0
- gobby/agents/runner.py +968 -0
- gobby/agents/session.py +259 -0
- gobby/agents/spawn.py +916 -0
- gobby/agents/spawners/__init__.py +77 -0
- gobby/agents/spawners/base.py +142 -0
- gobby/agents/spawners/cross_platform.py +266 -0
- gobby/agents/spawners/embedded.py +225 -0
- gobby/agents/spawners/headless.py +226 -0
- gobby/agents/spawners/linux.py +125 -0
- gobby/agents/spawners/macos.py +277 -0
- gobby/agents/spawners/windows.py +308 -0
- gobby/agents/tty_config.py +319 -0
- gobby/autonomous/__init__.py +32 -0
- gobby/autonomous/progress_tracker.py +447 -0
- gobby/autonomous/stop_registry.py +269 -0
- gobby/autonomous/stuck_detector.py +383 -0
- gobby/cli/__init__.py +67 -0
- gobby/cli/__main__.py +8 -0
- gobby/cli/agents.py +529 -0
- gobby/cli/artifacts.py +266 -0
- gobby/cli/daemon.py +329 -0
- gobby/cli/extensions.py +526 -0
- gobby/cli/github.py +263 -0
- gobby/cli/init.py +53 -0
- gobby/cli/install.py +614 -0
- gobby/cli/installers/__init__.py +37 -0
- gobby/cli/installers/antigravity.py +65 -0
- gobby/cli/installers/claude.py +363 -0
- gobby/cli/installers/codex.py +192 -0
- gobby/cli/installers/gemini.py +294 -0
- gobby/cli/installers/git_hooks.py +377 -0
- gobby/cli/installers/shared.py +737 -0
- gobby/cli/linear.py +250 -0
- gobby/cli/mcp.py +30 -0
- gobby/cli/mcp_proxy.py +698 -0
- gobby/cli/memory.py +304 -0
- gobby/cli/merge.py +384 -0
- gobby/cli/projects.py +79 -0
- gobby/cli/sessions.py +622 -0
- gobby/cli/tasks/__init__.py +30 -0
- gobby/cli/tasks/_utils.py +658 -0
- gobby/cli/tasks/ai.py +1025 -0
- gobby/cli/tasks/commits.py +169 -0
- gobby/cli/tasks/crud.py +685 -0
- gobby/cli/tasks/deps.py +135 -0
- gobby/cli/tasks/labels.py +63 -0
- gobby/cli/tasks/main.py +273 -0
- gobby/cli/tasks/search.py +178 -0
- gobby/cli/tui.py +34 -0
- gobby/cli/utils.py +513 -0
- gobby/cli/workflows.py +927 -0
- gobby/cli/worktrees.py +481 -0
- gobby/config/__init__.py +129 -0
- gobby/config/app.py +551 -0
- gobby/config/extensions.py +167 -0
- gobby/config/features.py +472 -0
- gobby/config/llm_providers.py +98 -0
- gobby/config/logging.py +66 -0
- gobby/config/mcp.py +346 -0
- gobby/config/persistence.py +247 -0
- gobby/config/servers.py +141 -0
- gobby/config/sessions.py +250 -0
- gobby/config/tasks.py +784 -0
- gobby/hooks/__init__.py +104 -0
- gobby/hooks/artifact_capture.py +213 -0
- gobby/hooks/broadcaster.py +243 -0
- gobby/hooks/event_handlers.py +723 -0
- gobby/hooks/events.py +218 -0
- gobby/hooks/git.py +169 -0
- gobby/hooks/health_monitor.py +171 -0
- gobby/hooks/hook_manager.py +856 -0
- gobby/hooks/hook_types.py +575 -0
- gobby/hooks/plugins.py +813 -0
- gobby/hooks/session_coordinator.py +396 -0
- gobby/hooks/verification_runner.py +268 -0
- gobby/hooks/webhooks.py +339 -0
- gobby/install/claude/commands/gobby/bug.md +51 -0
- gobby/install/claude/commands/gobby/chore.md +51 -0
- gobby/install/claude/commands/gobby/epic.md +52 -0
- gobby/install/claude/commands/gobby/eval.md +235 -0
- gobby/install/claude/commands/gobby/feat.md +49 -0
- gobby/install/claude/commands/gobby/nit.md +52 -0
- gobby/install/claude/commands/gobby/ref.md +52 -0
- gobby/install/claude/hooks/HOOK_SCHEMAS.md +632 -0
- gobby/install/claude/hooks/hook_dispatcher.py +364 -0
- gobby/install/claude/hooks/validate_settings.py +102 -0
- gobby/install/claude/hooks-template.json +118 -0
- gobby/install/codex/hooks/hook_dispatcher.py +153 -0
- gobby/install/codex/prompts/forget.md +7 -0
- gobby/install/codex/prompts/memories.md +7 -0
- gobby/install/codex/prompts/recall.md +7 -0
- gobby/install/codex/prompts/remember.md +13 -0
- gobby/install/gemini/hooks/hook_dispatcher.py +268 -0
- gobby/install/gemini/hooks-template.json +138 -0
- gobby/install/shared/plugins/code_guardian.py +456 -0
- gobby/install/shared/plugins/example_notify.py +331 -0
- gobby/integrations/__init__.py +10 -0
- gobby/integrations/github.py +145 -0
- gobby/integrations/linear.py +145 -0
- gobby/llm/__init__.py +40 -0
- gobby/llm/base.py +120 -0
- gobby/llm/claude.py +578 -0
- gobby/llm/claude_executor.py +503 -0
- gobby/llm/codex.py +322 -0
- gobby/llm/codex_executor.py +513 -0
- gobby/llm/executor.py +316 -0
- gobby/llm/factory.py +34 -0
- gobby/llm/gemini.py +258 -0
- gobby/llm/gemini_executor.py +339 -0
- gobby/llm/litellm.py +287 -0
- gobby/llm/litellm_executor.py +303 -0
- gobby/llm/resolver.py +499 -0
- gobby/llm/service.py +236 -0
- gobby/mcp_proxy/__init__.py +29 -0
- gobby/mcp_proxy/actions.py +175 -0
- gobby/mcp_proxy/daemon_control.py +198 -0
- gobby/mcp_proxy/importer.py +436 -0
- gobby/mcp_proxy/lazy.py +325 -0
- gobby/mcp_proxy/manager.py +798 -0
- gobby/mcp_proxy/metrics.py +609 -0
- gobby/mcp_proxy/models.py +139 -0
- gobby/mcp_proxy/registries.py +215 -0
- gobby/mcp_proxy/schema_hash.py +381 -0
- gobby/mcp_proxy/semantic_search.py +706 -0
- gobby/mcp_proxy/server.py +549 -0
- gobby/mcp_proxy/services/__init__.py +0 -0
- gobby/mcp_proxy/services/fallback.py +306 -0
- gobby/mcp_proxy/services/recommendation.py +224 -0
- gobby/mcp_proxy/services/server_mgmt.py +214 -0
- gobby/mcp_proxy/services/system.py +72 -0
- gobby/mcp_proxy/services/tool_filter.py +231 -0
- gobby/mcp_proxy/services/tool_proxy.py +309 -0
- gobby/mcp_proxy/stdio.py +565 -0
- gobby/mcp_proxy/tools/__init__.py +27 -0
- gobby/mcp_proxy/tools/agents.py +1103 -0
- gobby/mcp_proxy/tools/artifacts.py +207 -0
- gobby/mcp_proxy/tools/hub.py +335 -0
- gobby/mcp_proxy/tools/internal.py +337 -0
- gobby/mcp_proxy/tools/memory.py +543 -0
- gobby/mcp_proxy/tools/merge.py +422 -0
- gobby/mcp_proxy/tools/metrics.py +283 -0
- gobby/mcp_proxy/tools/orchestration/__init__.py +23 -0
- gobby/mcp_proxy/tools/orchestration/cleanup.py +619 -0
- gobby/mcp_proxy/tools/orchestration/monitor.py +380 -0
- gobby/mcp_proxy/tools/orchestration/orchestrate.py +746 -0
- gobby/mcp_proxy/tools/orchestration/review.py +736 -0
- gobby/mcp_proxy/tools/orchestration/utils.py +16 -0
- gobby/mcp_proxy/tools/session_messages.py +1056 -0
- gobby/mcp_proxy/tools/task_dependencies.py +219 -0
- gobby/mcp_proxy/tools/task_expansion.py +591 -0
- gobby/mcp_proxy/tools/task_github.py +393 -0
- gobby/mcp_proxy/tools/task_linear.py +379 -0
- gobby/mcp_proxy/tools/task_orchestration.py +77 -0
- gobby/mcp_proxy/tools/task_readiness.py +522 -0
- gobby/mcp_proxy/tools/task_sync.py +351 -0
- gobby/mcp_proxy/tools/task_validation.py +843 -0
- gobby/mcp_proxy/tools/tasks/__init__.py +25 -0
- gobby/mcp_proxy/tools/tasks/_context.py +112 -0
- gobby/mcp_proxy/tools/tasks/_crud.py +516 -0
- gobby/mcp_proxy/tools/tasks/_factory.py +176 -0
- gobby/mcp_proxy/tools/tasks/_helpers.py +129 -0
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +517 -0
- gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +301 -0
- gobby/mcp_proxy/tools/tasks/_resolution.py +55 -0
- gobby/mcp_proxy/tools/tasks/_search.py +215 -0
- gobby/mcp_proxy/tools/tasks/_session.py +125 -0
- gobby/mcp_proxy/tools/workflows.py +973 -0
- gobby/mcp_proxy/tools/worktrees.py +1264 -0
- gobby/mcp_proxy/transports/__init__.py +0 -0
- gobby/mcp_proxy/transports/base.py +95 -0
- gobby/mcp_proxy/transports/factory.py +44 -0
- gobby/mcp_proxy/transports/http.py +139 -0
- gobby/mcp_proxy/transports/stdio.py +213 -0
- gobby/mcp_proxy/transports/websocket.py +136 -0
- gobby/memory/backends/__init__.py +116 -0
- gobby/memory/backends/mem0.py +408 -0
- gobby/memory/backends/memu.py +485 -0
- gobby/memory/backends/null.py +111 -0
- gobby/memory/backends/openmemory.py +537 -0
- gobby/memory/backends/sqlite.py +304 -0
- gobby/memory/context.py +87 -0
- gobby/memory/manager.py +1001 -0
- gobby/memory/protocol.py +451 -0
- gobby/memory/search/__init__.py +66 -0
- gobby/memory/search/text.py +127 -0
- gobby/memory/viz.py +258 -0
- gobby/prompts/__init__.py +13 -0
- gobby/prompts/defaults/expansion/system.md +119 -0
- gobby/prompts/defaults/expansion/user.md +48 -0
- gobby/prompts/defaults/external_validation/agent.md +72 -0
- gobby/prompts/defaults/external_validation/external.md +63 -0
- gobby/prompts/defaults/external_validation/spawn.md +83 -0
- gobby/prompts/defaults/external_validation/system.md +6 -0
- gobby/prompts/defaults/features/import_mcp.md +22 -0
- gobby/prompts/defaults/features/import_mcp_github.md +17 -0
- gobby/prompts/defaults/features/import_mcp_search.md +16 -0
- gobby/prompts/defaults/features/recommend_tools.md +32 -0
- gobby/prompts/defaults/features/recommend_tools_hybrid.md +35 -0
- gobby/prompts/defaults/features/recommend_tools_llm.md +30 -0
- gobby/prompts/defaults/features/server_description.md +20 -0
- gobby/prompts/defaults/features/server_description_system.md +6 -0
- gobby/prompts/defaults/features/task_description.md +31 -0
- gobby/prompts/defaults/features/task_description_system.md +6 -0
- gobby/prompts/defaults/features/tool_summary.md +17 -0
- gobby/prompts/defaults/features/tool_summary_system.md +6 -0
- gobby/prompts/defaults/research/step.md +58 -0
- gobby/prompts/defaults/validation/criteria.md +47 -0
- gobby/prompts/defaults/validation/validate.md +38 -0
- gobby/prompts/loader.py +346 -0
- gobby/prompts/models.py +113 -0
- gobby/py.typed +0 -0
- gobby/runner.py +488 -0
- gobby/search/__init__.py +23 -0
- gobby/search/protocol.py +104 -0
- gobby/search/tfidf.py +232 -0
- gobby/servers/__init__.py +7 -0
- gobby/servers/http.py +636 -0
- gobby/servers/models.py +31 -0
- gobby/servers/routes/__init__.py +23 -0
- gobby/servers/routes/admin.py +416 -0
- gobby/servers/routes/dependencies.py +118 -0
- gobby/servers/routes/mcp/__init__.py +24 -0
- gobby/servers/routes/mcp/hooks.py +135 -0
- gobby/servers/routes/mcp/plugins.py +121 -0
- gobby/servers/routes/mcp/tools.py +1337 -0
- gobby/servers/routes/mcp/webhooks.py +159 -0
- gobby/servers/routes/sessions.py +582 -0
- gobby/servers/websocket.py +766 -0
- gobby/sessions/__init__.py +13 -0
- gobby/sessions/analyzer.py +322 -0
- gobby/sessions/lifecycle.py +240 -0
- gobby/sessions/manager.py +563 -0
- gobby/sessions/processor.py +225 -0
- gobby/sessions/summary.py +532 -0
- gobby/sessions/transcripts/__init__.py +41 -0
- gobby/sessions/transcripts/base.py +125 -0
- gobby/sessions/transcripts/claude.py +386 -0
- gobby/sessions/transcripts/codex.py +143 -0
- gobby/sessions/transcripts/gemini.py +195 -0
- gobby/storage/__init__.py +21 -0
- gobby/storage/agents.py +409 -0
- gobby/storage/artifact_classifier.py +341 -0
- gobby/storage/artifacts.py +285 -0
- gobby/storage/compaction.py +67 -0
- gobby/storage/database.py +357 -0
- gobby/storage/inter_session_messages.py +194 -0
- gobby/storage/mcp.py +680 -0
- gobby/storage/memories.py +562 -0
- gobby/storage/merge_resolutions.py +550 -0
- gobby/storage/migrations.py +860 -0
- gobby/storage/migrations_legacy.py +1359 -0
- gobby/storage/projects.py +166 -0
- gobby/storage/session_messages.py +251 -0
- gobby/storage/session_tasks.py +97 -0
- gobby/storage/sessions.py +817 -0
- gobby/storage/task_dependencies.py +223 -0
- gobby/storage/tasks/__init__.py +42 -0
- gobby/storage/tasks/_aggregates.py +180 -0
- gobby/storage/tasks/_crud.py +449 -0
- gobby/storage/tasks/_id.py +104 -0
- gobby/storage/tasks/_lifecycle.py +311 -0
- gobby/storage/tasks/_manager.py +889 -0
- gobby/storage/tasks/_models.py +300 -0
- gobby/storage/tasks/_ordering.py +119 -0
- gobby/storage/tasks/_path_cache.py +110 -0
- gobby/storage/tasks/_queries.py +343 -0
- gobby/storage/tasks/_search.py +143 -0
- gobby/storage/workflow_audit.py +393 -0
- gobby/storage/worktrees.py +547 -0
- gobby/sync/__init__.py +29 -0
- gobby/sync/github.py +333 -0
- gobby/sync/linear.py +304 -0
- gobby/sync/memories.py +284 -0
- gobby/sync/tasks.py +641 -0
- gobby/tasks/__init__.py +8 -0
- gobby/tasks/build_verification.py +193 -0
- gobby/tasks/commits.py +633 -0
- gobby/tasks/context.py +747 -0
- gobby/tasks/criteria.py +342 -0
- gobby/tasks/enhanced_validator.py +226 -0
- gobby/tasks/escalation.py +263 -0
- gobby/tasks/expansion.py +626 -0
- gobby/tasks/external_validator.py +764 -0
- gobby/tasks/issue_extraction.py +171 -0
- gobby/tasks/prompts/expand.py +327 -0
- gobby/tasks/research.py +421 -0
- gobby/tasks/tdd.py +352 -0
- gobby/tasks/tree_builder.py +263 -0
- gobby/tasks/validation.py +712 -0
- gobby/tasks/validation_history.py +357 -0
- gobby/tasks/validation_models.py +89 -0
- gobby/tools/__init__.py +0 -0
- gobby/tools/summarizer.py +170 -0
- gobby/tui/__init__.py +5 -0
- gobby/tui/api_client.py +281 -0
- gobby/tui/app.py +327 -0
- gobby/tui/screens/__init__.py +25 -0
- gobby/tui/screens/agents.py +333 -0
- gobby/tui/screens/chat.py +450 -0
- gobby/tui/screens/dashboard.py +377 -0
- gobby/tui/screens/memory.py +305 -0
- gobby/tui/screens/metrics.py +231 -0
- gobby/tui/screens/orchestrator.py +904 -0
- gobby/tui/screens/sessions.py +412 -0
- gobby/tui/screens/tasks.py +442 -0
- gobby/tui/screens/workflows.py +289 -0
- gobby/tui/screens/worktrees.py +174 -0
- gobby/tui/widgets/__init__.py +21 -0
- gobby/tui/widgets/chat.py +210 -0
- gobby/tui/widgets/conductor.py +104 -0
- gobby/tui/widgets/menu.py +132 -0
- gobby/tui/widgets/message_panel.py +160 -0
- gobby/tui/widgets/review_gate.py +224 -0
- gobby/tui/widgets/task_tree.py +99 -0
- gobby/tui/widgets/token_budget.py +166 -0
- gobby/tui/ws_client.py +258 -0
- gobby/utils/__init__.py +3 -0
- gobby/utils/daemon_client.py +235 -0
- gobby/utils/git.py +222 -0
- gobby/utils/id.py +38 -0
- gobby/utils/json_helpers.py +161 -0
- gobby/utils/logging.py +376 -0
- gobby/utils/machine_id.py +135 -0
- gobby/utils/metrics.py +589 -0
- gobby/utils/project_context.py +182 -0
- gobby/utils/project_init.py +263 -0
- gobby/utils/status.py +256 -0
- gobby/utils/validation.py +80 -0
- gobby/utils/version.py +23 -0
- gobby/workflows/__init__.py +4 -0
- gobby/workflows/actions.py +1310 -0
- gobby/workflows/approval_flow.py +138 -0
- gobby/workflows/artifact_actions.py +103 -0
- gobby/workflows/audit_helpers.py +110 -0
- gobby/workflows/autonomous_actions.py +286 -0
- gobby/workflows/context_actions.py +394 -0
- gobby/workflows/definitions.py +130 -0
- gobby/workflows/detection_helpers.py +208 -0
- gobby/workflows/engine.py +485 -0
- gobby/workflows/evaluator.py +669 -0
- gobby/workflows/git_utils.py +96 -0
- gobby/workflows/hooks.py +169 -0
- gobby/workflows/lifecycle_evaluator.py +613 -0
- gobby/workflows/llm_actions.py +70 -0
- gobby/workflows/loader.py +333 -0
- gobby/workflows/mcp_actions.py +60 -0
- gobby/workflows/memory_actions.py +272 -0
- gobby/workflows/premature_stop.py +164 -0
- gobby/workflows/session_actions.py +139 -0
- gobby/workflows/state_actions.py +123 -0
- gobby/workflows/state_manager.py +104 -0
- gobby/workflows/stop_signal_actions.py +163 -0
- gobby/workflows/summary_actions.py +344 -0
- gobby/workflows/task_actions.py +249 -0
- gobby/workflows/task_enforcement_actions.py +901 -0
- gobby/workflows/templates.py +52 -0
- gobby/workflows/todo_actions.py +84 -0
- gobby/workflows/webhook.py +223 -0
- gobby/workflows/webhook_executor.py +399 -0
- gobby/worktrees/__init__.py +5 -0
- gobby/worktrees/git.py +690 -0
- gobby/worktrees/merge/__init__.py +20 -0
- gobby/worktrees/merge/conflict_parser.py +177 -0
- gobby/worktrees/merge/resolver.py +485 -0
- gobby-0.2.5.dist-info/METADATA +351 -0
- gobby-0.2.5.dist-info/RECORD +383 -0
- gobby-0.2.5.dist-info/WHEEL +5 -0
- gobby-0.2.5.dist-info/entry_points.txt +2 -0
- gobby-0.2.5.dist-info/licenses/LICENSE.md +193 -0
- gobby-0.2.5.dist-info/top_level.txt +1 -0
gobby/workflows/task_enforcement_actions.py
@@ -0,0 +1,901 @@
"""
Task enforcement actions for workflow engine.

Provides actions that enforce task tracking before allowing certain tools,
and enforce task completion before allowing agent to stop.
"""

import logging
import subprocess  # nosec B404 - subprocess needed for git commands
from typing import TYPE_CHECKING, Any

from gobby.mcp_proxy.tools.task_readiness import is_descendant_of

if TYPE_CHECKING:
    from gobby.config.app import DaemonConfig
    from gobby.storage.session_tasks import SessionTaskManager
    from gobby.storage.sessions import LocalSessionManager
    from gobby.storage.tasks import LocalTaskManager
    from gobby.workflows.definitions import WorkflowState

logger = logging.getLogger(__name__)


def _get_dirty_files(project_path: str | None = None) -> set[str]:
    """
    Get the set of dirty files from git status --porcelain.

    Excludes .gobby/ files from the result.

    Args:
        project_path: Path to the project directory

    Returns:
        Set of dirty file paths (relative to repo root)
    """
    if project_path is None:
        logger.warning(
            "_get_dirty_files: project_path is None, git status will use daemon's cwd "
            "which may not be the project directory"
        )

    try:
        result = subprocess.run(  # nosec B603 B607 - hardcoded git command
            ["git", "status", "--porcelain"],
            cwd=project_path,
            capture_output=True,
            text=True,
            timeout=10,
        )

        if result.returncode != 0:
            logger.warning(f"_get_dirty_files: git status failed: {result.stderr}")
            return set()

        dirty_files = set()
        # Split by newline first, don't strip() the whole string as it removes
        # the leading space from git status format (e.g., " M file.py")
        for line in result.stdout.split("\n"):
            line = line.rstrip()  # Remove trailing whitespace only
            if not line:
                continue
            # Format is "XY filename" or "XY filename -> newname" for renames
            # Skip the status prefix (first 3 chars: 2 status chars + space)
            filepath = line[3:].split(" -> ")[0]  # Handle renames
            # Exclude .gobby/ files
            if not filepath.startswith(".gobby/"):
                dirty_files.add(filepath)

        return dirty_files

    except subprocess.TimeoutExpired:
        logger.warning("_get_dirty_files: git status timed out")
        return set()
    except FileNotFoundError:
        logger.warning("_get_dirty_files: git not found")
        return set()
    except Exception as e:
        logger.error(f"_get_dirty_files: Error running git status: {e}")
        return set()

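The parsing above deliberately keeps the two status characters and their separator intact before slicing, so renames resolve to their pre-rename path and .gobby/ bookkeeping files are dropped. A standalone sketch of that slicing, using made-up porcelain output:

# Illustrative sample of `git status --porcelain` output; the paths are invented.
sample = " M gobby/cli/tasks/ai.py\n?? notes.txt\nR  old_name.py -> new_name.py\n M .gobby/state.json\n"

dirty: set[str] = set()
for line in sample.split("\n"):
    line = line.rstrip()
    if not line:
        continue
    path = line[3:].split(" -> ")[0]  # drop the "XY " prefix, keep the pre-rename path
    if not path.startswith(".gobby/"):
        dirty.add(path)

print(dirty)  # {'gobby/cli/tasks/ai.py', 'notes.txt', 'old_name.py'} (order may vary)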
def _get_task_session_liveness(
    task_id: str,
    session_task_manager: "SessionTaskManager | None",
    session_manager: "LocalSessionManager | None",
    exclude_session_id: str | None = None,
) -> bool:
    """
    Check if a task is currently being worked on by an active session.

    Args:
        task_id: The task ID to check
        session_task_manager: Manager to look up session-task links
        session_manager: Manager to check session status
        exclude_session_id: ID of session to exclude from check (e.g. current one)

    Returns:
        True if an active session (status='active') is linked to this task.
    """
    if not session_task_manager or not session_manager:
        return False

    try:
        # Get all sessions linked to this task
        linked_sessions = session_task_manager.get_task_sessions(task_id)

        for link in linked_sessions:
            session_id = link.get("session_id")
            if not session_id or session_id == exclude_session_id:
                continue

            # Check if session is truly active
            session = session_manager.get(session_id)
            if session and session.status == "active":
                return True

        return False
    except Exception as e:
        logger.warning(f"_get_task_session_liveness: Error checking liveness for {task_id}: {e}")
        return False


async def capture_baseline_dirty_files(
    workflow_state: "WorkflowState | None",
    project_path: str | None = None,
) -> dict[str, Any] | None:
    """
    Capture current dirty files as baseline for session-aware detection.

    Called on session_start to record pre-existing dirty files. The
    require_commit_before_stop action will compare against this baseline
    to detect only NEW dirty files made during the session.

    Args:
        workflow_state: Workflow state to store baseline in
        project_path: Path to the project directory for git status check

    Returns:
        Dict with captured baseline info, or None if no workflow_state
    """
    if not workflow_state:
        logger.debug("capture_baseline_dirty_files: No workflow_state, skipping")
        return None

    dirty_files = _get_dirty_files(project_path)

    # Store as a list in workflow state (sets aren't JSON serializable)
    workflow_state.variables["baseline_dirty_files"] = list(dirty_files)

    # Log for debugging baseline capture issues
    files_preview = list(dirty_files)[:5]
    logger.info(
        f"capture_baseline_dirty_files: project_path={project_path}, "
        f"captured {len(dirty_files)} files: {files_preview}"
    )

    return {
        "baseline_captured": True,
        "file_count": len(dirty_files),
        "files": list(dirty_files),
    }


async def require_commit_before_stop(
    workflow_state: "WorkflowState | None",
    project_path: str | None = None,
    task_manager: "LocalTaskManager | None" = None,
) -> dict[str, Any] | None:
    """
    Block stop if there's an in_progress task with uncommitted changes.

    This action is designed for on_stop triggers to enforce that agents
    commit their work and close tasks before stopping.

    Args:
        workflow_state: Workflow state with variables (claimed_task_id, etc.)
        project_path: Path to the project directory for git status check
        task_manager: LocalTaskManager to verify task status

    Returns:
        Dict with decision="block" and reason if task has uncommitted changes,
        or None to allow the stop.
    """
    if not workflow_state:
        logger.debug("require_commit_before_stop: No workflow_state, allowing")
        return None

    claimed_task_id = workflow_state.variables.get("claimed_task_id")
    if not claimed_task_id:
        logger.debug("require_commit_before_stop: No claimed task, allowing")
        return None

    # Verify the task is actually still in_progress (not just cached in workflow state)
    if task_manager:
        task = task_manager.get_task(claimed_task_id)
        if not task or task.status != "in_progress":
            # Task was changed - clear the stale workflow state
            logger.debug(
                f"require_commit_before_stop: Task '{claimed_task_id}' is no longer "
                f"in_progress (status={task.status if task else 'not found'}), clearing state"
            )
            workflow_state.variables["claimed_task_id"] = None
            workflow_state.variables["task_claimed"] = False
            return None

    # Check for uncommitted changes using baseline-aware comparison
    current_dirty = _get_dirty_files(project_path)

    if not current_dirty:
        logger.debug("require_commit_before_stop: No uncommitted changes, allowing")
        return None

    # Get baseline dirty files captured at session start
    baseline_dirty = set(workflow_state.variables.get("baseline_dirty_files", []))

    # Calculate NEW dirty files (not in baseline)
    new_dirty = current_dirty - baseline_dirty

    if not new_dirty:
        logger.debug(
            f"require_commit_before_stop: All {len(current_dirty)} dirty files were pre-existing "
            f"(in baseline), allowing"
        )
        return None

    logger.debug(
        f"require_commit_before_stop: Found {len(new_dirty)} new dirty files "
        f"(baseline had {len(baseline_dirty)}, current has {len(current_dirty)})"
    )

    # Track how many times we've blocked to prevent infinite loops
    block_count = workflow_state.variables.get("_commit_block_count", 0)
    if block_count >= 3:
        logger.warning(
            f"require_commit_before_stop: Reached max block count ({block_count}), allowing"
        )
        return None

    workflow_state.variables["_commit_block_count"] = block_count + 1

    # Block - agent needs to commit and close
    logger.info(
        f"require_commit_before_stop: Blocking stop - task '{claimed_task_id}' "
        f"has {len(new_dirty)} uncommitted changes"
    )

    # Build list of new dirty files for the message (limit to 10 for readability)
    new_dirty_list = sorted(new_dirty)[:10]
    files_display = "\n".join(f" - {f}" for f in new_dirty_list)
    if len(new_dirty) > 10:
        files_display += f"\n ... and {len(new_dirty) - 10} more files"

    return {
        "decision": "block",
        "reason": (
            f"Task '{claimed_task_id}' is in_progress with {len(new_dirty)} uncommitted "
            f"changes made during this session:\n{files_display}\n\n"
            f"Before stopping, commit your changes and close the task:\n"
            f"1. Commit with [{claimed_task_id}] in the message\n"
            f'2. Close the task: close_task(task_id="{claimed_task_id}", commit_sha="...")'
        ),
    }

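A minimal sketch of how the session_start baseline and the on_stop check above fit together. It is illustrative only: it assumes the gobby wheel shown in this diff is installed, and it stands in a plain namespace for WorkflowState, since these actions only touch its .variables dict.

import asyncio
from types import SimpleNamespace

from gobby.workflows.task_enforcement_actions import (
    capture_baseline_dirty_files,
    require_commit_before_stop,
)


async def demo() -> None:
    state = SimpleNamespace(variables={})  # stand-in for the real WorkflowState

    # session_start: record pre-existing dirty files as the baseline
    await capture_baseline_dirty_files(state, project_path=".")

    # later in the session the agent claims a task (hypothetical id)
    state.variables["claimed_task_id"] = "task-123"
    state.variables["task_claimed"] = True

    # on_stop: None means the stop is allowed; a dict with decision="block"
    # means new uncommitted files appeared since the baseline
    print(await require_commit_before_stop(state, project_path="."))


asyncio.run(demo())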
async def require_task_review_or_close_before_stop(
    workflow_state: "WorkflowState | None",
    task_manager: "LocalTaskManager | None" = None,
    project_id: str | None = None,
    **kwargs: Any,
) -> dict[str, Any] | None:
    """Block stop if session has an in_progress task.

    Agents must close their task (or send to review) before stopping.
    The close_task() validation already requires a commit, so we don't
    need to check for uncommitted changes here - that's handled by
    require_commit_before_stop if needed.

    Checks both:
    1. claimed_task_id - task explicitly claimed via update_task(status="in_progress")
    2. session_task - task(s) assigned via set_variable (fallback if no claimed_task_id)

    Args:
        workflow_state: Workflow state with variables (claimed_task_id, etc.)
        task_manager: LocalTaskManager to verify task status
        project_id: Project ID for resolving task references (#N, N formats)
        **kwargs: Accepts additional kwargs for compatibility

    Returns:
        Dict with decision="block" and reason if task is still in_progress,
        or None to allow the stop.
    """
    if not workflow_state:
        logger.debug("require_task_review_or_close_before_stop: No workflow_state, allowing")
        return None

    # 1. Check claimed_task_id first (existing behavior)
    claimed_task_id = workflow_state.variables.get("claimed_task_id")

    # 2. If no claimed task, fall back to session_task
    if not claimed_task_id and task_manager:
        session_task = workflow_state.variables.get("session_task")
        if session_task and session_task != "*":
            # Normalize to list
            task_ids = [session_task] if isinstance(session_task, str) else session_task

            if isinstance(task_ids, list):
                for task_id in task_ids:
                    try:
                        task = task_manager.get_task(task_id, project_id=project_id)
                    except ValueError:
                        continue
                    if task and task.status == "in_progress":
                        claimed_task_id = task_id
                        logger.debug(
                            f"require_task_review_or_close_before_stop: Found in_progress "
                            f"session_task '{task_id}'"
                        )
                        break
                    # Also check subtasks
                    if task:
                        subtasks = task_manager.list_tasks(parent_task_id=task.id)
                        for subtask in subtasks:
                            if subtask.status == "in_progress":
                                claimed_task_id = subtask.id
                                logger.debug(
                                    f"require_task_review_or_close_before_stop: Found in_progress "
                                    f"subtask '{subtask.id}' under session_task '{task_id}'"
                                )
                                break
                        if claimed_task_id:
                            break

    if not claimed_task_id:
        logger.debug("require_task_review_or_close_before_stop: No claimed task, allowing")
        return None

    if not task_manager:
        logger.debug("require_task_review_or_close_before_stop: No task_manager, allowing")
        return None

    try:
        task = task_manager.get_task(claimed_task_id, project_id=project_id)
        if not task:
            # Task not found - clear stale workflow state and allow
            logger.debug(
                f"require_task_review_or_close_before_stop: Task '{claimed_task_id}' not found, "
                f"clearing state"
            )
            workflow_state.variables["claimed_task_id"] = None
            workflow_state.variables["task_claimed"] = False
            return None

        if task.status != "in_progress":
            # Task is closed or in review - allow stop
            logger.debug(
                f"require_task_review_or_close_before_stop: Task '{claimed_task_id}' "
                f"status={task.status}, allowing"
            )
            # Clear stale workflow state
            workflow_state.variables["claimed_task_id"] = None
            workflow_state.variables["task_claimed"] = False
            return None

        # Task is still in_progress - block the stop
        logger.info(
            f"require_task_review_or_close_before_stop: Blocking stop - task "
            f"'{claimed_task_id}' is still in_progress"
        )

        return {
            "decision": "block",
            "reason": (
                f"Task '{claimed_task_id}' is still in_progress. "
                f"Close it with close_task() before stopping, or set to review "
                f"if user intervention is needed."
            ),
            "task_id": claimed_task_id,
            "task_status": task.status,
        }

    except Exception as e:
        logger.warning(
            f"require_task_review_or_close_before_stop: Failed to check task status: {e}"
        )
        # Allow stop if we can't check - don't block on errors
        return None


async def require_task_complete(
    task_manager: "LocalTaskManager | None",
    session_id: str,
    task_ids: list[str] | None,
    event_data: dict[str, Any] | None = None,
    project_id: str | None = None,
    workflow_state: "WorkflowState | None" = None,
) -> dict[str, Any] | None:
    """
    Block agent from stopping until task(s) (and their subtasks) are complete.

    This action is designed for on_stop triggers to enforce that the
    agent completes all subtasks under specified task(s) before stopping.

    Supports:
    - Single task: ["#47"]
    - Multiple tasks: ["#47", "#48"]
    - Wildcard mode handled by caller (passes ready tasks as list)

    Logic per task:
    1. If task has incomplete subtasks and agent has no claimed task → suggest next subtask
    2. If task has incomplete subtasks and agent has claimed task → remind to finish it
    3. If all subtasks done but task not closed → remind to close the task
    4. If task is closed → move to next task in list

    Args:
        task_manager: LocalTaskManager for querying tasks
        session_id: Current session ID
        task_ids: List of task IDs to enforce completion on
        event_data: Hook event data
        project_id: Optional project ID for scoping
        workflow_state: Workflow state with variables (task_claimed, etc.)

    Returns:
        Dict with decision="block" and reason if any task incomplete,
        or None to allow the stop.
    """
    if not task_ids:
        logger.debug("require_task_complete: No task_ids specified, allowing")
        return None

    if not task_manager:
        logger.debug("require_task_complete: No task_manager available, allowing")
        return None

    # Track how many times we've blocked in this session
    block_count = 0
    if workflow_state:
        block_count = workflow_state.variables.get("_task_block_count", 0)

    # Safety valve: after 5 blocks, allow to prevent infinite loop
    if block_count >= 5:
        logger.warning(
            f"require_task_complete: Reached max block count ({block_count}), allowing stop"
        )
        return None

    # Check if agent has a claimed task this session
    has_claimed_task = False
    claimed_task_id = None
    if workflow_state:
        has_claimed_task = workflow_state.variables.get("task_claimed", False)
        claimed_task_id = workflow_state.variables.get("claimed_task_id")

    try:
        # Collect incomplete tasks across all specified task IDs
        all_incomplete: list[tuple[Any, list[Any]]] = []  # (parent_task, incomplete_subtasks)

        for task_id in task_ids:
            task = task_manager.get_task(task_id)
            if not task:
                logger.warning(f"require_task_complete: Task '{task_id}' not found, skipping")
                continue

            # If task is already closed, skip it
            if task.status == "closed":
                logger.debug(f"require_task_complete: Task '{task_id}' is closed, skipping")
                continue

            # Get all subtasks under this task
            subtasks = task_manager.list_tasks(parent_task_id=task_id)

            # Find incomplete subtasks
            incomplete = [t for t in subtasks if t.status != "closed"]

            # If task itself is incomplete (no subtasks or has incomplete subtasks)
            if not subtasks or incomplete:
                all_incomplete.append((task, incomplete))

        # If all tasks are complete, allow stop
        if not all_incomplete:
            logger.debug("require_task_complete: All specified tasks are complete, allowing")
            return None

        # Increment block count
        if workflow_state:
            workflow_state.variables["_task_block_count"] = block_count + 1

        # Get the first incomplete task to report on
        parent_task, incomplete = all_incomplete[0]
        task_id = parent_task.id
        remaining_tasks = len(all_incomplete)

        # Build suffix for multiple tasks
        multi_task_suffix = ""
        if remaining_tasks > 1:
            multi_task_suffix = f"\n\n({remaining_tasks} tasks remaining in total)"

        # Case 1: No incomplete subtasks, but task not closed (leaf task or parent with all done)
        if not incomplete:
            logger.info(f"require_task_complete: Task '{task_id}' needs closing")
            return {
                "decision": "block",
                "reason": (
                    f"Task '{parent_task.title}' is ready to close.\n"
                    f'close_task(task_id="{task_id}")'
                    f"{multi_task_suffix}"
                ),
            }

        # Case 2: Has incomplete subtasks, agent has no claimed task
        if not has_claimed_task:
            logger.info(
                f"require_task_complete: No claimed task, {len(incomplete)} incomplete subtasks"
            )
            return {
                "decision": "block",
                "reason": (
                    f"'{parent_task.title}' has {len(incomplete)} incomplete subtask(s).\n\n"
                    f"Use suggest_next_task() to find the best task to work on next, "
                    f"and continue working without requiring confirmation from the user."
                    f"{multi_task_suffix}"
                ),
            }

        # Case 3: Has claimed task but subtasks still incomplete
        if has_claimed_task and incomplete:
            # Check if the claimed task is under this parent
            claimed_under_parent = any(t.id == claimed_task_id for t in incomplete)

            if claimed_under_parent:
                logger.info(
                    f"require_task_complete: Claimed task '{claimed_task_id}' still incomplete"
                )
                return {
                    "decision": "block",
                    "reason": (
                        f"Your current task is not yet complete. "
                        f"Finish and close it before stopping:\n"
                        f'close_task(task_id="{claimed_task_id}")\n\n'
                        f"'{parent_task.title}' still has {len(incomplete)} incomplete subtask(s)."
                        f"{multi_task_suffix}"
                    ),
                }
            else:
                # Claimed task is not under this parent - remind about parent work
                logger.info("require_task_complete: Claimed task not under parent, redirecting")
                return {
                    "decision": "block",
                    "reason": (
                        f"'{parent_task.title}' has {len(incomplete)} incomplete subtask(s).\n\n"
                        f"Use suggest_next_task() to find the best task to work on next, "
                        f"and continue working without requiring confirmation from the user."
                        f"{multi_task_suffix}"
                    ),
                }

        # Fallback: shouldn't reach here, but block with generic message
        logger.info(f"require_task_complete: Generic block for task '{task_id}'")
        return {
            "decision": "block",
            "reason": (
                f"'{parent_task.title}' is not yet complete. "
                f"{len(incomplete)} subtask(s) remaining."
                f"{multi_task_suffix}"
            ),
        }

    except Exception as e:
        logger.error(f"require_task_complete: Error checking tasks: {e}")
        # On error, allow to avoid blocking legitimate work
        return None


async def require_active_task(
    task_manager: "LocalTaskManager | None",
    session_id: str,
    config: "DaemonConfig | None",
    event_data: dict[str, Any] | None,
    project_id: str | None = None,
    workflow_state: "WorkflowState | None" = None,
    session_manager: "LocalSessionManager | None" = None,
    session_task_manager: "SessionTaskManager | None" = None,
) -> dict[str, Any] | None:
    """
    Check if an active task exists before allowing protected tools.

    This action is designed to be used in on_before_tool triggers to enforce
    that agents create or start a gobby-task before modifying files.

    Session-scoped enforcement:
    - First checks if `task_claimed` variable is True in workflow state
    - If True, allows immediately (agent already claimed a task this session)
    - If False, falls back to project-wide DB check for helpful messaging

    Args:
        task_manager: LocalTaskManager for querying tasks
        session_id: Current session ID
        config: DaemonConfig with workflow settings
        event_data: Hook event data containing tool_name
        project_id: Optional project ID to filter tasks by project scope
        workflow_state: Optional workflow state to check task_claimed variable
        session_manager: Optional session manager for liveness checks
        session_task_manager: Optional session-task manager for liveness checks

    Returns:
        Dict with decision="block" if no active task and tool is protected,
        or None to allow the tool.
    """
    # Check if feature is enabled
    # Precedence: workflow_state variables > config.yaml
    # (workflow_state already has step > lifecycle precedence merged)
    require_task = None

    # First check workflow state variables (step workflow > lifecycle workflow)
    if workflow_state:
        require_task = workflow_state.variables.get("require_task_before_edit")
        if require_task is not None:
            logger.debug(
                f"require_active_task: Using workflow variable require_task_before_edit={require_task}"
            )

    # Fall back to config.yaml if not set in workflow variables
    if require_task is None and config:
        require_task = config.workflow.require_task_before_edit
        logger.debug(
            f"require_active_task: Using config.yaml require_task_before_edit={require_task}"
        )

    # If still None (no config), default to False (allow)
    if require_task is None:
        logger.debug("require_active_task: No config source, allowing")
        return None

    if not require_task:
        logger.debug("require_active_task: Feature disabled, allowing")
        return None

    # Get the tool being called
    if not event_data:
        logger.debug("require_active_task: No event_data, allowing")
        return None

    tool_name = event_data.get("tool_name")
    if not tool_name:
        logger.debug("require_active_task: No tool_name in event_data, allowing")
        return None

    # Check if this tool is protected (always from config.yaml)
    protected_tools = (
        config.workflow.protected_tools if config else ["Edit", "Write", "Update", "NotebookEdit"]
    )
    if tool_name not in protected_tools:
        logger.debug(f"require_active_task: Tool '{tool_name}' not protected, allowing")
        return None

    # Tool is protected - but check for plan mode exceptions first

    # Check if target is a Claude Code plan file (stored in ~/.claude/plans/)
    # This allows writes during plan mode without requiring a task
    tool_input = event_data.get("tool_input", {}) or {}
    file_path = tool_input.get("file_path", "")
    if file_path and "/.claude/plans/" in file_path:
        logger.debug(f"require_active_task: Target is Claude plan file '{file_path}', allowing")
        return None

    # Check for plan_mode variable (set via EnterPlanMode tool detection or manually)
    if workflow_state and workflow_state.variables.get("plan_mode"):
        logger.debug(f"require_active_task: plan_mode=True in session {session_id}, allowing")
        return None

    # Check for active task

    # Session-scoped check: task_claimed variable (set by AFTER_TOOL detection)
    # This is the primary enforcement - each session must explicitly claim a task
    if workflow_state and workflow_state.variables.get("task_claimed"):
        logger.debug(f"require_active_task: task_claimed=True in session {session_id}, allowing")
        return None

    # Fallback: Check for any in_progress task in the project
    # This provides helpful messaging about existing tasks but is NOT sufficient
    # for session-scoped enforcement (concurrent sessions shouldn't free-ride)
    project_task_hint = ""

    if task_manager is None:
        logger.debug(
            f"require_active_task: task_manager unavailable, skipping DB fallback check "
            f"(project_id={project_id}, session_id={session_id})"
        )
    else:
        try:
            project_tasks = task_manager.list_tasks(
                project_id=project_id,
                status="in_progress",
                limit=1,
            )

            if project_tasks:
                task = project_tasks[0]
                task_ref = f"#{task.seq_num}" if task.seq_num else task.id
                project_task_hint = (
                    f"\n\nNote: Task {task_ref} ({task.title}) "
                    f"is in_progress but wasn't claimed by this session. "
                    f'Use `update_task(task_id="{task.id}", status="in_progress")` '
                    f"to claim it for this session."
                )
                logger.debug(
                    f"require_active_task: Found project task {task_ref} but "
                    f"session hasn't claimed it"
                )

                # Check liveness of the candidate task
                is_live = _get_task_session_liveness(
                    task.id, session_task_manager, session_manager, exclude_session_id=session_id
                )

                if is_live:
                    project_task_hint = (
                        f"\n\nNote: Task {task_ref} ({task.title}) "
                        f"is in_progress, but it is **currently being worked on by another active session**. "
                        f"You should probably create a new task or subtask instead of interfering."
                    )
                else:
                    project_task_hint = (
                        f"\n\nNote: Task {task_ref} ({task.title}) "
                        f"is in_progress and appears unattended (no active session). "
                        f"If you are picking up this work, claim it: "
                        f'`update_task(task_id="{task.id}", status="in_progress")`.'
                    )

        except Exception as e:
            logger.error(f"require_active_task: Error querying tasks: {e}")
            # On error, allow to avoid blocking legitimate work
            return None

    # No task claimed this session - block the tool
    logger.info(
        f"require_active_task: Blocking '{tool_name}' - no task claimed for session {session_id}"
    )

    # Check if we've already shown the full error this session
    error_already_shown = False
    if workflow_state:
        error_already_shown = workflow_state.variables.get("task_error_shown", False)
        # Mark that we've shown the error (for next time)
        if not error_already_shown:
            workflow_state.variables["task_error_shown"] = True

    # Return short reminder if we've already shown the full error
    if error_already_shown:
        return {
            "decision": "block",
            "reason": "No task claimed. See previous **Task Required** error for instructions.",
            "inject_context": (
                f"**Task Required**: `{tool_name}` blocked. "
                f"Create or claim a task before editing files (see previous error for details)."
                f"{project_task_hint}"
            ),
        }

    # First time - show full instructions
    return {
        "decision": "block",
        "reason": (
            f"No task claimed for this session. Before using {tool_name}, please either:\n"
            f"- Create a task: call_tool(server_name='gobby-tasks', tool_name='create_task', arguments={{...}})\n"
            f"- Claim an existing task: call_tool(server_name='gobby-tasks', tool_name='update_task', "
            f"arguments={{'task_id': '...', 'status': 'in_progress'}})"
            f"{project_task_hint}"
        ),
        "inject_context": (
            f"**Task Required**: The `{tool_name}` tool is blocked until you claim a task for this session.\n\n"
            f"Each session must explicitly create or claim a task before modifying files:\n"
            f'1. **Create a new task**: `create_task(title="...", description="...")`\n'
            f'2. **Claim an existing task**: `update_task(task_id="...", status="in_progress")`\n\n'
            f"Use `list_ready_tasks()` to see available tasks."
            f"{project_task_hint}"
        ),
    }

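The gate above reads only a handful of event fields, so it can be probed in isolation. A rough sketch, not from the wheel itself: it switches the feature on through the workflow-state variable, leaves config and task_manager unset, and feeds a hypothetical Edit event; with no task claimed the call returns a block decision.

import asyncio
from types import SimpleNamespace

from gobby.workflows.task_enforcement_actions import require_active_task

event = {"tool_name": "Edit", "tool_input": {"file_path": "src/app.py"}}  # hypothetical payload
state = SimpleNamespace(variables={"require_task_before_edit": True})  # stand-in for WorkflowState

decision = asyncio.run(
    require_active_task(
        task_manager=None,  # skips the project-wide fallback lookup
        session_id="sess-1",  # hypothetical session id
        config=None,  # falls back to the default protected tool list
        event_data=event,
        workflow_state=state,
    )
)
print(decision["decision"] if decision else None)  # prints "block" until a task is claimed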
async def validate_session_task_scope(
    task_manager: "LocalTaskManager | None",
    workflow_state: "WorkflowState | None",
    event_data: dict[str, Any] | None = None,
) -> dict[str, Any] | None:
    """
    Block claiming a task that is not a descendant of session_task.

    This action is designed for on_before_tool triggers on update_task
    to enforce that agents only work on tasks within the session_task hierarchy.

    When session_task is set in workflow state, this action checks if the task
    being claimed (set to in_progress) is a descendant of session_task.

    Args:
        task_manager: LocalTaskManager for querying tasks
        workflow_state: Workflow state with session_task variable
        event_data: Hook event data containing tool_name and tool_input

    Returns:
        Dict with decision="block" if task is outside session_task scope,
        or None to allow the claim.
    """
    if not workflow_state:
        logger.debug("validate_session_task_scope: No workflow_state, allowing")
        return None

    if not task_manager:
        logger.debug("validate_session_task_scope: No task_manager, allowing")
        return None

    # Get session_task from workflow state
    session_task = workflow_state.variables.get("session_task")
    if not session_task:
        logger.debug("validate_session_task_scope: No session_task set, allowing")
        return None

    # Handle "*" wildcard - means all tasks are in scope
    if session_task == "*":
        logger.debug("validate_session_task_scope: session_task='*', allowing all tasks")
        return None

    # Normalize to list for uniform handling
    # session_task can be: string (single ID), list of IDs, or "*"
    if isinstance(session_task, str):
        session_task_ids = [session_task]
    elif isinstance(session_task, list):
        session_task_ids = session_task
    else:
        logger.warning(
            f"validate_session_task_scope: Invalid session_task type: {type(session_task)}"
        )
        return None

    # Empty list means no scope restriction
    if not session_task_ids:
        logger.debug("validate_session_task_scope: Empty session_task list, allowing")
        return None

    # Check if this is an update_task call setting status to in_progress
    if not event_data:
        logger.debug("validate_session_task_scope: No event_data, allowing")
        return None

    tool_name = event_data.get("tool_name")
    if tool_name != "update_task":
        logger.debug(f"validate_session_task_scope: Tool '{tool_name}' not update_task, allowing")
        return None

    tool_input = event_data.get("tool_input", {})
    arguments = tool_input.get("arguments", {}) or {}

    # Only check when setting status to in_progress (claiming)
    new_status = arguments.get("status")
    if new_status != "in_progress":
        logger.debug(
            f"validate_session_task_scope: Status '{new_status}' not in_progress, allowing"
        )
        return None

    task_id = arguments.get("task_id")
    if not task_id:
        logger.debug("validate_session_task_scope: No task_id in arguments, allowing")
        return None

    # Check if task is a descendant of ANY session_task
    for ancestor_id in session_task_ids:
        if is_descendant_of(task_manager, task_id, ancestor_id):
            logger.debug(
                f"validate_session_task_scope: Task '{task_id}' is descendant of "
                f"session_task '{ancestor_id}', allowing"
            )
            return None

    # Task is outside all session_task scopes - block
    logger.info(
        f"validate_session_task_scope: Blocking claim of task '{task_id}' - "
        f"not a descendant of any session_task: {session_task_ids}"
    )

    # Build error message with scope details
    if len(session_task_ids) == 1:
        session_task_obj = task_manager.get_task(session_task_ids[0])
        scope_desc = (
            f"'{session_task_obj.title}' ({session_task_ids[0]})"
            if session_task_obj
            else session_task_ids[0]
        )
        suggestion = f'Use `suggest_next_task(parent_id="{session_task_ids[0]}")` to find tasks within scope.'
    else:
        scope_desc = ", ".join(session_task_ids)
        suggestion = "Use `suggest_next_task()` with one of the scoped parent IDs to find tasks within scope."

    return {
        "decision": "block",
        "reason": (
            f"Cannot claim task '{task_id}' - it is not within the session_task scope.\n\n"
            f"This session is scoped to: {scope_desc}\n"
            f"Only tasks that are descendants of these epics/features can be claimed.\n\n"
            f"{suggestion}"
        ),
    }
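For reference, a sketch of the hook payload that validate_session_task_scope inspects, with invented IDs. The guard only engages when an update_task call sets status to in_progress; the claim is then allowed only if the task is a descendant (per is_descendant_of) of one of the configured session_task ancestors.

# Hypothetical payload; only these fields are read by validate_session_task_scope.
event_data = {
    "tool_name": "update_task",
    "tool_input": {
        "arguments": {"task_id": "task-456", "status": "in_progress"},
    },
}

# Scope comes from workflow_state.variables["session_task"]: a single ID,
# a list of IDs, or "*" for no restriction.
session_task_variants = ["epic-1", ["epic-1", "epic-2"], "*"]

# If "task-456" is outside every listed ancestor's subtree, the action returns
# {"decision": "block", "reason": "..."} and the claim is refused.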