gobby 0.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gobby/__init__.py +3 -0
- gobby/adapters/__init__.py +30 -0
- gobby/adapters/base.py +93 -0
- gobby/adapters/claude_code.py +276 -0
- gobby/adapters/codex.py +1292 -0
- gobby/adapters/gemini.py +343 -0
- gobby/agents/__init__.py +37 -0
- gobby/agents/codex_session.py +120 -0
- gobby/agents/constants.py +112 -0
- gobby/agents/context.py +362 -0
- gobby/agents/definitions.py +133 -0
- gobby/agents/gemini_session.py +111 -0
- gobby/agents/registry.py +618 -0
- gobby/agents/runner.py +968 -0
- gobby/agents/session.py +259 -0
- gobby/agents/spawn.py +916 -0
- gobby/agents/spawners/__init__.py +77 -0
- gobby/agents/spawners/base.py +142 -0
- gobby/agents/spawners/cross_platform.py +266 -0
- gobby/agents/spawners/embedded.py +225 -0
- gobby/agents/spawners/headless.py +226 -0
- gobby/agents/spawners/linux.py +125 -0
- gobby/agents/spawners/macos.py +277 -0
- gobby/agents/spawners/windows.py +308 -0
- gobby/agents/tty_config.py +319 -0
- gobby/autonomous/__init__.py +32 -0
- gobby/autonomous/progress_tracker.py +447 -0
- gobby/autonomous/stop_registry.py +269 -0
- gobby/autonomous/stuck_detector.py +383 -0
- gobby/cli/__init__.py +67 -0
- gobby/cli/__main__.py +8 -0
- gobby/cli/agents.py +529 -0
- gobby/cli/artifacts.py +266 -0
- gobby/cli/daemon.py +329 -0
- gobby/cli/extensions.py +526 -0
- gobby/cli/github.py +263 -0
- gobby/cli/init.py +53 -0
- gobby/cli/install.py +614 -0
- gobby/cli/installers/__init__.py +37 -0
- gobby/cli/installers/antigravity.py +65 -0
- gobby/cli/installers/claude.py +363 -0
- gobby/cli/installers/codex.py +192 -0
- gobby/cli/installers/gemini.py +294 -0
- gobby/cli/installers/git_hooks.py +377 -0
- gobby/cli/installers/shared.py +737 -0
- gobby/cli/linear.py +250 -0
- gobby/cli/mcp.py +30 -0
- gobby/cli/mcp_proxy.py +698 -0
- gobby/cli/memory.py +304 -0
- gobby/cli/merge.py +384 -0
- gobby/cli/projects.py +79 -0
- gobby/cli/sessions.py +622 -0
- gobby/cli/tasks/__init__.py +30 -0
- gobby/cli/tasks/_utils.py +658 -0
- gobby/cli/tasks/ai.py +1025 -0
- gobby/cli/tasks/commits.py +169 -0
- gobby/cli/tasks/crud.py +685 -0
- gobby/cli/tasks/deps.py +135 -0
- gobby/cli/tasks/labels.py +63 -0
- gobby/cli/tasks/main.py +273 -0
- gobby/cli/tasks/search.py +178 -0
- gobby/cli/tui.py +34 -0
- gobby/cli/utils.py +513 -0
- gobby/cli/workflows.py +927 -0
- gobby/cli/worktrees.py +481 -0
- gobby/config/__init__.py +129 -0
- gobby/config/app.py +551 -0
- gobby/config/extensions.py +167 -0
- gobby/config/features.py +472 -0
- gobby/config/llm_providers.py +98 -0
- gobby/config/logging.py +66 -0
- gobby/config/mcp.py +346 -0
- gobby/config/persistence.py +247 -0
- gobby/config/servers.py +141 -0
- gobby/config/sessions.py +250 -0
- gobby/config/tasks.py +784 -0
- gobby/hooks/__init__.py +104 -0
- gobby/hooks/artifact_capture.py +213 -0
- gobby/hooks/broadcaster.py +243 -0
- gobby/hooks/event_handlers.py +723 -0
- gobby/hooks/events.py +218 -0
- gobby/hooks/git.py +169 -0
- gobby/hooks/health_monitor.py +171 -0
- gobby/hooks/hook_manager.py +856 -0
- gobby/hooks/hook_types.py +575 -0
- gobby/hooks/plugins.py +813 -0
- gobby/hooks/session_coordinator.py +396 -0
- gobby/hooks/verification_runner.py +268 -0
- gobby/hooks/webhooks.py +339 -0
- gobby/install/claude/commands/gobby/bug.md +51 -0
- gobby/install/claude/commands/gobby/chore.md +51 -0
- gobby/install/claude/commands/gobby/epic.md +52 -0
- gobby/install/claude/commands/gobby/eval.md +235 -0
- gobby/install/claude/commands/gobby/feat.md +49 -0
- gobby/install/claude/commands/gobby/nit.md +52 -0
- gobby/install/claude/commands/gobby/ref.md +52 -0
- gobby/install/claude/hooks/HOOK_SCHEMAS.md +632 -0
- gobby/install/claude/hooks/hook_dispatcher.py +364 -0
- gobby/install/claude/hooks/validate_settings.py +102 -0
- gobby/install/claude/hooks-template.json +118 -0
- gobby/install/codex/hooks/hook_dispatcher.py +153 -0
- gobby/install/codex/prompts/forget.md +7 -0
- gobby/install/codex/prompts/memories.md +7 -0
- gobby/install/codex/prompts/recall.md +7 -0
- gobby/install/codex/prompts/remember.md +13 -0
- gobby/install/gemini/hooks/hook_dispatcher.py +268 -0
- gobby/install/gemini/hooks-template.json +138 -0
- gobby/install/shared/plugins/code_guardian.py +456 -0
- gobby/install/shared/plugins/example_notify.py +331 -0
- gobby/integrations/__init__.py +10 -0
- gobby/integrations/github.py +145 -0
- gobby/integrations/linear.py +145 -0
- gobby/llm/__init__.py +40 -0
- gobby/llm/base.py +120 -0
- gobby/llm/claude.py +578 -0
- gobby/llm/claude_executor.py +503 -0
- gobby/llm/codex.py +322 -0
- gobby/llm/codex_executor.py +513 -0
- gobby/llm/executor.py +316 -0
- gobby/llm/factory.py +34 -0
- gobby/llm/gemini.py +258 -0
- gobby/llm/gemini_executor.py +339 -0
- gobby/llm/litellm.py +287 -0
- gobby/llm/litellm_executor.py +303 -0
- gobby/llm/resolver.py +499 -0
- gobby/llm/service.py +236 -0
- gobby/mcp_proxy/__init__.py +29 -0
- gobby/mcp_proxy/actions.py +175 -0
- gobby/mcp_proxy/daemon_control.py +198 -0
- gobby/mcp_proxy/importer.py +436 -0
- gobby/mcp_proxy/lazy.py +325 -0
- gobby/mcp_proxy/manager.py +798 -0
- gobby/mcp_proxy/metrics.py +609 -0
- gobby/mcp_proxy/models.py +139 -0
- gobby/mcp_proxy/registries.py +215 -0
- gobby/mcp_proxy/schema_hash.py +381 -0
- gobby/mcp_proxy/semantic_search.py +706 -0
- gobby/mcp_proxy/server.py +549 -0
- gobby/mcp_proxy/services/__init__.py +0 -0
- gobby/mcp_proxy/services/fallback.py +306 -0
- gobby/mcp_proxy/services/recommendation.py +224 -0
- gobby/mcp_proxy/services/server_mgmt.py +214 -0
- gobby/mcp_proxy/services/system.py +72 -0
- gobby/mcp_proxy/services/tool_filter.py +231 -0
- gobby/mcp_proxy/services/tool_proxy.py +309 -0
- gobby/mcp_proxy/stdio.py +565 -0
- gobby/mcp_proxy/tools/__init__.py +27 -0
- gobby/mcp_proxy/tools/agents.py +1103 -0
- gobby/mcp_proxy/tools/artifacts.py +207 -0
- gobby/mcp_proxy/tools/hub.py +335 -0
- gobby/mcp_proxy/tools/internal.py +337 -0
- gobby/mcp_proxy/tools/memory.py +543 -0
- gobby/mcp_proxy/tools/merge.py +422 -0
- gobby/mcp_proxy/tools/metrics.py +283 -0
- gobby/mcp_proxy/tools/orchestration/__init__.py +23 -0
- gobby/mcp_proxy/tools/orchestration/cleanup.py +619 -0
- gobby/mcp_proxy/tools/orchestration/monitor.py +380 -0
- gobby/mcp_proxy/tools/orchestration/orchestrate.py +746 -0
- gobby/mcp_proxy/tools/orchestration/review.py +736 -0
- gobby/mcp_proxy/tools/orchestration/utils.py +16 -0
- gobby/mcp_proxy/tools/session_messages.py +1056 -0
- gobby/mcp_proxy/tools/task_dependencies.py +219 -0
- gobby/mcp_proxy/tools/task_expansion.py +591 -0
- gobby/mcp_proxy/tools/task_github.py +393 -0
- gobby/mcp_proxy/tools/task_linear.py +379 -0
- gobby/mcp_proxy/tools/task_orchestration.py +77 -0
- gobby/mcp_proxy/tools/task_readiness.py +522 -0
- gobby/mcp_proxy/tools/task_sync.py +351 -0
- gobby/mcp_proxy/tools/task_validation.py +843 -0
- gobby/mcp_proxy/tools/tasks/__init__.py +25 -0
- gobby/mcp_proxy/tools/tasks/_context.py +112 -0
- gobby/mcp_proxy/tools/tasks/_crud.py +516 -0
- gobby/mcp_proxy/tools/tasks/_factory.py +176 -0
- gobby/mcp_proxy/tools/tasks/_helpers.py +129 -0
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +517 -0
- gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +301 -0
- gobby/mcp_proxy/tools/tasks/_resolution.py +55 -0
- gobby/mcp_proxy/tools/tasks/_search.py +215 -0
- gobby/mcp_proxy/tools/tasks/_session.py +125 -0
- gobby/mcp_proxy/tools/workflows.py +973 -0
- gobby/mcp_proxy/tools/worktrees.py +1264 -0
- gobby/mcp_proxy/transports/__init__.py +0 -0
- gobby/mcp_proxy/transports/base.py +95 -0
- gobby/mcp_proxy/transports/factory.py +44 -0
- gobby/mcp_proxy/transports/http.py +139 -0
- gobby/mcp_proxy/transports/stdio.py +213 -0
- gobby/mcp_proxy/transports/websocket.py +136 -0
- gobby/memory/backends/__init__.py +116 -0
- gobby/memory/backends/mem0.py +408 -0
- gobby/memory/backends/memu.py +485 -0
- gobby/memory/backends/null.py +111 -0
- gobby/memory/backends/openmemory.py +537 -0
- gobby/memory/backends/sqlite.py +304 -0
- gobby/memory/context.py +87 -0
- gobby/memory/manager.py +1001 -0
- gobby/memory/protocol.py +451 -0
- gobby/memory/search/__init__.py +66 -0
- gobby/memory/search/text.py +127 -0
- gobby/memory/viz.py +258 -0
- gobby/prompts/__init__.py +13 -0
- gobby/prompts/defaults/expansion/system.md +119 -0
- gobby/prompts/defaults/expansion/user.md +48 -0
- gobby/prompts/defaults/external_validation/agent.md +72 -0
- gobby/prompts/defaults/external_validation/external.md +63 -0
- gobby/prompts/defaults/external_validation/spawn.md +83 -0
- gobby/prompts/defaults/external_validation/system.md +6 -0
- gobby/prompts/defaults/features/import_mcp.md +22 -0
- gobby/prompts/defaults/features/import_mcp_github.md +17 -0
- gobby/prompts/defaults/features/import_mcp_search.md +16 -0
- gobby/prompts/defaults/features/recommend_tools.md +32 -0
- gobby/prompts/defaults/features/recommend_tools_hybrid.md +35 -0
- gobby/prompts/defaults/features/recommend_tools_llm.md +30 -0
- gobby/prompts/defaults/features/server_description.md +20 -0
- gobby/prompts/defaults/features/server_description_system.md +6 -0
- gobby/prompts/defaults/features/task_description.md +31 -0
- gobby/prompts/defaults/features/task_description_system.md +6 -0
- gobby/prompts/defaults/features/tool_summary.md +17 -0
- gobby/prompts/defaults/features/tool_summary_system.md +6 -0
- gobby/prompts/defaults/research/step.md +58 -0
- gobby/prompts/defaults/validation/criteria.md +47 -0
- gobby/prompts/defaults/validation/validate.md +38 -0
- gobby/prompts/loader.py +346 -0
- gobby/prompts/models.py +113 -0
- gobby/py.typed +0 -0
- gobby/runner.py +488 -0
- gobby/search/__init__.py +23 -0
- gobby/search/protocol.py +104 -0
- gobby/search/tfidf.py +232 -0
- gobby/servers/__init__.py +7 -0
- gobby/servers/http.py +636 -0
- gobby/servers/models.py +31 -0
- gobby/servers/routes/__init__.py +23 -0
- gobby/servers/routes/admin.py +416 -0
- gobby/servers/routes/dependencies.py +118 -0
- gobby/servers/routes/mcp/__init__.py +24 -0
- gobby/servers/routes/mcp/hooks.py +135 -0
- gobby/servers/routes/mcp/plugins.py +121 -0
- gobby/servers/routes/mcp/tools.py +1337 -0
- gobby/servers/routes/mcp/webhooks.py +159 -0
- gobby/servers/routes/sessions.py +582 -0
- gobby/servers/websocket.py +766 -0
- gobby/sessions/__init__.py +13 -0
- gobby/sessions/analyzer.py +322 -0
- gobby/sessions/lifecycle.py +240 -0
- gobby/sessions/manager.py +563 -0
- gobby/sessions/processor.py +225 -0
- gobby/sessions/summary.py +532 -0
- gobby/sessions/transcripts/__init__.py +41 -0
- gobby/sessions/transcripts/base.py +125 -0
- gobby/sessions/transcripts/claude.py +386 -0
- gobby/sessions/transcripts/codex.py +143 -0
- gobby/sessions/transcripts/gemini.py +195 -0
- gobby/storage/__init__.py +21 -0
- gobby/storage/agents.py +409 -0
- gobby/storage/artifact_classifier.py +341 -0
- gobby/storage/artifacts.py +285 -0
- gobby/storage/compaction.py +67 -0
- gobby/storage/database.py +357 -0
- gobby/storage/inter_session_messages.py +194 -0
- gobby/storage/mcp.py +680 -0
- gobby/storage/memories.py +562 -0
- gobby/storage/merge_resolutions.py +550 -0
- gobby/storage/migrations.py +860 -0
- gobby/storage/migrations_legacy.py +1359 -0
- gobby/storage/projects.py +166 -0
- gobby/storage/session_messages.py +251 -0
- gobby/storage/session_tasks.py +97 -0
- gobby/storage/sessions.py +817 -0
- gobby/storage/task_dependencies.py +223 -0
- gobby/storage/tasks/__init__.py +42 -0
- gobby/storage/tasks/_aggregates.py +180 -0
- gobby/storage/tasks/_crud.py +449 -0
- gobby/storage/tasks/_id.py +104 -0
- gobby/storage/tasks/_lifecycle.py +311 -0
- gobby/storage/tasks/_manager.py +889 -0
- gobby/storage/tasks/_models.py +300 -0
- gobby/storage/tasks/_ordering.py +119 -0
- gobby/storage/tasks/_path_cache.py +110 -0
- gobby/storage/tasks/_queries.py +343 -0
- gobby/storage/tasks/_search.py +143 -0
- gobby/storage/workflow_audit.py +393 -0
- gobby/storage/worktrees.py +547 -0
- gobby/sync/__init__.py +29 -0
- gobby/sync/github.py +333 -0
- gobby/sync/linear.py +304 -0
- gobby/sync/memories.py +284 -0
- gobby/sync/tasks.py +641 -0
- gobby/tasks/__init__.py +8 -0
- gobby/tasks/build_verification.py +193 -0
- gobby/tasks/commits.py +633 -0
- gobby/tasks/context.py +747 -0
- gobby/tasks/criteria.py +342 -0
- gobby/tasks/enhanced_validator.py +226 -0
- gobby/tasks/escalation.py +263 -0
- gobby/tasks/expansion.py +626 -0
- gobby/tasks/external_validator.py +764 -0
- gobby/tasks/issue_extraction.py +171 -0
- gobby/tasks/prompts/expand.py +327 -0
- gobby/tasks/research.py +421 -0
- gobby/tasks/tdd.py +352 -0
- gobby/tasks/tree_builder.py +263 -0
- gobby/tasks/validation.py +712 -0
- gobby/tasks/validation_history.py +357 -0
- gobby/tasks/validation_models.py +89 -0
- gobby/tools/__init__.py +0 -0
- gobby/tools/summarizer.py +170 -0
- gobby/tui/__init__.py +5 -0
- gobby/tui/api_client.py +281 -0
- gobby/tui/app.py +327 -0
- gobby/tui/screens/__init__.py +25 -0
- gobby/tui/screens/agents.py +333 -0
- gobby/tui/screens/chat.py +450 -0
- gobby/tui/screens/dashboard.py +377 -0
- gobby/tui/screens/memory.py +305 -0
- gobby/tui/screens/metrics.py +231 -0
- gobby/tui/screens/orchestrator.py +904 -0
- gobby/tui/screens/sessions.py +412 -0
- gobby/tui/screens/tasks.py +442 -0
- gobby/tui/screens/workflows.py +289 -0
- gobby/tui/screens/worktrees.py +174 -0
- gobby/tui/widgets/__init__.py +21 -0
- gobby/tui/widgets/chat.py +210 -0
- gobby/tui/widgets/conductor.py +104 -0
- gobby/tui/widgets/menu.py +132 -0
- gobby/tui/widgets/message_panel.py +160 -0
- gobby/tui/widgets/review_gate.py +224 -0
- gobby/tui/widgets/task_tree.py +99 -0
- gobby/tui/widgets/token_budget.py +166 -0
- gobby/tui/ws_client.py +258 -0
- gobby/utils/__init__.py +3 -0
- gobby/utils/daemon_client.py +235 -0
- gobby/utils/git.py +222 -0
- gobby/utils/id.py +38 -0
- gobby/utils/json_helpers.py +161 -0
- gobby/utils/logging.py +376 -0
- gobby/utils/machine_id.py +135 -0
- gobby/utils/metrics.py +589 -0
- gobby/utils/project_context.py +182 -0
- gobby/utils/project_init.py +263 -0
- gobby/utils/status.py +256 -0
- gobby/utils/validation.py +80 -0
- gobby/utils/version.py +23 -0
- gobby/workflows/__init__.py +4 -0
- gobby/workflows/actions.py +1310 -0
- gobby/workflows/approval_flow.py +138 -0
- gobby/workflows/artifact_actions.py +103 -0
- gobby/workflows/audit_helpers.py +110 -0
- gobby/workflows/autonomous_actions.py +286 -0
- gobby/workflows/context_actions.py +394 -0
- gobby/workflows/definitions.py +130 -0
- gobby/workflows/detection_helpers.py +208 -0
- gobby/workflows/engine.py +485 -0
- gobby/workflows/evaluator.py +669 -0
- gobby/workflows/git_utils.py +96 -0
- gobby/workflows/hooks.py +169 -0
- gobby/workflows/lifecycle_evaluator.py +613 -0
- gobby/workflows/llm_actions.py +70 -0
- gobby/workflows/loader.py +333 -0
- gobby/workflows/mcp_actions.py +60 -0
- gobby/workflows/memory_actions.py +272 -0
- gobby/workflows/premature_stop.py +164 -0
- gobby/workflows/session_actions.py +139 -0
- gobby/workflows/state_actions.py +123 -0
- gobby/workflows/state_manager.py +104 -0
- gobby/workflows/stop_signal_actions.py +163 -0
- gobby/workflows/summary_actions.py +344 -0
- gobby/workflows/task_actions.py +249 -0
- gobby/workflows/task_enforcement_actions.py +901 -0
- gobby/workflows/templates.py +52 -0
- gobby/workflows/todo_actions.py +84 -0
- gobby/workflows/webhook.py +223 -0
- gobby/workflows/webhook_executor.py +399 -0
- gobby/worktrees/__init__.py +5 -0
- gobby/worktrees/git.py +690 -0
- gobby/worktrees/merge/__init__.py +20 -0
- gobby/worktrees/merge/conflict_parser.py +177 -0
- gobby/worktrees/merge/resolver.py +485 -0
- gobby-0.2.5.dist-info/METADATA +351 -0
- gobby-0.2.5.dist-info/RECORD +383 -0
- gobby-0.2.5.dist-info/WHEEL +5 -0
- gobby-0.2.5.dist-info/entry_points.txt +2 -0
- gobby-0.2.5.dist-info/licenses/LICENSE.md +193 -0
- gobby-0.2.5.dist-info/top_level.txt +1 -0
gobby/storage/tasks/_crud.py

@@ -0,0 +1,449 @@

```python
"""Core CRUD operations for tasks.

This module provides the core create, read, update operations for tasks.
Functions take a database protocol instance as their first parameter.
"""

import json
import logging
import sqlite3
from datetime import UTC, datetime
from typing import Any

from gobby.storage.database import DatabaseProtocol
from gobby.storage.tasks._id import generate_task_id, resolve_task_reference
from gobby.storage.tasks._models import (
    UNSET,
    Task,
    TaskIDCollisionError,
    TaskNotFoundError,
)

logger = logging.getLogger(__name__)


def create_task(
    db: DatabaseProtocol,
    project_id: str,
    title: str,
    description: str | None = None,
    parent_task_id: str | None = None,
    created_in_session_id: str | None = None,
    priority: int = 2,
    task_type: str = "task",
    assignee: str | None = None,
    labels: list[str] | None = None,
    category: str | None = None,
    complexity_score: int | None = None,
    estimated_subtasks: int | None = None,
    expansion_context: str | None = None,
    validation_criteria: str | None = None,
    use_external_validator: bool = False,
    workflow_name: str | None = None,
    verification: str | None = None,
    sequence_order: int | None = None,
    github_issue_number: int | None = None,
    github_pr_number: int | None = None,
    github_repo: str | None = None,
    linear_issue_id: str | None = None,
    linear_team_id: str | None = None,
    agent_name: str | None = None,
    reference_doc: str | None = None,
    requires_user_review: bool = False,
) -> str:
    """Create a new task with collision handling.

    Returns the task_id of the created task.
    """
    max_retries = 3
    now = datetime.now(UTC).isoformat()

    # Serialize labels
    labels_json = json.dumps(labels) if labels else None
    task_id = ""

    # Default validation status
    validation_status = "pending" if validation_criteria else None

    for attempt in range(max_retries + 1):
        try:
            task_id = generate_task_id(project_id, salt=str(attempt))

            with db.transaction() as conn:
                # Get next seq_num for this project (auto-increment per project)
                max_seq_row = conn.execute(
                    "SELECT MAX(seq_num) as max_seq FROM tasks WHERE project_id = ?",
                    (project_id,),
                ).fetchone()
                next_seq_num = ((max_seq_row["max_seq"] if max_seq_row else None) or 0) + 1

                conn.execute(
                    """
                    INSERT INTO tasks (
                        id, project_id, title, description, parent_task_id,
                        created_in_session_id, priority, task_type, assignee,
                        labels, status, created_at, updated_at,
                        validation_status, category, complexity_score,
                        estimated_subtasks, expansion_context,
                        validation_criteria, use_external_validator, validation_fail_count,
                        workflow_name, verification, sequence_order,
                        github_issue_number, github_pr_number, github_repo,
                        linear_issue_id, linear_team_id, seq_num, agent_name, reference_doc,
                        requires_user_review
                    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'open', ?, ?, ?, ?, ?, ?, ?, ?, ?, 0, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                    """,
                    (
                        task_id,
                        project_id,
                        title,
                        description,
                        parent_task_id,
                        created_in_session_id,
                        priority,
                        task_type,
                        assignee,
                        labels_json,
                        now,
                        now,
                        validation_status,
                        category,
                        complexity_score,
                        estimated_subtasks,
                        expansion_context,
                        validation_criteria,
                        use_external_validator,
                        workflow_name,
                        verification,
                        sequence_order,
                        github_issue_number,
                        github_pr_number,
                        github_repo,
                        linear_issue_id,
                        linear_team_id,
                        next_seq_num,
                        agent_name,
                        reference_doc,
                        requires_user_review,
                    ),
                )

                logger.debug(f"Created task {task_id} in project {project_id}")

                # Compute and store path_cache for the new task
                # Build path by traversing parent chain
                path_parts: list[str] = [str(next_seq_num)]
                current_parent = parent_task_id
                max_depth = 100
                depth = 0
                while current_parent and depth < max_depth:
                    parent_row = conn.execute(
                        "SELECT seq_num, parent_task_id FROM tasks WHERE id = ?",
                        (current_parent,),
                    ).fetchone()
                    if not parent_row or parent_row["seq_num"] is None:
                        break
                    path_parts.append(str(parent_row["seq_num"]))
                    current_parent = parent_row["parent_task_id"]
                    depth += 1

                path_parts.reverse()
                path_cache = ".".join(path_parts)
                conn.execute(
                    "UPDATE tasks SET path_cache = ? WHERE id = ?",
                    (path_cache, task_id),
                )

                # Auto-transition parent from needs_decomposition to open
                if parent_task_id:
                    parent = db.fetchone(
                        "SELECT status FROM tasks WHERE id = ?",
                        (parent_task_id,),
                    )
                    if parent and parent["status"] == "needs_decomposition":
                        transition_now = datetime.now(UTC).isoformat()
                        conn.execute(
                            "UPDATE tasks SET status = 'open', updated_at = ? WHERE id = ?",
                            (transition_now, parent_task_id),
                        )
                        logger.debug(
                            f"Auto-transitioned parent task {parent_task_id} from "
                            "needs_decomposition to open"
                        )

            return task_id

        except sqlite3.IntegrityError as e:
            # Check if it's a primary key violation (ID collision)
            if "UNIQUE constraint failed: tasks.id" in str(e) or "tasks.id" in str(e):
                if attempt == max_retries:
                    raise TaskIDCollisionError(
                        f"Failed to generate unique task ID after {max_retries} retries"
                    ) from e
                logger.warning(f"Task ID collision for {task_id}, retrying...")
                continue
            raise e

    raise TaskIDCollisionError("Unreachable")


def get_task(db: DatabaseProtocol, task_id: str, project_id: str | None = None) -> Task:
    """Get a task by ID or reference.

    Accepts multiple formats:
    - UUID: Direct lookup
    - #N: Project-scoped seq_num (requires project_id)
    - N: Plain seq_num (requires project_id)

    Args:
        db: Database protocol instance
        task_id: Task identifier in any supported format
        project_id: Required for #N and N formats

    Returns:
        The Task object

    Raises:
        ValueError: If task not found or format requires project_id
    """
    # Check if this looks like a seq_num reference (#N or plain N)
    is_seq_ref = task_id.startswith("#") or task_id.isdigit()

    if is_seq_ref:
        if not project_id:
            raise ValueError(f"Task {task_id} requires project_id for seq_num lookup")
        try:
            resolved_id = resolve_task_reference(db, task_id, project_id)
            task_id = resolved_id
        except TaskNotFoundError as e:
            raise ValueError(str(e)) from e

    row = db.fetchone("SELECT * FROM tasks WHERE id = ?", (task_id,))
    if not row:
        raise ValueError(f"Task {task_id} not found")
    return Task.from_row(row)


def find_task_by_prefix(db: DatabaseProtocol, prefix: str) -> Task | None:
    """Find a task by ID prefix. Returns None if no match or multiple matches."""
    # First try exact match
    row = db.fetchone("SELECT * FROM tasks WHERE id = ?", (prefix,))
    if row:
        return Task.from_row(row)

    # Try prefix match
    rows = db.fetchall("SELECT * FROM tasks WHERE id LIKE ?", (f"{prefix}%",))
    if len(rows) == 1:
        return Task.from_row(rows[0])
    return None


def find_tasks_by_prefix(db: DatabaseProtocol, prefix: str) -> list[Task]:
    """Find all tasks matching an ID prefix."""
    rows = db.fetchall("SELECT * FROM tasks WHERE id LIKE ?", (f"{prefix}%",))
    return [Task.from_row(row) for row in rows]


def update_task(
    db: DatabaseProtocol,
    task_id: str,
    title: Any = UNSET,
    description: Any = UNSET,
    status: Any = UNSET,
    priority: Any = UNSET,
    task_type: Any = UNSET,
    assignee: Any = UNSET,
    labels: Any = UNSET,
    parent_task_id: Any = UNSET,
    validation_status: Any = UNSET,
    validation_feedback: Any = UNSET,
    category: Any = UNSET,
    complexity_score: Any = UNSET,
    estimated_subtasks: Any = UNSET,
    expansion_context: Any = UNSET,
    validation_criteria: Any = UNSET,
    use_external_validator: Any = UNSET,
    validation_fail_count: Any = UNSET,
    workflow_name: Any = UNSET,
    verification: Any = UNSET,
    sequence_order: Any = UNSET,
    escalated_at: Any = UNSET,
    escalation_reason: Any = UNSET,
    github_issue_number: Any = UNSET,
    github_pr_number: Any = UNSET,
    github_repo: Any = UNSET,
    linear_issue_id: Any = UNSET,
    linear_team_id: Any = UNSET,
    agent_name: Any = UNSET,
    reference_doc: Any = UNSET,
    is_expanded: Any = UNSET,
    is_tdd_applied: Any = UNSET,
    validation_override_reason: Any = UNSET,
    requires_user_review: Any = UNSET,
) -> bool:
    """Update task fields.

    Returns True if parent_task_id was changed (indicating path cache needs update).
    """
    # Validate status transitions from needs_decomposition
    if status is not UNSET and status in ("in_progress", "closed"):
        current_task = get_task(db, task_id)
        if current_task.status == "needs_decomposition":
            # Check if task has subtasks (required to transition out of needs_decomposition)
            children = db.fetchone(
                "SELECT COUNT(*) as count FROM tasks WHERE parent_task_id = ?",
                (task_id,),
            )
            has_children = children and children["count"] > 0
            if not has_children:
                raise ValueError(
                    f"Cannot transition task {task_id} from 'needs_decomposition' to '{status}'. "
                    "Task must be decomposed into subtasks first."
                )

    # Block setting validation criteria on needs_decomposition tasks without subtasks
    if validation_criteria is not UNSET and validation_criteria is not None:
        current_task = get_task(db, task_id)
        if current_task.status == "needs_decomposition":
            # Check if task has subtasks
            children = db.fetchone(
                "SELECT COUNT(*) as count FROM tasks WHERE parent_task_id = ?",
                (task_id,),
            )
            has_children = children and children["count"] > 0
            if not has_children:
                raise ValueError(
                    f"Cannot set validation criteria on task {task_id} with 'needs_decomposition' status. "
                    "Decompose the task into subtasks first, then set validation criteria."
                )

    updates: list[str] = []
    params: list[Any] = []

    if title is not UNSET:
        updates.append("title = ?")
        params.append(title)
    if description is not UNSET:
        updates.append("description = ?")
        params.append(description)
    if status is not UNSET:
        updates.append("status = ?")
        params.append(status)
    if priority is not UNSET:
        updates.append("priority = ?")
        params.append(priority)
    if task_type is not UNSET:
        updates.append("task_type = ?")
        params.append(task_type)
    if assignee is not UNSET:
        updates.append("assignee = ?")
        params.append(assignee)
    if labels is not UNSET:
        updates.append("labels = ?")
        if labels is None:
            params.append("[]")
        else:
            params.append(json.dumps(labels))
    if parent_task_id is not UNSET:
        updates.append("parent_task_id = ?")
        params.append(parent_task_id)
    if validation_status is not UNSET:
        updates.append("validation_status = ?")
        params.append(validation_status)
    if validation_feedback is not UNSET:
        updates.append("validation_feedback = ?")
        params.append(validation_feedback)
    if category is not UNSET:
        updates.append("category = ?")
        params.append(category)
    if complexity_score is not UNSET:
        updates.append("complexity_score = ?")
        params.append(complexity_score)
    if estimated_subtasks is not UNSET:
        updates.append("estimated_subtasks = ?")
        params.append(estimated_subtasks)
    if expansion_context is not UNSET:
        updates.append("expansion_context = ?")
        params.append(expansion_context)
    if validation_criteria is not UNSET:
        updates.append("validation_criteria = ?")
        params.append(validation_criteria)
    if use_external_validator is not UNSET:
        updates.append("use_external_validator = ?")
        params.append(use_external_validator)
    if validation_fail_count is not UNSET:
        updates.append("validation_fail_count = ?")
        params.append(validation_fail_count)
    if workflow_name is not UNSET:
        updates.append("workflow_name = ?")
        params.append(workflow_name)
    if verification is not UNSET:
        updates.append("verification = ?")
        params.append(verification)
    if sequence_order is not UNSET:
        updates.append("sequence_order = ?")
        params.append(sequence_order)
    if escalated_at is not UNSET:
        updates.append("escalated_at = ?")
        params.append(escalated_at)
    if escalation_reason is not UNSET:
        updates.append("escalation_reason = ?")
        params.append(escalation_reason)
    if github_issue_number is not UNSET:
        updates.append("github_issue_number = ?")
        params.append(github_issue_number)
    if github_pr_number is not UNSET:
        updates.append("github_pr_number = ?")
        params.append(github_pr_number)
    if github_repo is not UNSET:
        updates.append("github_repo = ?")
        params.append(github_repo)
    if linear_issue_id is not UNSET:
        updates.append("linear_issue_id = ?")
        params.append(linear_issue_id)
    if linear_team_id is not UNSET:
        updates.append("linear_team_id = ?")
        params.append(linear_team_id)
    if agent_name is not UNSET:
        updates.append("agent_name = ?")
        params.append(agent_name)
    if reference_doc is not UNSET:
        updates.append("reference_doc = ?")
        params.append(reference_doc)
    if is_expanded is not UNSET:
        updates.append("is_expanded = ?")
        params.append(1 if is_expanded else 0)
    if is_tdd_applied is not UNSET:
        updates.append("is_tdd_applied = ?")
        params.append(1 if is_tdd_applied else 0)
    if validation_override_reason is not UNSET:
        updates.append("validation_override_reason = ?")
        params.append(validation_override_reason)
    if requires_user_review is not UNSET:
        updates.append("requires_user_review = ?")
        params.append(1 if requires_user_review else 0)

    # Auto-reset accepted_by_user when transitioning from 'closed' to any other status
    if status is not UNSET and status != "closed":
        current_task = get_task(db, task_id)
        if current_task and current_task.status == "closed":
            updates.append("accepted_by_user = ?")
            params.append(0)

    if not updates:
        return False

    updates.append("updated_at = ?")
    params.append(datetime.now(UTC).isoformat())

    params.append(task_id)  # for WHERE clause

    # nosec B608: SET clause built from hardcoded column names, values parameterized
    sql = f"UPDATE tasks SET {', '.join(updates)} WHERE id = ?"  # nosec B608

    with db.transaction() as conn:
        cursor = conn.execute(sql, tuple(params))
        if cursor.rowcount == 0:
            raise ValueError(f"Task {task_id} not found")

    # Return whether parent_task_id was changed (caller should update path cache)
    return parent_task_id is not UNSET
```
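The CRUD layer above relies on two conventions: a per-project `seq_num` assigned inside the insert transaction, and a dotted `path_cache` built by walking the parent chain. The sketch below is a minimal, hypothetical harness for exercising `create_task()`; `StubDatabase` and the reduced `tasks` schema are illustrative stand-ins for gobby's real `DatabaseProtocol` implementation and migrations, duck-typing only the surface this module touches (`transaction()`, `fetchone()`, `fetchall()`).

```python
# Illustrative only: not gobby's real database class or schema.
import sqlite3
from contextlib import contextmanager

from gobby.storage.tasks._crud import create_task

SCHEMA = """
CREATE TABLE tasks (
    id TEXT PRIMARY KEY, project_id TEXT, title TEXT, description TEXT,
    parent_task_id TEXT, created_in_session_id TEXT, priority INTEGER,
    task_type TEXT, assignee TEXT, labels TEXT, status TEXT,
    created_at TEXT, updated_at TEXT, validation_status TEXT, category TEXT,
    complexity_score INTEGER, estimated_subtasks INTEGER, expansion_context TEXT,
    validation_criteria TEXT, use_external_validator INTEGER,
    validation_fail_count INTEGER, workflow_name TEXT, verification TEXT,
    sequence_order INTEGER, github_issue_number INTEGER, github_pr_number INTEGER,
    github_repo TEXT, linear_issue_id TEXT, linear_team_id TEXT, seq_num INTEGER,
    agent_name TEXT, reference_doc TEXT, requires_user_review INTEGER,
    path_cache TEXT
)
"""


class StubDatabase:
    """Duck-types the parts of DatabaseProtocol that _crud.py calls."""

    def __init__(self) -> None:
        self._conn = sqlite3.connect(":memory:")
        self._conn.row_factory = sqlite3.Row
        self._conn.executescript(SCHEMA)

    @contextmanager
    def transaction(self):
        # Yield the raw connection; commit on success, roll back on error.
        try:
            yield self._conn
            self._conn.commit()
        except Exception:
            self._conn.rollback()
            raise

    def fetchone(self, sql: str, params: tuple = ()):
        return self._conn.execute(sql, params).fetchone()

    def fetchall(self, sql: str, params: tuple = ()):
        return self._conn.execute(sql, params).fetchall()


db = StubDatabase()
parent_id = create_task(db, project_id="proj-1", title="Build importer")
child_id = create_task(
    db, project_id="proj-1", title="Parse manifest", parent_task_id=parent_id
)

row = db.fetchone("SELECT seq_num, path_cache FROM tasks WHERE id = ?", (child_id,))
print(row["seq_num"], row["path_cache"])  # 2 1.2  (child path = parent seq + own seq)
```

With the parent created first (seq_num 1) and the child second (seq_num 2), the child's `path_cache` comes out as `1.2`, which is exactly the path format the resolver in `_id.py` below accepts.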
gobby/storage/tasks/_id.py

@@ -0,0 +1,104 @@

```python
"""Task ID generation and resolution utilities.

This module provides:
- generate_task_id(): Generate unique task UUIDs
- resolve_task_reference(): Resolve various reference formats to UUIDs
"""

import uuid

from gobby.storage.database import DatabaseProtocol
from gobby.storage.tasks._models import TaskNotFoundError


def generate_task_id(project_id: str, salt: str = "") -> str:
    """
    Generate a UUID-based task ID.

    Returns a UUID4 string which provides:
    - Guaranteed uniqueness (128-bit random)
    - Standard format (RFC 4122)
    - Human-friendly reference via seq_num field

    Args:
        project_id: Project ID (included for API compatibility, not used in UUID generation)
        salt: Salt value (included for API compatibility, not used in UUID generation)

    Returns:
        UUID4 string in standard format (xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx)
    """
    # Note: project_id and salt params kept for backward API compatibility
    # UUID4 is random and doesn't need external entropy
    _ = project_id, salt  # Silence unused parameter warnings
    return str(uuid.uuid4())


def resolve_task_reference(db: DatabaseProtocol, ref: str, project_id: str) -> str:
    """Resolve a task reference to its UUID.

    Accepts multiple reference formats:
    - N: Plain seq_num (e.g., 47)
    - #N: Project-scoped seq_num (e.g., #47)
    - 1.2.3: Path cache format
    - UUID: Direct UUID (validated to exist)

    Args:
        db: Database protocol instance
        ref: Task reference in any supported format
        project_id: Project ID for scoped lookups

    Returns:
        The task's UUID

    Raises:
        TaskNotFoundError: If the reference cannot be resolved
    """
    if not ref:
        raise TaskNotFoundError("Empty task reference")

    # #N or plain N format: seq_num lookup
    seq_num: int | None = None
    if ref.startswith("#"):
        try:
            seq_num = int(ref[1:])
        except ValueError:
            raise TaskNotFoundError(f"Invalid seq_num format: {ref}") from None
    elif ref.isdigit():
        seq_num = int(ref)

    if seq_num is not None:
        if seq_num <= 0:
            raise TaskNotFoundError(f"Invalid seq_num: {ref} (must be positive)")

        row = db.fetchone(
            "SELECT id FROM tasks WHERE project_id = ? AND seq_num = ?",
            (project_id, seq_num),
        )
        if not row:
            raise TaskNotFoundError(f"Task {ref} not found in project")
        return str(row["id"])

    # Path format: 1.2.3 (dots with all digits)
    if "." in ref and all(part.isdigit() for part in ref.split(".")):
        row = db.fetchone(
            "SELECT id FROM tasks WHERE project_id = ? AND path_cache = ?",
            (project_id, ref),
        )
        if not row:
            raise TaskNotFoundError(f"Task with path '{ref}' not found in project")
        return str(row["id"])

    # UUID format: validate it exists
    # UUIDs have 5 parts separated by hyphens
    parts = ref.split("-")
    if len(parts) == 5:
        row = db.fetchone(
            "SELECT id FROM tasks WHERE id = ?",
            (ref,),
        )
        if not row:
            raise TaskNotFoundError(f"Task with UUID '{ref}' not found")
        return str(row["id"])

    # Unknown format
    raise TaskNotFoundError(f"Unknown task reference format: {ref}")
```
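Taken together with the `seq_num` and `path_cache` columns populated by `create_task()`, the resolver lets callers pass whichever handle they happen to have. A small illustrative check, again against a hand-rolled stub rather than gobby's real database class (the table and row below are made up for the demonstration; only `fetchone()` is needed by `resolve_task_reference()`):

```python
# Illustrative only: a stand-in database with a fabricated row.
import sqlite3

from gobby.storage.tasks._id import generate_task_id, resolve_task_reference


class StubDB:
    """Implements just fetchone(), the only call resolve_task_reference() makes."""

    def __init__(self) -> None:
        self._conn = sqlite3.connect(":memory:")
        self._conn.row_factory = sqlite3.Row
        self._conn.execute(
            "CREATE TABLE tasks (id TEXT PRIMARY KEY, project_id TEXT, "
            "seq_num INTEGER, path_cache TEXT)"
        )

    def execute(self, sql: str, params: tuple = ()) -> None:
        self._conn.execute(sql, params)
        self._conn.commit()

    def fetchone(self, sql: str, params: tuple = ()):
        return self._conn.execute(sql, params).fetchone()


db = StubDB()
task_uuid = generate_task_id("proj-1")  # plain uuid4(); args kept for API compatibility
db.execute(
    "INSERT INTO tasks (id, project_id, seq_num, path_cache) VALUES (?, ?, ?, ?)",
    (task_uuid, "proj-1", 47, "3.47"),
)

# All four reference formats resolve to the same UUID.
for ref in ("47", "#47", "3.47", task_uuid):
    assert resolve_task_reference(db, ref, "proj-1") == task_uuid
```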