gobby 0.2.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gobby/__init__.py +3 -0
- gobby/adapters/__init__.py +30 -0
- gobby/adapters/base.py +93 -0
- gobby/adapters/claude_code.py +276 -0
- gobby/adapters/codex.py +1292 -0
- gobby/adapters/gemini.py +343 -0
- gobby/agents/__init__.py +37 -0
- gobby/agents/codex_session.py +120 -0
- gobby/agents/constants.py +112 -0
- gobby/agents/context.py +362 -0
- gobby/agents/definitions.py +133 -0
- gobby/agents/gemini_session.py +111 -0
- gobby/agents/registry.py +618 -0
- gobby/agents/runner.py +968 -0
- gobby/agents/session.py +259 -0
- gobby/agents/spawn.py +916 -0
- gobby/agents/spawners/__init__.py +77 -0
- gobby/agents/spawners/base.py +142 -0
- gobby/agents/spawners/cross_platform.py +266 -0
- gobby/agents/spawners/embedded.py +225 -0
- gobby/agents/spawners/headless.py +226 -0
- gobby/agents/spawners/linux.py +125 -0
- gobby/agents/spawners/macos.py +277 -0
- gobby/agents/spawners/windows.py +308 -0
- gobby/agents/tty_config.py +319 -0
- gobby/autonomous/__init__.py +32 -0
- gobby/autonomous/progress_tracker.py +447 -0
- gobby/autonomous/stop_registry.py +269 -0
- gobby/autonomous/stuck_detector.py +383 -0
- gobby/cli/__init__.py +67 -0
- gobby/cli/__main__.py +8 -0
- gobby/cli/agents.py +529 -0
- gobby/cli/artifacts.py +266 -0
- gobby/cli/daemon.py +329 -0
- gobby/cli/extensions.py +526 -0
- gobby/cli/github.py +263 -0
- gobby/cli/init.py +53 -0
- gobby/cli/install.py +614 -0
- gobby/cli/installers/__init__.py +37 -0
- gobby/cli/installers/antigravity.py +65 -0
- gobby/cli/installers/claude.py +363 -0
- gobby/cli/installers/codex.py +192 -0
- gobby/cli/installers/gemini.py +294 -0
- gobby/cli/installers/git_hooks.py +377 -0
- gobby/cli/installers/shared.py +737 -0
- gobby/cli/linear.py +250 -0
- gobby/cli/mcp.py +30 -0
- gobby/cli/mcp_proxy.py +698 -0
- gobby/cli/memory.py +304 -0
- gobby/cli/merge.py +384 -0
- gobby/cli/projects.py +79 -0
- gobby/cli/sessions.py +622 -0
- gobby/cli/tasks/__init__.py +30 -0
- gobby/cli/tasks/_utils.py +658 -0
- gobby/cli/tasks/ai.py +1025 -0
- gobby/cli/tasks/commits.py +169 -0
- gobby/cli/tasks/crud.py +685 -0
- gobby/cli/tasks/deps.py +135 -0
- gobby/cli/tasks/labels.py +63 -0
- gobby/cli/tasks/main.py +273 -0
- gobby/cli/tasks/search.py +178 -0
- gobby/cli/tui.py +34 -0
- gobby/cli/utils.py +513 -0
- gobby/cli/workflows.py +927 -0
- gobby/cli/worktrees.py +481 -0
- gobby/config/__init__.py +129 -0
- gobby/config/app.py +551 -0
- gobby/config/extensions.py +167 -0
- gobby/config/features.py +472 -0
- gobby/config/llm_providers.py +98 -0
- gobby/config/logging.py +66 -0
- gobby/config/mcp.py +346 -0
- gobby/config/persistence.py +247 -0
- gobby/config/servers.py +141 -0
- gobby/config/sessions.py +250 -0
- gobby/config/tasks.py +784 -0
- gobby/hooks/__init__.py +104 -0
- gobby/hooks/artifact_capture.py +213 -0
- gobby/hooks/broadcaster.py +243 -0
- gobby/hooks/event_handlers.py +723 -0
- gobby/hooks/events.py +218 -0
- gobby/hooks/git.py +169 -0
- gobby/hooks/health_monitor.py +171 -0
- gobby/hooks/hook_manager.py +856 -0
- gobby/hooks/hook_types.py +575 -0
- gobby/hooks/plugins.py +813 -0
- gobby/hooks/session_coordinator.py +396 -0
- gobby/hooks/verification_runner.py +268 -0
- gobby/hooks/webhooks.py +339 -0
- gobby/install/claude/commands/gobby/bug.md +51 -0
- gobby/install/claude/commands/gobby/chore.md +51 -0
- gobby/install/claude/commands/gobby/epic.md +52 -0
- gobby/install/claude/commands/gobby/eval.md +235 -0
- gobby/install/claude/commands/gobby/feat.md +49 -0
- gobby/install/claude/commands/gobby/nit.md +52 -0
- gobby/install/claude/commands/gobby/ref.md +52 -0
- gobby/install/claude/hooks/HOOK_SCHEMAS.md +632 -0
- gobby/install/claude/hooks/hook_dispatcher.py +364 -0
- gobby/install/claude/hooks/validate_settings.py +102 -0
- gobby/install/claude/hooks-template.json +118 -0
- gobby/install/codex/hooks/hook_dispatcher.py +153 -0
- gobby/install/codex/prompts/forget.md +7 -0
- gobby/install/codex/prompts/memories.md +7 -0
- gobby/install/codex/prompts/recall.md +7 -0
- gobby/install/codex/prompts/remember.md +13 -0
- gobby/install/gemini/hooks/hook_dispatcher.py +268 -0
- gobby/install/gemini/hooks-template.json +138 -0
- gobby/install/shared/plugins/code_guardian.py +456 -0
- gobby/install/shared/plugins/example_notify.py +331 -0
- gobby/integrations/__init__.py +10 -0
- gobby/integrations/github.py +145 -0
- gobby/integrations/linear.py +145 -0
- gobby/llm/__init__.py +40 -0
- gobby/llm/base.py +120 -0
- gobby/llm/claude.py +578 -0
- gobby/llm/claude_executor.py +503 -0
- gobby/llm/codex.py +322 -0
- gobby/llm/codex_executor.py +513 -0
- gobby/llm/executor.py +316 -0
- gobby/llm/factory.py +34 -0
- gobby/llm/gemini.py +258 -0
- gobby/llm/gemini_executor.py +339 -0
- gobby/llm/litellm.py +287 -0
- gobby/llm/litellm_executor.py +303 -0
- gobby/llm/resolver.py +499 -0
- gobby/llm/service.py +236 -0
- gobby/mcp_proxy/__init__.py +29 -0
- gobby/mcp_proxy/actions.py +175 -0
- gobby/mcp_proxy/daemon_control.py +198 -0
- gobby/mcp_proxy/importer.py +436 -0
- gobby/mcp_proxy/lazy.py +325 -0
- gobby/mcp_proxy/manager.py +798 -0
- gobby/mcp_proxy/metrics.py +609 -0
- gobby/mcp_proxy/models.py +139 -0
- gobby/mcp_proxy/registries.py +215 -0
- gobby/mcp_proxy/schema_hash.py +381 -0
- gobby/mcp_proxy/semantic_search.py +706 -0
- gobby/mcp_proxy/server.py +549 -0
- gobby/mcp_proxy/services/__init__.py +0 -0
- gobby/mcp_proxy/services/fallback.py +306 -0
- gobby/mcp_proxy/services/recommendation.py +224 -0
- gobby/mcp_proxy/services/server_mgmt.py +214 -0
- gobby/mcp_proxy/services/system.py +72 -0
- gobby/mcp_proxy/services/tool_filter.py +231 -0
- gobby/mcp_proxy/services/tool_proxy.py +309 -0
- gobby/mcp_proxy/stdio.py +565 -0
- gobby/mcp_proxy/tools/__init__.py +27 -0
- gobby/mcp_proxy/tools/agents.py +1103 -0
- gobby/mcp_proxy/tools/artifacts.py +207 -0
- gobby/mcp_proxy/tools/hub.py +335 -0
- gobby/mcp_proxy/tools/internal.py +337 -0
- gobby/mcp_proxy/tools/memory.py +543 -0
- gobby/mcp_proxy/tools/merge.py +422 -0
- gobby/mcp_proxy/tools/metrics.py +283 -0
- gobby/mcp_proxy/tools/orchestration/__init__.py +23 -0
- gobby/mcp_proxy/tools/orchestration/cleanup.py +619 -0
- gobby/mcp_proxy/tools/orchestration/monitor.py +380 -0
- gobby/mcp_proxy/tools/orchestration/orchestrate.py +746 -0
- gobby/mcp_proxy/tools/orchestration/review.py +736 -0
- gobby/mcp_proxy/tools/orchestration/utils.py +16 -0
- gobby/mcp_proxy/tools/session_messages.py +1056 -0
- gobby/mcp_proxy/tools/task_dependencies.py +219 -0
- gobby/mcp_proxy/tools/task_expansion.py +591 -0
- gobby/mcp_proxy/tools/task_github.py +393 -0
- gobby/mcp_proxy/tools/task_linear.py +379 -0
- gobby/mcp_proxy/tools/task_orchestration.py +77 -0
- gobby/mcp_proxy/tools/task_readiness.py +522 -0
- gobby/mcp_proxy/tools/task_sync.py +351 -0
- gobby/mcp_proxy/tools/task_validation.py +843 -0
- gobby/mcp_proxy/tools/tasks/__init__.py +25 -0
- gobby/mcp_proxy/tools/tasks/_context.py +112 -0
- gobby/mcp_proxy/tools/tasks/_crud.py +516 -0
- gobby/mcp_proxy/tools/tasks/_factory.py +176 -0
- gobby/mcp_proxy/tools/tasks/_helpers.py +129 -0
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +517 -0
- gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +301 -0
- gobby/mcp_proxy/tools/tasks/_resolution.py +55 -0
- gobby/mcp_proxy/tools/tasks/_search.py +215 -0
- gobby/mcp_proxy/tools/tasks/_session.py +125 -0
- gobby/mcp_proxy/tools/workflows.py +973 -0
- gobby/mcp_proxy/tools/worktrees.py +1264 -0
- gobby/mcp_proxy/transports/__init__.py +0 -0
- gobby/mcp_proxy/transports/base.py +95 -0
- gobby/mcp_proxy/transports/factory.py +44 -0
- gobby/mcp_proxy/transports/http.py +139 -0
- gobby/mcp_proxy/transports/stdio.py +213 -0
- gobby/mcp_proxy/transports/websocket.py +136 -0
- gobby/memory/backends/__init__.py +116 -0
- gobby/memory/backends/mem0.py +408 -0
- gobby/memory/backends/memu.py +485 -0
- gobby/memory/backends/null.py +111 -0
- gobby/memory/backends/openmemory.py +537 -0
- gobby/memory/backends/sqlite.py +304 -0
- gobby/memory/context.py +87 -0
- gobby/memory/manager.py +1001 -0
- gobby/memory/protocol.py +451 -0
- gobby/memory/search/__init__.py +66 -0
- gobby/memory/search/text.py +127 -0
- gobby/memory/viz.py +258 -0
- gobby/prompts/__init__.py +13 -0
- gobby/prompts/defaults/expansion/system.md +119 -0
- gobby/prompts/defaults/expansion/user.md +48 -0
- gobby/prompts/defaults/external_validation/agent.md +72 -0
- gobby/prompts/defaults/external_validation/external.md +63 -0
- gobby/prompts/defaults/external_validation/spawn.md +83 -0
- gobby/prompts/defaults/external_validation/system.md +6 -0
- gobby/prompts/defaults/features/import_mcp.md +22 -0
- gobby/prompts/defaults/features/import_mcp_github.md +17 -0
- gobby/prompts/defaults/features/import_mcp_search.md +16 -0
- gobby/prompts/defaults/features/recommend_tools.md +32 -0
- gobby/prompts/defaults/features/recommend_tools_hybrid.md +35 -0
- gobby/prompts/defaults/features/recommend_tools_llm.md +30 -0
- gobby/prompts/defaults/features/server_description.md +20 -0
- gobby/prompts/defaults/features/server_description_system.md +6 -0
- gobby/prompts/defaults/features/task_description.md +31 -0
- gobby/prompts/defaults/features/task_description_system.md +6 -0
- gobby/prompts/defaults/features/tool_summary.md +17 -0
- gobby/prompts/defaults/features/tool_summary_system.md +6 -0
- gobby/prompts/defaults/research/step.md +58 -0
- gobby/prompts/defaults/validation/criteria.md +47 -0
- gobby/prompts/defaults/validation/validate.md +38 -0
- gobby/prompts/loader.py +346 -0
- gobby/prompts/models.py +113 -0
- gobby/py.typed +0 -0
- gobby/runner.py +488 -0
- gobby/search/__init__.py +23 -0
- gobby/search/protocol.py +104 -0
- gobby/search/tfidf.py +232 -0
- gobby/servers/__init__.py +7 -0
- gobby/servers/http.py +636 -0
- gobby/servers/models.py +31 -0
- gobby/servers/routes/__init__.py +23 -0
- gobby/servers/routes/admin.py +416 -0
- gobby/servers/routes/dependencies.py +118 -0
- gobby/servers/routes/mcp/__init__.py +24 -0
- gobby/servers/routes/mcp/hooks.py +135 -0
- gobby/servers/routes/mcp/plugins.py +121 -0
- gobby/servers/routes/mcp/tools.py +1337 -0
- gobby/servers/routes/mcp/webhooks.py +159 -0
- gobby/servers/routes/sessions.py +582 -0
- gobby/servers/websocket.py +766 -0
- gobby/sessions/__init__.py +13 -0
- gobby/sessions/analyzer.py +322 -0
- gobby/sessions/lifecycle.py +240 -0
- gobby/sessions/manager.py +563 -0
- gobby/sessions/processor.py +225 -0
- gobby/sessions/summary.py +532 -0
- gobby/sessions/transcripts/__init__.py +41 -0
- gobby/sessions/transcripts/base.py +125 -0
- gobby/sessions/transcripts/claude.py +386 -0
- gobby/sessions/transcripts/codex.py +143 -0
- gobby/sessions/transcripts/gemini.py +195 -0
- gobby/storage/__init__.py +21 -0
- gobby/storage/agents.py +409 -0
- gobby/storage/artifact_classifier.py +341 -0
- gobby/storage/artifacts.py +285 -0
- gobby/storage/compaction.py +67 -0
- gobby/storage/database.py +357 -0
- gobby/storage/inter_session_messages.py +194 -0
- gobby/storage/mcp.py +680 -0
- gobby/storage/memories.py +562 -0
- gobby/storage/merge_resolutions.py +550 -0
- gobby/storage/migrations.py +860 -0
- gobby/storage/migrations_legacy.py +1359 -0
- gobby/storage/projects.py +166 -0
- gobby/storage/session_messages.py +251 -0
- gobby/storage/session_tasks.py +97 -0
- gobby/storage/sessions.py +817 -0
- gobby/storage/task_dependencies.py +223 -0
- gobby/storage/tasks/__init__.py +42 -0
- gobby/storage/tasks/_aggregates.py +180 -0
- gobby/storage/tasks/_crud.py +449 -0
- gobby/storage/tasks/_id.py +104 -0
- gobby/storage/tasks/_lifecycle.py +311 -0
- gobby/storage/tasks/_manager.py +889 -0
- gobby/storage/tasks/_models.py +300 -0
- gobby/storage/tasks/_ordering.py +119 -0
- gobby/storage/tasks/_path_cache.py +110 -0
- gobby/storage/tasks/_queries.py +343 -0
- gobby/storage/tasks/_search.py +143 -0
- gobby/storage/workflow_audit.py +393 -0
- gobby/storage/worktrees.py +547 -0
- gobby/sync/__init__.py +29 -0
- gobby/sync/github.py +333 -0
- gobby/sync/linear.py +304 -0
- gobby/sync/memories.py +284 -0
- gobby/sync/tasks.py +641 -0
- gobby/tasks/__init__.py +8 -0
- gobby/tasks/build_verification.py +193 -0
- gobby/tasks/commits.py +633 -0
- gobby/tasks/context.py +747 -0
- gobby/tasks/criteria.py +342 -0
- gobby/tasks/enhanced_validator.py +226 -0
- gobby/tasks/escalation.py +263 -0
- gobby/tasks/expansion.py +626 -0
- gobby/tasks/external_validator.py +764 -0
- gobby/tasks/issue_extraction.py +171 -0
- gobby/tasks/prompts/expand.py +327 -0
- gobby/tasks/research.py +421 -0
- gobby/tasks/tdd.py +352 -0
- gobby/tasks/tree_builder.py +263 -0
- gobby/tasks/validation.py +712 -0
- gobby/tasks/validation_history.py +357 -0
- gobby/tasks/validation_models.py +89 -0
- gobby/tools/__init__.py +0 -0
- gobby/tools/summarizer.py +170 -0
- gobby/tui/__init__.py +5 -0
- gobby/tui/api_client.py +281 -0
- gobby/tui/app.py +327 -0
- gobby/tui/screens/__init__.py +25 -0
- gobby/tui/screens/agents.py +333 -0
- gobby/tui/screens/chat.py +450 -0
- gobby/tui/screens/dashboard.py +377 -0
- gobby/tui/screens/memory.py +305 -0
- gobby/tui/screens/metrics.py +231 -0
- gobby/tui/screens/orchestrator.py +904 -0
- gobby/tui/screens/sessions.py +412 -0
- gobby/tui/screens/tasks.py +442 -0
- gobby/tui/screens/workflows.py +289 -0
- gobby/tui/screens/worktrees.py +174 -0
- gobby/tui/widgets/__init__.py +21 -0
- gobby/tui/widgets/chat.py +210 -0
- gobby/tui/widgets/conductor.py +104 -0
- gobby/tui/widgets/menu.py +132 -0
- gobby/tui/widgets/message_panel.py +160 -0
- gobby/tui/widgets/review_gate.py +224 -0
- gobby/tui/widgets/task_tree.py +99 -0
- gobby/tui/widgets/token_budget.py +166 -0
- gobby/tui/ws_client.py +258 -0
- gobby/utils/__init__.py +3 -0
- gobby/utils/daemon_client.py +235 -0
- gobby/utils/git.py +222 -0
- gobby/utils/id.py +38 -0
- gobby/utils/json_helpers.py +161 -0
- gobby/utils/logging.py +376 -0
- gobby/utils/machine_id.py +135 -0
- gobby/utils/metrics.py +589 -0
- gobby/utils/project_context.py +182 -0
- gobby/utils/project_init.py +263 -0
- gobby/utils/status.py +256 -0
- gobby/utils/validation.py +80 -0
- gobby/utils/version.py +23 -0
- gobby/workflows/__init__.py +4 -0
- gobby/workflows/actions.py +1310 -0
- gobby/workflows/approval_flow.py +138 -0
- gobby/workflows/artifact_actions.py +103 -0
- gobby/workflows/audit_helpers.py +110 -0
- gobby/workflows/autonomous_actions.py +286 -0
- gobby/workflows/context_actions.py +394 -0
- gobby/workflows/definitions.py +130 -0
- gobby/workflows/detection_helpers.py +208 -0
- gobby/workflows/engine.py +485 -0
- gobby/workflows/evaluator.py +669 -0
- gobby/workflows/git_utils.py +96 -0
- gobby/workflows/hooks.py +169 -0
- gobby/workflows/lifecycle_evaluator.py +613 -0
- gobby/workflows/llm_actions.py +70 -0
- gobby/workflows/loader.py +333 -0
- gobby/workflows/mcp_actions.py +60 -0
- gobby/workflows/memory_actions.py +272 -0
- gobby/workflows/premature_stop.py +164 -0
- gobby/workflows/session_actions.py +139 -0
- gobby/workflows/state_actions.py +123 -0
- gobby/workflows/state_manager.py +104 -0
- gobby/workflows/stop_signal_actions.py +163 -0
- gobby/workflows/summary_actions.py +344 -0
- gobby/workflows/task_actions.py +249 -0
- gobby/workflows/task_enforcement_actions.py +901 -0
- gobby/workflows/templates.py +52 -0
- gobby/workflows/todo_actions.py +84 -0
- gobby/workflows/webhook.py +223 -0
- gobby/workflows/webhook_executor.py +399 -0
- gobby/worktrees/__init__.py +5 -0
- gobby/worktrees/git.py +690 -0
- gobby/worktrees/merge/__init__.py +20 -0
- gobby/worktrees/merge/conflict_parser.py +177 -0
- gobby/worktrees/merge/resolver.py +485 -0
- gobby-0.2.5.dist-info/METADATA +351 -0
- gobby-0.2.5.dist-info/RECORD +383 -0
- gobby-0.2.5.dist-info/WHEEL +5 -0
- gobby-0.2.5.dist-info/entry_points.txt +2 -0
- gobby-0.2.5.dist-info/licenses/LICENSE.md +193 -0
- gobby-0.2.5.dist-info/top_level.txt +1 -0
gobby/sync/tasks.py
ADDED
@@ -0,0 +1,641 @@
import asyncio
import hashlib
import json
import logging
import time
from datetime import UTC, datetime
from pathlib import Path
from typing import TYPE_CHECKING, Any

if TYPE_CHECKING:
    pass
from gobby.storage.tasks import LocalTaskManager
from gobby.utils.git import normalize_commit_sha

logger = logging.getLogger(__name__)


class TaskSyncManager:
    """
    Manages synchronization of tasks to the filesystem (JSONL) for Git versioning.
    """

    def __init__(
        self,
        task_manager: LocalTaskManager,
        export_path: str = ".gobby/tasks.jsonl",
    ):
        """
        Initialize TaskSyncManager.

        Args:
            task_manager: LocalTaskManager instance
            export_path: Path to the JSONL export file
        """
        self.task_manager = task_manager
        self.db = task_manager.db
        self.export_path = Path(export_path)
        # Async debounce state (replaces threading.Timer to avoid blocking event loop)
        self._export_task: asyncio.Task[None] | None = None
        self._last_change_time: float = 0
        self._debounce_interval = 5.0  # seconds
        self._shutdown_requested = False
        self._pending_project_id: str | None = None
    def _get_export_path(self, project_id: str | None) -> Path:
        """
        Resolve the export path for a given project.

        Resolution order:
        1. If project_id provided -> find project repo_path -> .gobby/tasks.jsonl
        2. Fallback to self.export_path (legacy/default behavior)
        """
        if not project_id:
            return self.export_path

        # Try to find project
        from gobby.storage.projects import LocalProjectManager

        project_manager = LocalProjectManager(self.db)
        project = project_manager.get(project_id)

        if project and project.repo_path:
            return Path(project.repo_path) / ".gobby" / "tasks.jsonl"

        return self.export_path

    def _normalize_commits(self, commits: list[str] | None, repo_path: Path) -> list[str]:
        """
        Normalize commit SHAs to canonical short form and deduplicate.

        Uses git rev-parse --short to normalize all SHAs to the same format,
        ensuring that mixed short/full SHA entries resolve to unique values.

        Args:
            commits: List of commit SHAs (may be mixed short/full)
            repo_path: Path to git repository for SHA resolution

        Returns:
            Sorted list of unique normalized short SHAs
        """
        if not commits:
            return []

        seen: set[str] = set()
        normalized: list[str] = []

        for sha in commits:
            # Normalize to canonical short form (7+ chars, unique in repo)
            short_sha = normalize_commit_sha(sha, cwd=repo_path)
            if short_sha and short_sha not in seen:
                seen.add(short_sha)
                normalized.append(short_sha)
            elif not short_sha:
                # If normalization fails, keep original SHA but still dedupe
                # This handles cases where git history may be unavailable
                if sha not in seen:
                    seen.add(sha)
                    normalized.append(sha)

        return sorted(normalized)
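Since both the short and full form of a commit normalize to the same canonical short SHA, mixed lists collapse to unique entries, and SHAs that fail to resolve are kept verbatim but still deduplicated. A self-contained sketch of that dedupe logic with a stand-in normalizer (SHAs are invented; the real code above calls normalize_commit_sha against the repository):

def fake_normalize(sha: str) -> str | None:
    """Stand-in for normalize_commit_sha / `git rev-parse --short`."""
    return sha[:7] if len(sha) >= 7 else None

mixed = ["a1b2c3d", "a1b2c3d4e5f60718293a4b5c6d7e8f9012345678", "bad"]
seen: set[str] = set()
result: list[str] = []
for sha in mixed:
    keep = fake_normalize(sha) or sha  # fall back to the raw SHA on failure
    if keep not in seen:
        seen.add(keep)
        result.append(keep)
print(sorted(result))  # ['a1b2c3d', 'bad'] - short and full forms collapse together

The listing continues with export_to_jsonl: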
    def export_to_jsonl(self, project_id: str | None = None) -> None:
        """
        Export tasks and their dependencies to a JSONL file.
        Tasks are sorted by ID to ensure deterministic output.

        Args:
            project_id: Optional project to export. If matches context, uses project path.
        """
        try:
            # Determine target path
            target_path = self._get_export_path(project_id)

            # Filter tasks by project_id if provided
            # This ensures we only export tasks for the specific project

            tasks = self.task_manager.list_tasks(limit=100000, project_id=project_id)

            # Fetch all dependencies
            # We'll use a raw query for efficiency here instead of calling get_blockers for every task
            deps_rows = self.db.fetchall("SELECT task_id, depends_on FROM task_dependencies")

            # Build dependency map: task_id -> list[depends_on]
            deps_map: dict[str, list[str]] = {}
            for task_id, depends_on in deps_rows:
                if task_id not in deps_map:
                    deps_map[task_id] = []
                deps_map[task_id].append(depends_on)

            # Sort tasks by ID for deterministic output
            tasks.sort(key=lambda t: t.id)

            export_data = []
            for task in tasks:
                task_dict = {
                    "id": task.id,
                    "title": task.title,
                    "description": task.description,
                    "status": task.status,
                    "created_at": task.created_at,
                    "updated_at": task.updated_at,
                    "project_id": task.project_id,
                    "parent_id": task.parent_task_id,
                    "deps_on": sorted(deps_map.get(task.id, [])),  # Sort deps for stability
                    # Commit linking - normalize to short SHAs and deduplicate
                    # target_path is .gobby/tasks.jsonl, so parent.parent is repo root
                    "commits": self._normalize_commits(task.commits, target_path.parent.parent),
                    # Validation history (for tracking validation state across syncs)
                    "validation": (
                        {
                            "status": task.validation_status,
                            "feedback": task.validation_feedback,
                            "fail_count": task.validation_fail_count,
                            "criteria": task.validation_criteria,
                            "override_reason": task.validation_override_reason,
                        }
                        if task.validation_status
                        else None
                    ),
                    # Escalation fields
                    "escalated_at": task.escalated_at,
                    "escalation_reason": task.escalation_reason,
                    # Human-friendly IDs (preserve across sync)
                    "seq_num": task.seq_num,
                    "path_cache": task.path_cache,
                }
                export_data.append(task_dict)

            # Calculate content hash first to check if anything changed
            jsonl_content = ""
            for item in export_data:
                jsonl_content += json.dumps(item, sort_keys=True) + "\n"

            content_hash = hashlib.sha256(jsonl_content.encode("utf-8")).hexdigest()

            # Check existing hash before writing anything
            meta_path = target_path.parent / "tasks_meta.json"
            existing_hash = None
            if meta_path.exists():
                try:
                    with open(meta_path, encoding="utf-8") as f:
                        existing_meta = json.load(f)
                    existing_hash = existing_meta.get("content_hash")
                except (json.JSONDecodeError, OSError):
                    pass  # Will write fresh meta

            # Skip writing if content hasn't changed
            if content_hash == existing_hash:
                logger.debug(f"Task export skipped - no changes (hash: {content_hash[:8]})")
                return

            # Write JSONL file
            target_path.parent.mkdir(parents=True, exist_ok=True)

            with open(target_path, "w", encoding="utf-8") as f:
                for item in export_data:
                    f.write(json.dumps(item) + "\n")

            # Write meta file
            meta_data = {
                "content_hash": content_hash,
                "last_exported": datetime.now(UTC).isoformat(),
            }

            with open(meta_path, "w", encoding="utf-8") as f:
                json.dump(meta_data, f, indent=2)

            logger.info(f"Exported {len(tasks)} tasks to {target_path} (hash: {content_hash[:8]})")

        except Exception as e:
            logger.error(f"Failed to export tasks: {e}", exc_info=True)
            raise
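The exporter writes one JSON object per task per line, with a sibling tasks_meta.json carrying the content hash that gates the skip-if-unchanged check. A minimal standalone sketch of that hash-gate pattern (record fields are a subset with illustrative values; file paths match the defaults above):

import hashlib
import json
from pathlib import Path

records = [{"id": "task-001", "title": "Example", "status": "open", "deps_on": []}]
content = "".join(json.dumps(r, sort_keys=True) + "\n" for r in records)
content_hash = hashlib.sha256(content.encode("utf-8")).hexdigest()

meta_path = Path(".gobby/tasks_meta.json")
previous = json.loads(meta_path.read_text()).get("content_hash") if meta_path.exists() else None

if content_hash != previous:  # same gate as export_to_jsonl above
    meta_path.parent.mkdir(parents=True, exist_ok=True)
    Path(".gobby/tasks.jsonl").write_text(content, encoding="utf-8")
    meta_path.write_text(json.dumps({"content_hash": content_hash}), encoding="utf-8")

The listing continues with import_from_jsonl: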
    def import_from_jsonl(self, project_id: str | None = None) -> None:
        """
        Import tasks from JSONL file into SQLite.
        Uses Last-Write-Wins conflict resolution based on updated_at.

        Args:
            project_id: Optional project to import from. If matches context, uses project path.
        """
        target_path = self._get_export_path(project_id)

        if not target_path.exists():
            logger.debug(f"No task export file found at {target_path}, skipping import")
            return

        try:
            with open(target_path, encoding="utf-8") as f:
                lines = f.readlines()

            imported_count = 0
            updated_count = 0
            skipped_count = 0

            # Phase 1: Import Tasks (Upsert)
            pending_deps: list[tuple[str, str]] = []

            # Temporarily disable foreign keys to allow inserting child tasks
            # before their parents (JSONL order may not be parent-first)
            self.db.execute("PRAGMA foreign_keys = OFF")

            try:
                with self.db.transaction() as conn:
                    for line in lines:
                        if not line.strip():
                            continue

                        data = json.loads(line)
                        task_id = data["id"]
                        updated_at_file = datetime.fromisoformat(data["updated_at"])

                        # Check if task exists (also fetch seq_num/path_cache to preserve)
                        existing_row = self.db.fetchone(
                            "SELECT updated_at, seq_num, path_cache FROM tasks WHERE id = ?",
                            (task_id,),
                        )

                        should_update = False
                        existing_seq_num = None
                        existing_path_cache = None
                        if not existing_row:
                            should_update = True
                            imported_count += 1
                        else:
                            updated_at_db = datetime.fromisoformat(existing_row["updated_at"])
                            existing_seq_num = existing_row["seq_num"]
                            existing_path_cache = existing_row["path_cache"]
                            if updated_at_file > updated_at_db:
                                should_update = True
                                updated_count += 1
                            else:
                                skipped_count += 1

                        if should_update:
                            # Use INSERT OR REPLACE to handle upsert generically
                            # Note: Labels not in JSONL currently based on export logic
                            # Note: We need to respect the exact fields from JSONL

                            # Handle commits array (stored as JSON in SQLite)
                            commits_json = (
                                json.dumps(data["commits"]) if data.get("commits") else None
                            )

                            # Handle validation object (extract fields)
                            validation = data.get("validation") or {}
                            validation_status = validation.get("status")
                            validation_feedback = validation.get("feedback")
                            validation_fail_count = validation.get("fail_count", 0)
                            validation_criteria = validation.get("criteria")
                            validation_override_reason = validation.get("override_reason")

                            conn.execute(
                                """
                                INSERT OR REPLACE INTO tasks (
                                    id, project_id, title, description, parent_task_id,
                                    status, priority, task_type, created_at, updated_at,
                                    commits, validation_status, validation_feedback,
                                    validation_fail_count, validation_criteria,
                                    validation_override_reason, escalated_at, escalation_reason,
                                    seq_num, path_cache
                                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                                """,
                                (
                                    task_id,
                                    data.get("project_id"),
                                    data["title"],
                                    data.get("description"),
                                    data.get(
                                        "parent_id"
                                    ),  # Note: JSONL uses parent_id, not parent_task_id
                                    data["status"],
                                    data.get("priority", 2),
                                    data.get("task_type", "task"),
                                    data["created_at"],
                                    data["updated_at"],
                                    commits_json,
                                    validation_status,
                                    validation_feedback,
                                    validation_fail_count,
                                    validation_criteria,
                                    validation_override_reason,
                                    data.get("escalated_at"),
                                    data.get("escalation_reason"),
                                    # Preserve existing seq_num/path_cache if JSONL doesn't have them
                                    data["seq_num"] if "seq_num" in data else existing_seq_num,
                                    data["path_cache"]
                                    if "path_cache" in data
                                    else existing_path_cache,
                                ),
                            )

                        # Collect dependencies for Phase 2
                        if "deps_on" in data:
                            for dep_id in data["deps_on"]:
                                pending_deps.append((task_id, dep_id))

                # Phase 2: Import Dependencies
                # We blindly re-insert dependencies. Since we can't easily track deletion
                # of dependencies without full diff, we'll ensure they exist.
                # To handle strict syncing, we might want to clear existing deps for these
                # tasks, but that's risky. For now, additive only for deps (or ignore if exist).

                with self.db.transaction() as conn:
                    for task_id, depends_on in pending_deps:
                        # Check if both exist (they should, unless depends_on is missing)
                        conn.execute(
                            """
                            INSERT OR IGNORE INTO task_dependencies (
                                task_id, depends_on, dep_type, created_at
                            ) VALUES (?, ?, 'blocks', ?)
                            """,
                            (task_id, depends_on, datetime.now(UTC).isoformat()),
                        )

                logger.info(
                    f"Import complete: {imported_count} imported, "
                    f"{updated_count} updated, {skipped_count} skipped"
                )

                # Rebuild search index to include imported tasks
                if imported_count > 0 or updated_count > 0:
                    try:
                        stats = self.task_manager.reindex_search(project_id)
                        logger.debug(
                            f"Search index rebuilt with {stats.get('item_count', 0)} tasks"
                        )
                    except Exception as e:
                        logger.warning(f"Failed to rebuild search index: {e}")
            finally:
                # Re-enable foreign keys
                self.db.execute("PRAGMA foreign_keys = ON")

        except Exception as e:
            logger.error(f"Failed to import tasks: {e}", exc_info=True)
            raise
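Conflict resolution in the importer is last-write-wins on updated_at: a JSONL row only overwrites an existing DB row when its timestamp is strictly newer. The comparison in isolation (timestamps are illustrative):

from datetime import datetime

file_updated_at = "2025-01-02T00:00:00+00:00"  # from the JSONL row
db_updated_at = "2025-01-01T00:00:00+00:00"    # from the existing tasks row

if datetime.fromisoformat(file_updated_at) > datetime.fromisoformat(db_updated_at):
    print("file wins - row is upserted")   # counted in updated_count above
else:
    print("db wins - row is skipped")      # counted in skipped_count above

The listing continues with get_sync_status: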
    def get_sync_status(self) -> dict[str, Any]:
        """
        Get sync status by comparing content hash.
        """
        if not self.export_path.exists():
            return {"status": "no_file", "synced": False}

        meta_path = self.export_path.parent / "tasks_meta.json"
        if not meta_path.exists():
            return {"status": "no_meta", "synced": False}

        try:
            with open(meta_path, encoding="utf-8") as f:
                meta = json.load(f)

            # Note: To properly detect if file changed, we'd need to recalculate hash
            # using the same logic as export (sorted json dumps). For now, we rely on
            # the meta file to tell us when the file was last exported.

            # For checking if DB is ahead of Export, we'd need to dry-run export.
            # For checking if File is ahead of DB (Import needed), we check if file changed since last import?
            # Or simplified: "synced" if last export timestamp > last DB update?
            # That requires tracking last import time.

            return {
                "status": "available",
                "last_exported": meta.get("last_exported"),
                "hash": meta.get("content_hash"),
                "synced": True,  # Placeholder
            }
        except Exception:
            return {"status": "error", "synced": False}

    def trigger_export(self, project_id: str | None = None) -> None:
        """
        Trigger a debounced export.

        Uses async debounce pattern to avoid blocking the event loop during export.
        When running outside an event loop (e.g., CLI usage), runs synchronously.

        Args:
            project_id: Optional project to export
        """
        self._last_change_time = time.time()

        if self._export_task is None or self._export_task.done():
            try:
                loop = asyncio.get_running_loop()
                # Capture project_id at task creation to avoid race condition
                self._export_task = loop.create_task(self._process_export_queue(project_id))
            except RuntimeError:
                # No running event loop (e.g. CLI usage) - run sync immediately
                # Skip debounce and export directly
                try:
                    self.export_to_jsonl(project_id)
                except Exception as e:
                    logger.warning(f"Failed to sync task export: {e}")

    async def _process_export_queue(self, project_id: str | None = None) -> None:
        """
        Process export task with debounce.

        Waits for debounce interval, then runs export in executor to avoid
        blocking the event loop during file I/O and hash computation.

        During graceful shutdown, flushes any pending export immediately rather
        than abandoning it.

        Args:
            project_id: Project ID captured at task creation time to avoid race conditions.
        """
        while True:
            # Check if debounce time has passed
            now = time.time()
            elapsed = now - self._last_change_time

            # Export if debounce time passed OR shutdown requested (flush pending)
            if elapsed >= self._debounce_interval or self._shutdown_requested:
                try:
                    # Run the blocking export in a thread pool to avoid blocking event loop
                    loop = asyncio.get_running_loop()
                    await loop.run_in_executor(None, self.export_to_jsonl, project_id)
                except Exception as e:
                    logger.error(f"Error during task sync export: {e}")
                return

            # Wait for remaining debounce time
            wait_time = max(0.1, self._debounce_interval - elapsed)
            await asyncio.sleep(wait_time)
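trigger_export and _process_export_queue together implement a trailing-edge debounce: every trigger stamps _last_change_time, and a single background task keeps sleeping until a full interval has passed since the most recent stamp, so a burst of changes produces one export. The same pattern in isolation (interval shortened for the demo; this is a sketch of the technique, not code from the package):

import asyncio
import time
from collections.abc import Callable

class Debouncer:
    def __init__(self, interval: float = 0.2):
        self.interval = interval
        self.last_change = 0.0
        self.task: asyncio.Task[None] | None = None

    def trigger(self, fn: Callable[[], None]) -> None:
        self.last_change = time.time()
        if self.task is None or self.task.done():
            self.task = asyncio.get_running_loop().create_task(self._run(fn))

    async def _run(self, fn: Callable[[], None]) -> None:
        while True:
            elapsed = time.time() - self.last_change
            if elapsed >= self.interval:
                fn()  # fires once, after the burst goes quiet
                return
            await asyncio.sleep(max(0.01, self.interval - elapsed))

async def main() -> None:
    debouncer = Debouncer()
    for _ in range(5):
        debouncer.trigger(lambda: print("export"))  # five triggers ...
        await asyncio.sleep(0.05)
    if debouncer.task:
        await debouncer.task  # ... one export

asyncio.run(main())

The listing continues with import_from_github_issues: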
    async def import_from_github_issues(
        self, repo_url: str, project_id: str | None = None, limit: int = 50
    ) -> dict[str, Any]:
        """
        Import open issues from a GitHub repository as tasks.
        Uses GitHub CLI (gh) for reliable API access.

        Args:
            repo_url: URL of the GitHub repository (e.g., https://github.com/owner/repo)
            project_id: Optional project ID (auto-detected from context if not provided)
            limit: Max issues to import

        Returns:
            Result with imported issue IDs
        """
        import re
        import subprocess  # nosec B404 - subprocess needed for gh CLI

        try:
            # Parse repo from URL
            match = re.match(r"https?://github\.com/([^/]+)/([^/]+)/?", repo_url)
            if not match:
                return {
                    "success": False,
                    "error": "Invalid GitHub URL. Expected: https://github.com/owner/repo",
                }

            owner, repo = match.groups()
            # Handle .git suffix (removesuffix, not rstrip: rstrip(".git") would
            # strip any trailing run of '.', 'g', 'i', 't' characters)
            repo = repo.removesuffix(".git")

            # Check if gh CLI is available
            try:
                subprocess.run(["gh", "--version"], capture_output=True, check=True)  # nosec B603 B607
            except (subprocess.CalledProcessError, FileNotFoundError):
                return {
                    "success": False,
                    "error": "GitHub CLI (gh) not found. Install from https://cli.github.com/",
                }

            # Fetch issues using gh CLI
            cmd = [
                "gh",
                "issue",
                "list",
                "--repo",
                f"{owner}/{repo}",
                "--state",
                "open",
                "--limit",
                str(limit),
                "--json",
                "number,title,body,labels,createdAt",
            ]

            result = subprocess.run(cmd, capture_output=True, text=True)  # nosec B603 - hardcoded gh arguments
            if result.returncode != 0:
                return {
                    "success": False,
                    "error": f"gh command failed: {result.stderr}",
                }

            issues = json.loads(result.stdout)

            if not issues:
                return {
                    "success": True,
                    "message": "No open issues found",
                    "imported": [],
                    "count": 0,
                }

            # Resolve project ID if not provided
            if not project_id:
                # Try to find project by github_url
                row = self.db.fetchone("SELECT id FROM projects WHERE github_url = ?", (repo_url,))
                if row:
                    project_id = row["id"]

            if not project_id:
                # Try current project context
                from gobby.utils.project_context import get_project_context

                ctx = get_project_context()
                if ctx and ctx.get("id"):
                    project_id = ctx["id"]

            if not project_id:
                return {
                    "success": False,
                    "error": "Could not determine project ID. Run from within a gobby project.",
                }

            imported = []
            imported_count = 0

            with self.db.transaction() as conn:
                for issue in issues:
                    issue_num = issue.get("number")
                    if not issue_num:
                        continue

                    task_id = f"gh-{issue_num}"
                    title = issue.get("title", "Untitled Issue")
                    body = issue.get("body") or ""
                    # Add link to original issue
                    desc = f"{body}\n\nSource: {repo_url}/issues/{issue_num}".strip()

                    # Extract label names
                    labels = [lbl.get("name") for lbl in issue.get("labels", []) if lbl.get("name")]
                    labels_json = json.dumps(labels) if labels else None

                    created_at = issue.get("createdAt", datetime.now(UTC).isoformat())
                    updated_at = datetime.now(UTC).isoformat()

                    # Check if exists
                    exists = self.db.fetchone("SELECT 1 FROM tasks WHERE id = ?", (task_id,))
                    if exists:
                        # Update existing
                        conn.execute(
                            "UPDATE tasks SET title=?, description=?, labels=?, updated_at=? WHERE id=?",
                            (title, desc, labels_json, updated_at, task_id),
                        )
                    else:
                        # Insert new
                        conn.execute(
                            """
                            INSERT INTO tasks (
                                id, project_id, title, description, status, task_type,
                                labels, created_at, updated_at
                            ) VALUES (?, ?, ?, ?, 'open', 'issue', ?, ?, ?)
                            """,
                            (task_id, project_id, title, desc, labels_json, created_at, updated_at),
                        )
                        imported_count += 1

                    imported.append(task_id)

            return {
                "success": True,
                "imported": imported,
                "count": imported_count,
                "message": f"Imported {imported_count} new issues, updated {len(imported) - imported_count} existing.",
            }

        except json.JSONDecodeError as e:
            logger.error(f"Failed to parse gh output: {e}")
            return {"success": False, "error": f"Failed to parse GitHub response: {e}"}
        except Exception as e:
            logger.error(f"Failed to import from GitHub: {e}")
            return {"success": False, "error": str(e)}
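Each element of the gh JSON array maps onto a task row roughly as follows; the payload is illustrative, limited to the --json fields requested above, and the mapping mirrors the loop body:

import json
from datetime import UTC, datetime

repo_url = "https://github.com/owner/repo"  # illustrative
issue = {
    "number": 42,
    "title": "Fix crash on startup",
    "body": "Steps to reproduce...",
    "labels": [{"name": "bug"}],
    "createdAt": "2025-01-01T00:00:00Z",
}

names = [lbl["name"] for lbl in issue.get("labels", [])]
task_row = {
    "id": f"gh-{issue['number']}",
    "title": issue.get("title", "Untitled Issue"),
    "description": f"{issue.get('body') or ''}\n\nSource: {repo_url}/issues/{issue['number']}".strip(),
    "labels": json.dumps(names) if names else None,
    "status": "open",
    "task_type": "issue",
    "created_at": issue.get("createdAt", datetime.now(UTC).isoformat()),
}
print(task_row["id"], task_row["labels"])  # gh-42 ["bug"]

The listing closes with the shutdown methods: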
    def stop(self) -> None:
        """Stop any pending export tasks."""
        self._shutdown_requested = True
        if self._export_task and not self._export_task.done():
            self._export_task.cancel()

    async def shutdown(self) -> None:
        """Gracefully shutdown the export task.

        Sets the shutdown flag first so the exporter loop can observe it and
        exit early, then waits for any pending export to complete.
        """
        # Set flag BEFORE awaiting so _process_export_queue can see it
        self._shutdown_requested = True

        if self._export_task:
            if not self._export_task.done():
                try:
                    # Wait for export to complete naturally
                    await self._export_task
                except asyncio.CancelledError:
                    pass
            self._export_task = None
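Taken together, a host process would drive this class roughly as follows. This is a sketch, not an API demonstrated in this diff; in particular, LocalTaskManager's constructor lives in gobby/storage/tasks and its arguments are elided here:

import asyncio

from gobby.storage.tasks import LocalTaskManager
from gobby.sync.tasks import TaskSyncManager

async def main() -> None:
    task_manager = LocalTaskManager(...)  # constructor args not shown in this diff
    sync = TaskSyncManager(task_manager)

    sync.import_from_jsonl()  # pull Git-tracked JSONL state into SQLite first
    sync.trigger_export()     # debounced; bursts of changes coalesce into one write
    await sync.shutdown()     # flushes any pending export before exit

asyncio.run(main())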