gobby-0.2.9-py3-none-any.whl → gobby-0.2.11-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- gobby/__init__.py +1 -1
- gobby/adapters/__init__.py +6 -0
- gobby/adapters/base.py +11 -2
- gobby/adapters/claude_code.py +2 -2
- gobby/adapters/codex_impl/adapter.py +38 -43
- gobby/adapters/copilot.py +324 -0
- gobby/adapters/cursor.py +373 -0
- gobby/adapters/gemini.py +2 -26
- gobby/adapters/windsurf.py +359 -0
- gobby/agents/definitions.py +162 -2
- gobby/agents/isolation.py +33 -1
- gobby/agents/pty_reader.py +192 -0
- gobby/agents/registry.py +10 -1
- gobby/agents/runner.py +24 -8
- gobby/agents/sandbox.py +8 -3
- gobby/agents/session.py +4 -0
- gobby/agents/spawn.py +9 -2
- gobby/agents/spawn_executor.py +49 -61
- gobby/agents/spawners/command_builder.py +4 -4
- gobby/app_context.py +5 -0
- gobby/cli/__init__.py +4 -0
- gobby/cli/install.py +259 -4
- gobby/cli/installers/__init__.py +12 -0
- gobby/cli/installers/copilot.py +242 -0
- gobby/cli/installers/cursor.py +244 -0
- gobby/cli/installers/shared.py +3 -0
- gobby/cli/installers/windsurf.py +242 -0
- gobby/cli/pipelines.py +639 -0
- gobby/cli/sessions.py +3 -1
- gobby/cli/skills.py +209 -0
- gobby/cli/tasks/crud.py +6 -5
- gobby/cli/tasks/search.py +1 -1
- gobby/cli/ui.py +116 -0
- gobby/cli/workflows.py +38 -17
- gobby/config/app.py +5 -0
- gobby/config/skills.py +23 -2
- gobby/hooks/broadcaster.py +9 -0
- gobby/hooks/event_handlers/_base.py +6 -1
- gobby/hooks/event_handlers/_session.py +44 -130
- gobby/hooks/events.py +48 -0
- gobby/hooks/hook_manager.py +25 -3
- gobby/install/copilot/hooks/hook_dispatcher.py +203 -0
- gobby/install/cursor/hooks/hook_dispatcher.py +203 -0
- gobby/install/gemini/hooks/hook_dispatcher.py +8 -0
- gobby/install/windsurf/hooks/hook_dispatcher.py +205 -0
- gobby/llm/__init__.py +14 -1
- gobby/llm/claude.py +217 -1
- gobby/llm/service.py +149 -0
- gobby/mcp_proxy/instructions.py +9 -27
- gobby/mcp_proxy/models.py +1 -0
- gobby/mcp_proxy/registries.py +56 -9
- gobby/mcp_proxy/server.py +6 -2
- gobby/mcp_proxy/services/tool_filter.py +7 -0
- gobby/mcp_proxy/services/tool_proxy.py +19 -1
- gobby/mcp_proxy/stdio.py +37 -21
- gobby/mcp_proxy/tools/agents.py +7 -0
- gobby/mcp_proxy/tools/hub.py +30 -1
- gobby/mcp_proxy/tools/orchestration/cleanup.py +5 -5
- gobby/mcp_proxy/tools/orchestration/monitor.py +1 -1
- gobby/mcp_proxy/tools/orchestration/orchestrate.py +8 -3
- gobby/mcp_proxy/tools/orchestration/review.py +17 -4
- gobby/mcp_proxy/tools/orchestration/wait.py +7 -7
- gobby/mcp_proxy/tools/pipelines/__init__.py +254 -0
- gobby/mcp_proxy/tools/pipelines/_discovery.py +67 -0
- gobby/mcp_proxy/tools/pipelines/_execution.py +281 -0
- gobby/mcp_proxy/tools/sessions/_crud.py +4 -4
- gobby/mcp_proxy/tools/sessions/_handoff.py +1 -1
- gobby/mcp_proxy/tools/skills/__init__.py +184 -30
- gobby/mcp_proxy/tools/spawn_agent.py +229 -14
- gobby/mcp_proxy/tools/tasks/_context.py +8 -0
- gobby/mcp_proxy/tools/tasks/_crud.py +27 -1
- gobby/mcp_proxy/tools/tasks/_helpers.py +1 -1
- gobby/mcp_proxy/tools/tasks/_lifecycle.py +125 -8
- gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +2 -1
- gobby/mcp_proxy/tools/tasks/_search.py +1 -1
- gobby/mcp_proxy/tools/workflows/__init__.py +9 -2
- gobby/mcp_proxy/tools/workflows/_lifecycle.py +12 -1
- gobby/mcp_proxy/tools/workflows/_query.py +45 -26
- gobby/mcp_proxy/tools/workflows/_terminal.py +39 -3
- gobby/mcp_proxy/tools/worktrees.py +54 -15
- gobby/memory/context.py +5 -5
- gobby/runner.py +108 -6
- gobby/servers/http.py +7 -1
- gobby/servers/routes/__init__.py +2 -0
- gobby/servers/routes/admin.py +44 -0
- gobby/servers/routes/mcp/endpoints/execution.py +18 -25
- gobby/servers/routes/mcp/hooks.py +10 -1
- gobby/servers/routes/pipelines.py +227 -0
- gobby/servers/websocket.py +314 -1
- gobby/sessions/analyzer.py +87 -1
- gobby/sessions/manager.py +5 -5
- gobby/sessions/transcripts/__init__.py +3 -0
- gobby/sessions/transcripts/claude.py +5 -0
- gobby/sessions/transcripts/codex.py +5 -0
- gobby/sessions/transcripts/gemini.py +5 -0
- gobby/skills/hubs/__init__.py +25 -0
- gobby/skills/hubs/base.py +234 -0
- gobby/skills/hubs/claude_plugins.py +328 -0
- gobby/skills/hubs/clawdhub.py +289 -0
- gobby/skills/hubs/github_collection.py +465 -0
- gobby/skills/hubs/manager.py +263 -0
- gobby/skills/hubs/skillhub.py +342 -0
- gobby/storage/memories.py +4 -4
- gobby/storage/migrations.py +95 -3
- gobby/storage/pipelines.py +367 -0
- gobby/storage/sessions.py +23 -4
- gobby/storage/skills.py +1 -1
- gobby/storage/tasks/_aggregates.py +2 -2
- gobby/storage/tasks/_lifecycle.py +4 -4
- gobby/storage/tasks/_models.py +7 -1
- gobby/storage/tasks/_queries.py +3 -3
- gobby/sync/memories.py +4 -3
- gobby/tasks/commits.py +48 -17
- gobby/workflows/actions.py +75 -0
- gobby/workflows/context_actions.py +246 -5
- gobby/workflows/definitions.py +119 -1
- gobby/workflows/detection_helpers.py +23 -11
- gobby/workflows/enforcement/task_policy.py +18 -0
- gobby/workflows/engine.py +20 -1
- gobby/workflows/evaluator.py +8 -5
- gobby/workflows/lifecycle_evaluator.py +57 -26
- gobby/workflows/loader.py +567 -30
- gobby/workflows/lobster_compat.py +147 -0
- gobby/workflows/pipeline_executor.py +801 -0
- gobby/workflows/pipeline_state.py +172 -0
- gobby/workflows/pipeline_webhooks.py +206 -0
- gobby/workflows/premature_stop.py +5 -0
- gobby/worktrees/git.py +135 -20
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/METADATA +56 -22
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/RECORD +134 -106
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/WHEEL +0 -0
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/entry_points.txt +0 -0
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/licenses/LICENSE.md +0 -0
- {gobby-0.2.9.dist-info → gobby-0.2.11.dist-info}/top_level.txt +0 -0
gobby/storage/migrations.py
CHANGED
@@ -43,11 +43,12 @@ class MigrationUnsupportedError(Exception):
 # Migration can be SQL string or a callable that takes LocalDatabase
 MigrationAction = str | Callable[[LocalDatabase], None]

-# Baseline version - the schema state at
+# Baseline version - the schema state at v81 (flattened)
 # This is applied for new databases directly
-BASELINE_VERSION
+# Note: Migrations >= BASELINE_VERSION still run for existing databases
+BASELINE_VERSION = 81

-# Baseline schema - flattened from
+# Baseline schema - flattened from v81 production state, includes all migrations
 # This is applied for new databases directly
 # Generated by: sqlite3 ~/.gobby/gobby-hub.db .schema
 BASELINE_SCHEMA = """
@@ -220,6 +221,7 @@ CREATE TABLE sessions (
     agent_depth INTEGER DEFAULT 0,
     spawned_by_agent_id TEXT,
     workflow_name TEXT,
+    step_variables TEXT,
     agent_run_id TEXT REFERENCES agent_runs(id) ON DELETE SET NULL,
     context_injected INTEGER DEFAULT 0,
     original_prompt TEXT,
@@ -620,6 +622,42 @@ CREATE INDEX idx_clones_status ON clones(status);
 CREATE INDEX idx_clones_task ON clones(task_id);
 CREATE INDEX idx_clones_session ON clones(agent_session_id);
 CREATE UNIQUE INDEX idx_clones_path ON clones(clone_path);
+
+CREATE TABLE pipeline_executions (
+    id TEXT PRIMARY KEY,
+    pipeline_name TEXT NOT NULL,
+    project_id TEXT NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
+    status TEXT NOT NULL DEFAULT 'pending',
+    inputs_json TEXT,
+    outputs_json TEXT,
+    created_at TEXT NOT NULL DEFAULT (datetime('now')),
+    updated_at TEXT NOT NULL DEFAULT (datetime('now')),
+    completed_at TEXT,
+    resume_token TEXT UNIQUE,
+    session_id TEXT REFERENCES sessions(id) ON DELETE SET NULL,
+    parent_execution_id TEXT REFERENCES pipeline_executions(id) ON DELETE CASCADE
+);
+CREATE INDEX idx_pipeline_executions_project ON pipeline_executions(project_id);
+CREATE INDEX idx_pipeline_executions_status ON pipeline_executions(status);
+CREATE INDEX idx_pipeline_executions_resume_token ON pipeline_executions(resume_token);
+
+CREATE TABLE step_executions (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    execution_id TEXT NOT NULL REFERENCES pipeline_executions(id) ON DELETE CASCADE,
+    step_id TEXT NOT NULL,
+    status TEXT NOT NULL DEFAULT 'pending',
+    started_at TEXT,
+    completed_at TEXT,
+    input_json TEXT,
+    output_json TEXT,
+    error TEXT,
+    approval_token TEXT UNIQUE,
+    approved_by TEXT,
+    approved_at TEXT,
+    UNIQUE(execution_id, step_id)
+);
+CREATE INDEX idx_step_executions_execution ON step_executions(execution_id);
+CREATE INDEX idx_step_executions_approval_token ON step_executions(approval_token);
 """

 # Future migrations (v61+)
@@ -739,6 +777,60 @@ MIGRATIONS: list[tuple[int, str, MigrationAction]] = [
     (78, "Add hub tracking fields to skills", _migrate_add_hub_tracking_to_skills),
     # Skill injection: Add always_apply and injection_format columns
     (79, "Add skill injection columns", _migrate_add_skill_injection_columns),
+    # Pipeline system: Add pipeline_executions and step_executions tables
+    (
+        80,
+        "Add pipeline execution tables",
+        """
+        CREATE TABLE IF NOT EXISTS pipeline_executions (
+            id TEXT PRIMARY KEY,
+            pipeline_name TEXT NOT NULL,
+            project_id TEXT NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
+            status TEXT NOT NULL DEFAULT 'pending',
+            inputs_json TEXT,
+            outputs_json TEXT,
+            created_at TEXT NOT NULL DEFAULT (datetime('now')),
+            updated_at TEXT NOT NULL DEFAULT (datetime('now')),
+            completed_at TEXT,
+            resume_token TEXT UNIQUE,
+            session_id TEXT REFERENCES sessions(id) ON DELETE SET NULL,
+            parent_execution_id TEXT REFERENCES pipeline_executions(id) ON DELETE CASCADE
+        );
+        CREATE INDEX IF NOT EXISTS idx_pipeline_executions_project ON pipeline_executions(project_id);
+        CREATE INDEX IF NOT EXISTS idx_pipeline_executions_status ON pipeline_executions(status);
+        CREATE INDEX IF NOT EXISTS idx_pipeline_executions_resume_token ON pipeline_executions(resume_token);
+
+        CREATE TABLE IF NOT EXISTS step_executions (
+            id INTEGER PRIMARY KEY AUTOINCREMENT,
+            execution_id TEXT NOT NULL REFERENCES pipeline_executions(id) ON DELETE CASCADE,
+            step_id TEXT NOT NULL,
+            status TEXT NOT NULL DEFAULT 'pending',
+            started_at TEXT,
+            completed_at TEXT,
+            input_json TEXT,
+            output_json TEXT,
+            error TEXT,
+            approval_token TEXT UNIQUE,
+            approved_by TEXT,
+            approved_at TEXT,
+            UNIQUE(execution_id, step_id)
+        );
+        CREATE INDEX IF NOT EXISTS idx_step_executions_execution ON step_executions(execution_id);
+        CREATE INDEX IF NOT EXISTS idx_step_executions_approval_token ON step_executions(approval_token);
+        """,
+    ),
+    # Add step_variables JSON column to sessions for spawn-time variable passing
+    (
+        81,
+        "Add step_variables to sessions",
+        "ALTER TABLE sessions ADD COLUMN step_variables TEXT",
+    ),
+    # Rename task status 'review' to 'needs_review' for clarity
+    (
+        82,
+        "Rename task status 'review' to 'needs_review'",
+        "UPDATE tasks SET status = 'needs_review' WHERE status = 'review'",
+    ),
 ]
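For context, each MIGRATIONS entry is a (version, description, action) tuple whose action is either a SQL string or a callable taking a LocalDatabase, per the MigrationAction alias above. A minimal sketch of how such entries could be applied in order; this is illustrative only, not the package's actual runner, and the apply_migrations name and the executescript() call on the db handle are assumptions:

    def apply_migrations(db, current_version: int, migrations) -> int:
        """Hypothetical runner: apply entries newer than current_version, in ascending order."""
        for version, _description, action in sorted(migrations, key=lambda m: m[0]):
            if version <= current_version:
                continue  # already applied (new databases start from BASELINE_SCHEMA instead)
            if callable(action):
                action(db)                # Python migration that receives the LocalDatabase
            else:
                db.executescript(action)  # plain SQL migration (assumed API on the db handle)
            current_version = version
        return current_version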
gobby/storage/pipelines.py
ADDED
@@ -0,0 +1,367 @@
+"""Local pipeline execution storage manager."""
+
+from __future__ import annotations
+
+import logging
+from datetime import UTC, datetime
+from typing import Any
+
+from gobby.storage.database import DatabaseProtocol
+from gobby.utils.id import generate_prefixed_id
+from gobby.workflows.pipeline_state import (
+    ExecutionStatus,
+    PipelineExecution,
+    StepExecution,
+    StepStatus,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class LocalPipelineExecutionManager:
+    """Manager for local pipeline execution storage."""
+
+    def __init__(self, db: DatabaseProtocol, project_id: str):
+        """Initialize with database connection and project context.
+
+        Args:
+            db: Database connection
+            project_id: Project ID for scoping executions
+        """
+        self.db = db
+        self.project_id = project_id
+
+    def create_execution(
+        self,
+        pipeline_name: str,
+        inputs_json: str | None = None,
+        session_id: str | None = None,
+        parent_execution_id: str | None = None,
+    ) -> PipelineExecution:
+        """Create a new pipeline execution.
+
+        Args:
+            pipeline_name: Name of the pipeline being executed
+            inputs_json: JSON string of input parameters
+            session_id: Session that triggered the execution
+            parent_execution_id: Parent execution for nested pipelines
+
+        Returns:
+            Created PipelineExecution instance
+        """
+        execution_id = generate_prefixed_id("pe")
+        now = datetime.now(UTC).isoformat()
+
+        with self.db.transaction():
+            self.db.execute(
+                """
+                INSERT INTO pipeline_executions (
+                    id, pipeline_name, project_id, status, inputs_json,
+                    session_id, parent_execution_id, created_at, updated_at
+                )
+                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+                """,
+                (
+                    execution_id,
+                    pipeline_name,
+                    self.project_id,
+                    ExecutionStatus.PENDING.value,
+                    inputs_json,
+                    session_id,
+                    parent_execution_id,
+                    now,
+                    now,
+                ),
+            )
+
+        execution = self.get_execution(execution_id)
+        if execution is None:
+            raise RuntimeError(f"Execution {execution_id} not found after creation")
+        return execution
+
+    def get_execution(self, execution_id: str) -> PipelineExecution | None:
+        """Get execution by ID.
+
+        Args:
+            execution_id: Execution UUID
+
+        Returns:
+            PipelineExecution or None if not found
+        """
+        row = self.db.fetchone(
+            "SELECT * FROM pipeline_executions WHERE id = ?",
+            (execution_id,),
+        )
+        return PipelineExecution.from_row(row) if row else None
+
+    def update_execution_status(
+        self,
+        execution_id: str,
+        status: ExecutionStatus,
+        resume_token: str | None = None,
+        outputs_json: str | None = None,
+    ) -> PipelineExecution | None:
+        """Update execution status.
+
+        Args:
+            execution_id: Execution UUID
+            status: New status
+            resume_token: Resume token for approval gates
+            outputs_json: JSON string of outputs (for completed status)
+
+        Returns:
+            Updated PipelineExecution or None if not found
+        """
+        now = datetime.now(UTC).isoformat()
+        completed_at = now if status == ExecutionStatus.COMPLETED else None
+
+        self.db.execute(
+            """
+            UPDATE pipeline_executions
+            SET status = ?, resume_token = ?, outputs_json = ?,
+                completed_at = COALESCE(?, completed_at), updated_at = ?
+            WHERE id = ?
+            """,
+            (
+                status.value,
+                resume_token,
+                outputs_json,
+                completed_at,
+                now,
+                execution_id,
+            ),
+        )
+
+        return self.get_execution(execution_id)
+
+    def list_executions(
+        self,
+        status: ExecutionStatus | None = None,
+        pipeline_name: str | None = None,
+        limit: int = 50,
+    ) -> list[PipelineExecution]:
+        """List executions for the project.
+
+        Args:
+            status: Filter by status
+            pipeline_name: Filter by pipeline name
+            limit: Maximum number of results
+
+        Returns:
+            List of PipelineExecution instances
+        """
+        query = "SELECT * FROM pipeline_executions WHERE project_id = ?"
+        params: list[Any] = [self.project_id]
+
+        if status is not None:
+            query += " AND status = ?"
+            params.append(status.value)
+
+        if pipeline_name is not None:
+            query += " AND pipeline_name = ?"
+            params.append(pipeline_name)
+
+        query += " ORDER BY created_at DESC LIMIT ?"
+        params.append(limit)
+
+        rows = self.db.fetchall(query, tuple(params))
+        return [PipelineExecution.from_row(row) for row in rows]
+
+    def get_execution_by_resume_token(self, token: str) -> PipelineExecution | None:
+        """Get execution by resume token.
+
+        Args:
+            token: Resume token
+
+        Returns:
+            PipelineExecution or None if not found
+        """
+        row = self.db.fetchone(
+            "SELECT * FROM pipeline_executions WHERE resume_token = ?",
+            (token,),
+        )
+        return PipelineExecution.from_row(row) if row else None
+
+    def resolve_execution_reference(self, ref: str) -> str:
+        """Resolve an execution reference to a UUID.
+
+        Supports:
+        - Full UUID: pe-abc123... or UUID format
+        - UUID prefix: pe-abc1 (matches by prefix)
+
+        Args:
+            ref: Execution reference
+
+        Returns:
+            Execution UUID
+
+        Raises:
+            ValueError: If reference cannot be resolved
+        """
+        # Try exact match first
+        execution = self.get_execution(ref)
+        if execution:
+            return execution.id
+
+        # Try prefix match
+        row = self.db.fetchone(
+            "SELECT id FROM pipeline_executions WHERE id LIKE ? AND project_id = ?",
+            (f"{ref}%", self.project_id),
+        )
+        if row:
+            result: str = row["id"]
+            return result
+
+        raise ValueError(f"Cannot resolve execution reference: {ref}")
+
+    # Step execution methods
+
+    def create_step_execution(
+        self,
+        execution_id: str,
+        step_id: str,
+        input_json: str | None = None,
+    ) -> StepExecution:
+        """Create a new step execution.
+
+        Args:
+            execution_id: Parent pipeline execution ID
+            step_id: Step ID from pipeline definition
+            input_json: JSON string of step input
+
+        Returns:
+            Created StepExecution instance
+        """
+        self.db.execute(
+            """
+            INSERT INTO step_executions (
+                execution_id, step_id, status, input_json
+            )
+            VALUES (?, ?, ?, ?)
+            """,
+            (
+                execution_id,
+                step_id,
+                StepStatus.PENDING.value,
+                input_json,
+            ),
+        )
+
+        # Get the created step by execution_id and step_id (unique combination)
+        row = self.db.fetchone(
+            "SELECT * FROM step_executions WHERE execution_id = ? AND step_id = ?",
+            (execution_id, step_id),
+        )
+        if row is None:
+            raise RuntimeError(f"Step {step_id} not found after creation")
+        return StepExecution.from_row(row)
+
+    def update_step_execution(
+        self,
+        step_execution_id: int,
+        status: StepStatus | None = None,
+        output_json: str | None = None,
+        error: str | None = None,
+        approval_token: str | None = None,
+        approved_by: str | None = None,
+    ) -> StepExecution | None:
+        """Update a step execution.
+
+        Args:
+            step_execution_id: Step execution ID (integer)
+            status: New status
+            output_json: JSON string of step output
+            error: Error message (for failed status)
+            approval_token: Token for approval gate
+            approved_by: Who approved the step
+
+        Returns:
+            Updated StepExecution or None if not found
+        """
+        now = datetime.now(UTC).isoformat()
+
+        # Build update parts dynamically (step_executions has no updated_at column)
+        updates: list[str] = []
+        params: list[Any] = []
+
+        if status is not None:
+            updates.append("status = ?")
+            params.append(status.value)
+            # Set timestamps based on status
+            if status == StepStatus.RUNNING:
+                updates.append("started_at = COALESCE(started_at, ?)")
+                params.append(now)
+            elif status in (StepStatus.COMPLETED, StepStatus.FAILED, StepStatus.SKIPPED):
+                updates.append("completed_at = COALESCE(completed_at, ?)")
+                params.append(now)
+
+        if output_json is not None:
+            updates.append("output_json = ?")
+            params.append(output_json)
+
+        if error is not None:
+            updates.append("error = ?")
+            params.append(error)
+
+        if approval_token is not None:
+            updates.append("approval_token = ?")
+            params.append(approval_token)
+
+        if approved_by is not None:
+            updates.append("approved_by = ?")
+            params.append(approved_by)
+            updates.append("approved_at = ?")
+            params.append(now)
+
+        if not updates:
+            # Nothing to update
+            row = self.db.fetchone(
+                "SELECT * FROM step_executions WHERE id = ?",
+                (step_execution_id,),
+            )
+            return StepExecution.from_row(row) if row else None
+
+        # Append step_execution_id for WHERE clause
+        params.append(step_execution_id)
+
+        # updates list contains only hardcoded column names, values are parameterized
+        self.db.execute(
+            f"UPDATE step_executions SET {', '.join(updates)} WHERE id = ?",  # nosec B608
+            tuple(params),
+        )
+
+        row = self.db.fetchone(
+            "SELECT * FROM step_executions WHERE id = ?",
+            (step_execution_id,),
+        )
+        return StepExecution.from_row(row) if row else None
+
+    def get_step_by_approval_token(self, token: str) -> StepExecution | None:
+        """Get step execution by approval token.
+
+        Args:
+            token: Approval token
+
+        Returns:
+            StepExecution or None if not found
+        """
+        row = self.db.fetchone(
+            "SELECT * FROM step_executions WHERE approval_token = ?",
+            (token,),
+        )
+        return StepExecution.from_row(row) if row else None
+
+    def get_steps_for_execution(self, execution_id: str) -> list[StepExecution]:
+        """Get all steps for an execution.
+
+        Args:
+            execution_id: Pipeline execution ID
+
+        Returns:
+            List of StepExecution instances
+        """
+        rows = self.db.fetchall(
+            "SELECT * FROM step_executions WHERE execution_id = ? ORDER BY id",
+            (execution_id,),
+        )
+        return [StepExecution.from_row(row) for row in rows]
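Taken together, the new manager is a thin CRUD layer over the pipeline_executions and step_executions tables from migration 80. A hedged usage sketch follows; the db handle, project id, pipeline name, and step id are assumptions, and it presumes the PipelineExecution/StepExecution dataclasses expose the row id as .id:

    import json

    # Hypothetical wiring: obtaining a DatabaseProtocol-compatible `db` is outside this diff.
    manager = LocalPipelineExecutionManager(db, project_id="proj-123")

    # Record a pipeline run and its first step (names and inputs are made up).
    execution = manager.create_execution("release", inputs_json=json.dumps({"branch": "main"}))
    step = manager.create_step_execution(execution.id, step_id="build")

    # Drive the step through its lifecycle; started_at/completed_at are stamped on status changes.
    manager.update_step_execution(step.id, status=StepStatus.RUNNING)
    manager.update_step_execution(
        step.id,
        status=StepStatus.COMPLETED,
        output_json=json.dumps({"artifact": "dist/gobby.whl"}),
    )

    # Close out the run; completed_at is set because the status is COMPLETED.
    manager.update_execution_status(
        execution.id,
        ExecutionStatus.COMPLETED,
        outputs_json=json.dumps({"ok": True}),
    )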
gobby/storage/sessions.py
CHANGED
@@ -38,6 +38,7 @@ class Session:
     spawned_by_agent_id: str | None = None  # ID of agent that spawned this session
     # Terminal pickup metadata fields
     workflow_name: str | None = None  # Workflow to activate on terminal pickup
+    step_variables: dict[str, Any] | None = None  # Variables for workflow activation
     agent_run_id: str | None = None  # Link back to agent run record
     context_injected: bool = False  # Whether context was injected into prompt
     original_prompt: str | None = None  # Original prompt for terminal mode
@@ -77,6 +78,7 @@ class Session:
             agent_depth=row["agent_depth"] or 0,
             spawned_by_agent_id=row["spawned_by_agent_id"],
             workflow_name=row["workflow_name"],
+            step_variables=cls._parse_json_field(row, "step_variables"),
             agent_run_id=row["agent_run_id"],
             context_injected=bool(row["context_injected"]),
             original_prompt=row["original_prompt"],
@@ -110,6 +112,21 @@ class Session:
             logger.warning("Failed to parse terminal_context JSON, returning None")
             return None

+    @classmethod
+    def _parse_json_field(cls, row: Any, field_name: str) -> dict[str, Any] | None:
+        """Parse a JSON field from a database row, returning None on missing/malformed data."""
+        if field_name not in row.keys():
+            return None
+        raw = row[field_name]
+        if not raw:
+            return None
+        try:
+            result: dict[str, Any] = json.loads(raw)
+            return result
+        except json.JSONDecodeError:
+            logger.warning(f"Failed to parse {field_name} JSON, returning None")
+            return None
+
     def to_dict(self) -> dict[str, Any]:
         """Convert to dictionary."""
         return {
@@ -167,6 +184,7 @@ class LocalSessionManager:
         spawned_by_agent_id: str | None = None,
         terminal_context: dict[str, Any] | None = None,
         workflow_name: str | None = None,
+        step_variables: dict[str, Any] | None = None,
     ) -> Session:
         """
         Register a new session or return existing one.
@@ -176,9 +194,9 @@
         returns the existing session. Otherwise creates a new one.

         Args:
-            external_id: External session identifier (e.g., Claude Code
+            external_id: External session identifier (e.g., Claude Code session ID)
             machine_id: Machine identifier
-            source: CLI source (
+            source: CLI source (claude, gemini, codex, cursor, windsurf, copilot)
             project_id: Project ID (required - sessions must belong to a project)
             title: Optional session title
             jsonl_path: Path to transcript file
@@ -242,9 +260,9 @@
                     id, external_id, machine_id, source, project_id, title,
                     jsonl_path, git_branch, parent_session_id,
                     agent_depth, spawned_by_agent_id, terminal_context,
-                    workflow_name, status, created_at, updated_at, seq_num, had_edits
+                    workflow_name, step_variables, status, created_at, updated_at, seq_num, had_edits
                 )
-                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'active', ?, ?, ?, 0)
+                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'active', ?, ?, ?, 0)
                 """,
                 (
                     session_id,
@@ -260,6 +278,7 @@
                     spawned_by_agent_id,
                     json.dumps(terminal_context) if terminal_context else None,
                     workflow_name,
+                    json.dumps(step_variables) if step_variables else None,
                     now,
                     now,
                     next_seq_num,
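The new step_variables column is written as JSON text on insert and parsed back through Session._parse_json_field when a row is rehydrated. A small illustrative sketch, assuming an already-constructed LocalSessionManager; the registering method is called register_session here purely for illustration (its real name sits outside the visible hunks), and all argument values are made up:

    session = manager.register_session(  # method name assumed for illustration
        external_id="claude-abc123",
        machine_id="machine-1",
        source="claude",
        project_id="proj-123",
        workflow_name="implement-task",
        step_variables={"task_id": "task-42", "branch": "feature/pipelines"},
    )

    # The dict is json.dumps()-ed into the step_variables column on INSERT; when the
    # row is read back, Session.from_row() calls _parse_json_field(row, "step_variables"),
    # which returns the dict again, or None for a NULL, empty, or malformed value.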
gobby/storage/skills.py
CHANGED
@@ -23,7 +23,7 @@ logger = logging.getLogger(__name__)
 _UNSET: Any = object()

 # Valid source types for skills
-SkillSourceType = Literal["local", "github", "url", "zip", "filesystem"]
+SkillSourceType = Literal["local", "github", "url", "zip", "filesystem", "hub"]


 @dataclass
gobby/storage/tasks/_aggregates.py
CHANGED
@@ -97,7 +97,7 @@ def count_ready_tasks(
             -- Blocker is unresolved if not closed AND not in review without requiring user review
             AND NOT (
                 blocker.status = 'closed'
-                OR (blocker.status = '
+                OR (blocker.status = 'needs_review' AND blocker.requires_user_review = 0)
             )
             -- Exclude ancestor blocked by any descendant (completion block, not work block)
             -- Check if t.id appears anywhere in blocker's ancestor chain
@@ -153,7 +153,7 @@ def count_blocked_tasks(
             -- Blocker is unresolved if not closed AND not in review without requiring user review
             AND NOT (
                 blocker.status = 'closed'
-                OR (blocker.status = '
+                OR (blocker.status = 'needs_review' AND blocker.requires_user_review = 0)
             )
             -- Exclude ancestor blocked by any descendant (completion block, not work block)
             -- Check if t.id appears anywhere in blocker's ancestor chain
gobby/storage/tasks/_lifecycle.py
CHANGED
@@ -57,9 +57,9 @@ def close_task(
             f"Cannot close task {task_id}: has {len(open_children)} open child task(s): {child_list}"
         )

-    # Check if task is being closed from
+    # Check if task is being closed from needs_review state (user acceptance)
     current_task = get_task(db, task_id)
-    accepted_by_user = current_task.status == "
+    accepted_by_user = current_task.status == "needs_review" if current_task else False

     now = datetime.now(UTC).isoformat()
     with db.transaction() as conn:
@@ -117,8 +117,8 @@ def reopen_task(
         ValueError: If task not found or not closed/review
     """
     task = get_task(db, task_id)
-    if task.status not in ("closed", "
-        raise ValueError(f"Task {task_id} is not closed or in
+    if task.status not in ("closed", "needs_review"):
+        raise ValueError(f"Task {task_id} is not closed or in needs_review (status: {task.status})")

     now = datetime.now(UTC).isoformat()

gobby/storage/tasks/_models.py
CHANGED
@@ -82,7 +82,13 @@ class Task:
     project_id: str
     title: str
     status: Literal[
-        "open",
+        "open",
+        "in_progress",
+        "needs_review",
+        "closed",
+        "failed",
+        "escalated",
+        "needs_decomposition",
     ]
     priority: int
     task_type: str  # bug, feature, task, epic, chore
gobby/storage/tasks/_queries.py
CHANGED
@@ -156,7 +156,7 @@ def list_ready_tasks(
             -- Blocker is unresolved if not closed AND not in review without requiring user review
             AND NOT (
                 blocker.status = 'closed'
-                OR (blocker.status = '
+                OR (blocker.status = 'needs_review' AND blocker.requires_user_review = 0)
             )
             -- Exclude ancestor blocked by any descendant (completion block, not work block)
             AND NOT EXISTS (
@@ -186,7 +186,7 @@ def list_ready_tasks(
             -- Blocker is unresolved if not closed AND not in review without requiring user review
             AND NOT (
                 blocker.status = 'closed'
-                OR (blocker.status = '
+                OR (blocker.status = 'needs_review' AND blocker.requires_user_review = 0)
             )
             -- Exclude ancestor blocked by any descendant (completion block, not work block)
             AND NOT EXISTS (
@@ -266,7 +266,7 @@ def list_blocked_tasks(
             -- Blocker is unresolved if not closed AND not in review without requiring user review
             AND NOT (
                 blocker.status = 'closed'
-                OR (blocker.status = '
+                OR (blocker.status = 'needs_review' AND blocker.requires_user_review = 0)
             )
             -- Exclude ancestor blocked by any descendant (completion block, not work block)
             AND NOT EXISTS (
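All of the reworked queries share one resolution rule: a blocker stops blocking once it is closed, or once it is in needs_review and does not require explicit user review. The same rule as a standalone Python predicate (a sketch mirroring the SQL, not code from the package):

    def blocker_is_resolved(status: str, requires_user_review: bool) -> bool:
        """Mirror of the SQL: closed, or needs_review without a required user review."""
        return status == "closed" or (status == "needs_review" and not requires_user_review)

    # blocker_is_resolved("closed", True)         -> True  (closed always unblocks)
    # blocker_is_resolved("needs_review", False)  -> True  (auto-accepted review)
    # blocker_is_resolved("needs_review", True)   -> False (still waiting on the user)
    # blocker_is_resolved("in_progress", False)   -> False (work not finished)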
gobby/sync/memories.py
CHANGED
@@ -131,8 +131,8 @@ class MemoryBackupManager:
             from gobby.utils.project_context import get_project_context

             project_ctx = get_project_context()
-            if project_ctx and project_ctx.get("
-                project_path = Path(project_ctx["
+            if project_ctx and project_ctx.get("project_path"):
+                project_path = Path(project_ctx["project_path"]).expanduser().resolve()
                 return project_path / self.export_path
         except Exception:
             pass  # nosec B110 - fall back to cwd if project context unavailable
@@ -289,7 +289,8 @@ class MemoryBackupManager:
             return 0

         try:
-            memories
+            # Use high limit to export all memories for backup (default is 50)
+            memories = self.memory_manager.list_memories(limit=10000)

             # Deduplicate by content before export
             unique_memories = self._deduplicate_memories(memories)