gobby 0.2.8__py3-none-any.whl → 0.2.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (168)
  1. gobby/__init__.py +1 -1
  2. gobby/adapters/__init__.py +6 -0
  3. gobby/adapters/base.py +11 -2
  4. gobby/adapters/claude_code.py +5 -28
  5. gobby/adapters/codex_impl/adapter.py +38 -43
  6. gobby/adapters/copilot.py +324 -0
  7. gobby/adapters/cursor.py +373 -0
  8. gobby/adapters/gemini.py +2 -26
  9. gobby/adapters/windsurf.py +359 -0
  10. gobby/agents/definitions.py +162 -2
  11. gobby/agents/isolation.py +33 -1
  12. gobby/agents/pty_reader.py +192 -0
  13. gobby/agents/registry.py +10 -1
  14. gobby/agents/runner.py +24 -8
  15. gobby/agents/sandbox.py +8 -3
  16. gobby/agents/session.py +4 -0
  17. gobby/agents/spawn.py +9 -2
  18. gobby/agents/spawn_executor.py +49 -61
  19. gobby/agents/spawners/command_builder.py +4 -4
  20. gobby/app_context.py +64 -0
  21. gobby/cli/__init__.py +4 -0
  22. gobby/cli/install.py +259 -4
  23. gobby/cli/installers/__init__.py +12 -0
  24. gobby/cli/installers/copilot.py +242 -0
  25. gobby/cli/installers/cursor.py +244 -0
  26. gobby/cli/installers/shared.py +3 -0
  27. gobby/cli/installers/windsurf.py +242 -0
  28. gobby/cli/pipelines.py +639 -0
  29. gobby/cli/sessions.py +3 -1
  30. gobby/cli/skills.py +209 -0
  31. gobby/cli/tasks/crud.py +6 -5
  32. gobby/cli/tasks/search.py +1 -1
  33. gobby/cli/ui.py +116 -0
  34. gobby/cli/utils.py +5 -17
  35. gobby/cli/workflows.py +38 -17
  36. gobby/config/app.py +5 -0
  37. gobby/config/features.py +0 -20
  38. gobby/config/skills.py +23 -2
  39. gobby/config/tasks.py +4 -0
  40. gobby/hooks/broadcaster.py +9 -0
  41. gobby/hooks/event_handlers/__init__.py +155 -0
  42. gobby/hooks/event_handlers/_agent.py +175 -0
  43. gobby/hooks/event_handlers/_base.py +92 -0
  44. gobby/hooks/event_handlers/_misc.py +66 -0
  45. gobby/hooks/event_handlers/_session.py +487 -0
  46. gobby/hooks/event_handlers/_tool.py +196 -0
  47. gobby/hooks/events.py +48 -0
  48. gobby/hooks/hook_manager.py +27 -3
  49. gobby/install/copilot/hooks/hook_dispatcher.py +203 -0
  50. gobby/install/cursor/hooks/hook_dispatcher.py +203 -0
  51. gobby/install/gemini/hooks/hook_dispatcher.py +8 -0
  52. gobby/install/windsurf/hooks/hook_dispatcher.py +205 -0
  53. gobby/llm/__init__.py +14 -1
  54. gobby/llm/claude.py +594 -43
  55. gobby/llm/service.py +149 -0
  56. gobby/mcp_proxy/importer.py +4 -41
  57. gobby/mcp_proxy/instructions.py +9 -27
  58. gobby/mcp_proxy/manager.py +13 -3
  59. gobby/mcp_proxy/models.py +1 -0
  60. gobby/mcp_proxy/registries.py +66 -5
  61. gobby/mcp_proxy/server.py +6 -2
  62. gobby/mcp_proxy/services/recommendation.py +2 -28
  63. gobby/mcp_proxy/services/tool_filter.py +7 -0
  64. gobby/mcp_proxy/services/tool_proxy.py +19 -1
  65. gobby/mcp_proxy/stdio.py +37 -21
  66. gobby/mcp_proxy/tools/agents.py +7 -0
  67. gobby/mcp_proxy/tools/artifacts.py +3 -3
  68. gobby/mcp_proxy/tools/hub.py +30 -1
  69. gobby/mcp_proxy/tools/orchestration/cleanup.py +5 -5
  70. gobby/mcp_proxy/tools/orchestration/monitor.py +1 -1
  71. gobby/mcp_proxy/tools/orchestration/orchestrate.py +8 -3
  72. gobby/mcp_proxy/tools/orchestration/review.py +17 -4
  73. gobby/mcp_proxy/tools/orchestration/wait.py +7 -7
  74. gobby/mcp_proxy/tools/pipelines/__init__.py +254 -0
  75. gobby/mcp_proxy/tools/pipelines/_discovery.py +67 -0
  76. gobby/mcp_proxy/tools/pipelines/_execution.py +281 -0
  77. gobby/mcp_proxy/tools/sessions/_crud.py +4 -4
  78. gobby/mcp_proxy/tools/sessions/_handoff.py +1 -1
  79. gobby/mcp_proxy/tools/skills/__init__.py +184 -30
  80. gobby/mcp_proxy/tools/spawn_agent.py +229 -14
  81. gobby/mcp_proxy/tools/task_readiness.py +27 -4
  82. gobby/mcp_proxy/tools/tasks/_context.py +8 -0
  83. gobby/mcp_proxy/tools/tasks/_crud.py +27 -1
  84. gobby/mcp_proxy/tools/tasks/_helpers.py +1 -1
  85. gobby/mcp_proxy/tools/tasks/_lifecycle.py +125 -8
  86. gobby/mcp_proxy/tools/tasks/_lifecycle_validation.py +2 -1
  87. gobby/mcp_proxy/tools/tasks/_search.py +1 -1
  88. gobby/mcp_proxy/tools/workflows/__init__.py +273 -0
  89. gobby/mcp_proxy/tools/workflows/_artifacts.py +225 -0
  90. gobby/mcp_proxy/tools/workflows/_import.py +112 -0
  91. gobby/mcp_proxy/tools/workflows/_lifecycle.py +332 -0
  92. gobby/mcp_proxy/tools/workflows/_query.py +226 -0
  93. gobby/mcp_proxy/tools/workflows/_resolution.py +78 -0
  94. gobby/mcp_proxy/tools/workflows/_terminal.py +175 -0
  95. gobby/mcp_proxy/tools/worktrees.py +54 -15
  96. gobby/memory/components/__init__.py +0 -0
  97. gobby/memory/components/ingestion.py +98 -0
  98. gobby/memory/components/search.py +108 -0
  99. gobby/memory/context.py +5 -5
  100. gobby/memory/manager.py +16 -25
  101. gobby/paths.py +51 -0
  102. gobby/prompts/loader.py +1 -35
  103. gobby/runner.py +131 -16
  104. gobby/servers/http.py +193 -150
  105. gobby/servers/routes/__init__.py +2 -0
  106. gobby/servers/routes/admin.py +56 -0
  107. gobby/servers/routes/mcp/endpoints/execution.py +33 -32
  108. gobby/servers/routes/mcp/endpoints/registry.py +8 -8
  109. gobby/servers/routes/mcp/hooks.py +10 -1
  110. gobby/servers/routes/pipelines.py +227 -0
  111. gobby/servers/websocket.py +314 -1
  112. gobby/sessions/analyzer.py +89 -3
  113. gobby/sessions/manager.py +5 -5
  114. gobby/sessions/transcripts/__init__.py +3 -0
  115. gobby/sessions/transcripts/claude.py +5 -0
  116. gobby/sessions/transcripts/codex.py +5 -0
  117. gobby/sessions/transcripts/gemini.py +5 -0
  118. gobby/skills/hubs/__init__.py +25 -0
  119. gobby/skills/hubs/base.py +234 -0
  120. gobby/skills/hubs/claude_plugins.py +328 -0
  121. gobby/skills/hubs/clawdhub.py +289 -0
  122. gobby/skills/hubs/github_collection.py +465 -0
  123. gobby/skills/hubs/manager.py +263 -0
  124. gobby/skills/hubs/skillhub.py +342 -0
  125. gobby/skills/parser.py +23 -0
  126. gobby/skills/sync.py +5 -4
  127. gobby/storage/artifacts.py +19 -0
  128. gobby/storage/memories.py +4 -4
  129. gobby/storage/migrations.py +118 -3
  130. gobby/storage/pipelines.py +367 -0
  131. gobby/storage/sessions.py +23 -4
  132. gobby/storage/skills.py +48 -8
  133. gobby/storage/tasks/_aggregates.py +2 -2
  134. gobby/storage/tasks/_lifecycle.py +4 -4
  135. gobby/storage/tasks/_models.py +7 -1
  136. gobby/storage/tasks/_queries.py +3 -3
  137. gobby/sync/memories.py +4 -3
  138. gobby/tasks/commits.py +48 -17
  139. gobby/tasks/external_validator.py +4 -17
  140. gobby/tasks/validation.py +13 -87
  141. gobby/tools/summarizer.py +18 -51
  142. gobby/utils/status.py +13 -0
  143. gobby/workflows/actions.py +80 -0
  144. gobby/workflows/context_actions.py +265 -27
  145. gobby/workflows/definitions.py +119 -1
  146. gobby/workflows/detection_helpers.py +23 -11
  147. gobby/workflows/enforcement/__init__.py +11 -1
  148. gobby/workflows/enforcement/blocking.py +96 -0
  149. gobby/workflows/enforcement/handlers.py +35 -1
  150. gobby/workflows/enforcement/task_policy.py +18 -0
  151. gobby/workflows/engine.py +26 -4
  152. gobby/workflows/evaluator.py +8 -5
  153. gobby/workflows/lifecycle_evaluator.py +59 -27
  154. gobby/workflows/loader.py +567 -30
  155. gobby/workflows/lobster_compat.py +147 -0
  156. gobby/workflows/pipeline_executor.py +801 -0
  157. gobby/workflows/pipeline_state.py +172 -0
  158. gobby/workflows/pipeline_webhooks.py +206 -0
  159. gobby/workflows/premature_stop.py +5 -0
  160. gobby/worktrees/git.py +135 -20
  161. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/METADATA +56 -22
  162. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/RECORD +166 -122
  163. gobby/hooks/event_handlers.py +0 -1008
  164. gobby/mcp_proxy/tools/workflows.py +0 -1023
  165. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/WHEEL +0 -0
  166. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/entry_points.txt +0 -0
  167. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/licenses/LICENSE.md +0 -0
  168. {gobby-0.2.8.dist-info → gobby-0.2.11.dist-info}/top_level.txt +0 -0
gobby/storage/artifacts.py CHANGED
@@ -283,3 +283,22 @@ class LocalArtifactManager:
 
         rows = self.db.fetchall(sql, tuple(params))
         return [Artifact.from_row(row) for row in rows]
+
+    def count_artifacts(self, session_id: str | None = None) -> int:
+        """Count total artifacts, optionally filtered by session.
+
+        Args:
+            session_id: Optional session ID to filter by
+
+        Returns:
+            Total artifact count
+        """
+        if session_id:
+            row = self.db.fetchone(
+                "SELECT COUNT(*) FROM session_artifacts WHERE session_id = ?",
+                (session_id,),
+            )
+        else:
+            row = self.db.fetchone("SELECT COUNT(*) FROM session_artifacts")
+
+        return row[0] if row else 0
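
The new method is a straight COUNT(*) over the same session_artifacts table the list query reads. A standalone sketch of the pattern, using a minimal stand-in schema rather than gobby's real table definition:

import sqlite3

# Minimal stand-in for the session_artifacts table; only the column the
# new COUNT(*) filter touches is modeled here.
db = sqlite3.connect(":memory:")
db.execute("CREATE TABLE session_artifacts (id TEXT, session_id TEXT)")
db.executemany(
    "INSERT INTO session_artifacts VALUES (?, ?)",
    [("a1", "sess-1"), ("a2", "sess-1"), ("a3", "sess-2")],
)

# Same query shape as count_artifacts(session_id="sess-1")
row = db.execute(
    "SELECT COUNT(*) FROM session_artifacts WHERE session_id = ?", ("sess-1",)
).fetchone()
print(row[0] if row else 0)  # 2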
gobby/storage/memories.py CHANGED
@@ -145,11 +145,11 @@ class LocalMemoryManager:
 
         now = datetime.now(UTC).isoformat()
         # Normalize content for consistent ID generation (avoid duplicates from
-        # whitespace differences or project_id inconsistency)
+        # whitespace differences)
         normalized_content = content.strip()
-        project_str = project_id if project_id else ""
-        # Use delimiter to prevent collisions (e.g., "abc" + "def" vs "abcd" + "ef")
-        memory_id = generate_prefixed_id("mm", f"{normalized_content}||{project_str}")
+        # Global dedup: ID based on content only (project_id stored but not in ID)
+        # This aligns with content_exists() which checks globally
+        memory_id = generate_prefixed_id("mm", normalized_content)
 
         # Check if memory already exists to avoid duplicate insert errors
         existing_row = self.db.fetchone("SELECT * FROM memories WHERE id = ?", (memory_id,))
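
The effect of the change: the memory ID is now a pure function of the normalized content, so identical text deduplicates globally instead of once per project. A sketch of that property with a hypothetical hash-based generate_prefixed_id (gobby's actual ID derivation is not shown in this diff):

import hashlib

def generate_prefixed_id(prefix: str, content: str) -> str:
    # Hypothetical stand-in: deterministic suffix from a content digest.
    return f"{prefix}-{hashlib.sha256(content.encode()).hexdigest()[:12]}"

a = generate_prefixed_id("mm", "Prefer uv over pip".strip())
b = generate_prefixed_id("mm", "  Prefer uv over pip  ".strip())
assert a == b  # same normalized content -> same ID, regardless of project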
gobby/storage/migrations.py CHANGED
@@ -43,11 +43,12 @@ class MigrationUnsupportedError(Exception):
 # Migration can be SQL string or a callable that takes LocalDatabase
 MigrationAction = str | Callable[[LocalDatabase], None]
 
-# Baseline version - the schema state at v78 (flattened)
+# Baseline version - the schema state at v81 (flattened)
 # This is applied for new databases directly
-BASELINE_VERSION = 78
+# Note: Migrations >= BASELINE_VERSION still run for existing databases
+BASELINE_VERSION = 81
 
-# Baseline schema - flattened from v78 production state, includes hub tracking fields
+# Baseline schema - flattened from v81 production state, includes all migrations
 # This is applied for new databases directly
 # Generated by: sqlite3 ~/.gobby/gobby-hub.db .schema
 BASELINE_SCHEMA = """
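
The new note is the key invariant: a fresh database applies the flattened baseline and starts at version 81, while an existing database replays every migration above its recorded version, including 81 and 82. A minimal sketch of that dispatch, assuming version tracking via SQLite's user_version pragma (gobby's actual bookkeeping is not shown in this diff):

import sqlite3

BASELINE_VERSION = 81
# Illustrative stand-ins for the real MIGRATIONS entries below.
MIGRATIONS = [(79, "skill columns", "SELECT 1;"), (82, "status rename", "SELECT 1;")]

def upgrade(db: sqlite3.Connection, baseline_schema: str) -> None:
    (current,) = db.execute("PRAGMA user_version").fetchone()
    if current == 0:
        # Fresh database: the flattened baseline already contains v81 state
        db.executescript(baseline_schema)
        current = BASELINE_VERSION
    for version, _desc, sql in MIGRATIONS:
        if version > current:
            # Existing databases replay everything above their recorded version
            db.executescript(sql)
            current = version
    db.execute(f"PRAGMA user_version = {current}")

conn = sqlite3.connect(":memory:")
upgrade(conn, baseline_schema="CREATE TABLE demo (id TEXT);")
print(conn.execute("PRAGMA user_version").fetchone()[0])  # 82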
@@ -220,6 +221,7 @@ CREATE TABLE sessions (
     agent_depth INTEGER DEFAULT 0,
     spawned_by_agent_id TEXT,
     workflow_name TEXT,
+    step_variables TEXT,
     agent_run_id TEXT REFERENCES agent_runs(id) ON DELETE SET NULL,
     context_injected INTEGER DEFAULT 0,
     original_prompt TEXT,
@@ -587,6 +589,8 @@ CREATE TABLE skills (
     hub_slug TEXT,
     hub_version TEXT,
     enabled INTEGER DEFAULT 1,
+    always_apply INTEGER DEFAULT 0,
+    injection_format TEXT DEFAULT 'summary',
     project_id TEXT REFERENCES projects(id) ON DELETE CASCADE,
     created_at TEXT NOT NULL,
     updated_at TEXT NOT NULL
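
The two new columns feed session-start skill injection, and the idx_skills_always_apply index added just below suggests the lookup they serve. A sketch of that query shape (the exact statement gobby issues is an assumption):

import sqlite3

# Stand-in skills table modeling only the columns relevant to the new index.
db = sqlite3.connect(":memory:")
db.execute(
    "CREATE TABLE skills (name TEXT, enabled INTEGER DEFAULT 1, "
    "always_apply INTEGER DEFAULT 0, injection_format TEXT DEFAULT 'summary')"
)
db.execute("CREATE INDEX idx_skills_always_apply ON skills(always_apply)")
db.execute("INSERT INTO skills VALUES ('git-hygiene', 1, 1, 'full')")
db.execute("INSERT INTO skills VALUES ('rarely-used', 1, 0, 'summary')")

rows = db.execute(
    "SELECT name, injection_format FROM skills WHERE always_apply = 1 AND enabled = 1"
).fetchall()
print(rows)  # [('git-hygiene', 'full')]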
@@ -594,6 +598,7 @@ CREATE TABLE skills (
 CREATE INDEX idx_skills_name ON skills(name);
 CREATE INDEX idx_skills_project_id ON skills(project_id);
 CREATE INDEX idx_skills_enabled ON skills(enabled);
+CREATE INDEX idx_skills_always_apply ON skills(always_apply);
 CREATE UNIQUE INDEX idx_skills_name_project ON skills(name, project_id);
 CREATE UNIQUE INDEX idx_skills_name_global ON skills(name) WHERE project_id IS NULL;
 
@@ -617,6 +622,42 @@ CREATE INDEX idx_clones_status ON clones(status);
 CREATE INDEX idx_clones_task ON clones(task_id);
 CREATE INDEX idx_clones_session ON clones(agent_session_id);
 CREATE UNIQUE INDEX idx_clones_path ON clones(clone_path);
+
+CREATE TABLE pipeline_executions (
+    id TEXT PRIMARY KEY,
+    pipeline_name TEXT NOT NULL,
+    project_id TEXT NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
+    status TEXT NOT NULL DEFAULT 'pending',
+    inputs_json TEXT,
+    outputs_json TEXT,
+    created_at TEXT NOT NULL DEFAULT (datetime('now')),
+    updated_at TEXT NOT NULL DEFAULT (datetime('now')),
+    completed_at TEXT,
+    resume_token TEXT UNIQUE,
+    session_id TEXT REFERENCES sessions(id) ON DELETE SET NULL,
+    parent_execution_id TEXT REFERENCES pipeline_executions(id) ON DELETE CASCADE
+);
+CREATE INDEX idx_pipeline_executions_project ON pipeline_executions(project_id);
+CREATE INDEX idx_pipeline_executions_status ON pipeline_executions(status);
+CREATE INDEX idx_pipeline_executions_resume_token ON pipeline_executions(resume_token);
+
+CREATE TABLE step_executions (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    execution_id TEXT NOT NULL REFERENCES pipeline_executions(id) ON DELETE CASCADE,
+    step_id TEXT NOT NULL,
+    status TEXT NOT NULL DEFAULT 'pending',
+    started_at TEXT,
+    completed_at TEXT,
+    input_json TEXT,
+    output_json TEXT,
+    error TEXT,
+    approval_token TEXT UNIQUE,
+    approved_by TEXT,
+    approved_at TEXT,
+    UNIQUE(execution_id, step_id)
+);
+CREATE INDEX idx_step_executions_execution ON step_executions(execution_id);
+CREATE INDEX idx_step_executions_approval_token ON step_executions(approval_token);
 """
 
 # Future migrations (v61+)
@@ -709,6 +750,24 @@ def _migrate_add_hub_tracking_to_skills(db: LocalDatabase) -> None:
     logger.info("Added hub tracking fields to skills table")
 
 
+def _migrate_add_skill_injection_columns(db: LocalDatabase) -> None:
+    """Add always_apply and injection_format columns to skills table.
+
+    These columns enable per-skill control over:
+    - always_apply: Whether skill should always be injected at session start
+    - injection_format: How to inject the skill (summary, full, content)
+
+    The values are extracted from SKILL.md frontmatter during sync and stored
+    as columns for efficient querying.
+    """
+    with db.transaction() as conn:
+        conn.execute("ALTER TABLE skills ADD COLUMN always_apply INTEGER DEFAULT 0")
+        conn.execute("ALTER TABLE skills ADD COLUMN injection_format TEXT DEFAULT 'summary'")
+        conn.execute("CREATE INDEX idx_skills_always_apply ON skills(always_apply)")
+
+    logger.info("Added always_apply and injection_format columns to skills table")
+
+
 MIGRATIONS: list[tuple[int, str, MigrationAction]] = [
     # Project-scoped session refs: Change seq_num index from global to project-scoped
     (76, "Make sessions.seq_num project-scoped", _migrate_session_seq_num_project_scoped),
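
Entries in MIGRATIONS are MigrationAction values, so per the alias at the top of the file each one is either a SQL string or a callable like _migrate_add_skill_injection_columns above. A hedged sketch of the dispatch a runner would need (gobby's actual runner is not part of this diff; sqlite3.Connection stands in for LocalDatabase):

import sqlite3
from collections.abc import Callable

MigrationAction = str | Callable[[sqlite3.Connection], None]

def apply_migration(db: sqlite3.Connection, action: MigrationAction) -> None:
    if isinstance(action, str):
        db.executescript(action)  # plain-SQL migrations, e.g. 80-82 below
    else:
        action(db)  # callable migrations, e.g. 76-79

def _backfill(conn: sqlite3.Connection) -> None:
    conn.execute("INSERT INTO t VALUES ('x')")

db = sqlite3.connect(":memory:")
apply_migration(db, "CREATE TABLE t (id TEXT);")
apply_migration(db, _backfill)
print(db.execute("SELECT COUNT(*) FROM t").fetchone()[0])  # 1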
@@ -716,6 +775,62 @@ MIGRATIONS: list[tuple[int, str, MigrationAction]] = [
     (77, "Backfill sessions.seq_num per project", _migrate_backfill_session_seq_num_per_project),
     # Hub tracking: Add hub_name, hub_slug, hub_version to skills table
     (78, "Add hub tracking fields to skills", _migrate_add_hub_tracking_to_skills),
+    # Skill injection: Add always_apply and injection_format columns
+    (79, "Add skill injection columns", _migrate_add_skill_injection_columns),
+    # Pipeline system: Add pipeline_executions and step_executions tables
+    (
+        80,
+        "Add pipeline execution tables",
+        """
+        CREATE TABLE IF NOT EXISTS pipeline_executions (
+            id TEXT PRIMARY KEY,
+            pipeline_name TEXT NOT NULL,
+            project_id TEXT NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
+            status TEXT NOT NULL DEFAULT 'pending',
+            inputs_json TEXT,
+            outputs_json TEXT,
+            created_at TEXT NOT NULL DEFAULT (datetime('now')),
+            updated_at TEXT NOT NULL DEFAULT (datetime('now')),
+            completed_at TEXT,
+            resume_token TEXT UNIQUE,
+            session_id TEXT REFERENCES sessions(id) ON DELETE SET NULL,
+            parent_execution_id TEXT REFERENCES pipeline_executions(id) ON DELETE CASCADE
+        );
+        CREATE INDEX IF NOT EXISTS idx_pipeline_executions_project ON pipeline_executions(project_id);
+        CREATE INDEX IF NOT EXISTS idx_pipeline_executions_status ON pipeline_executions(status);
+        CREATE INDEX IF NOT EXISTS idx_pipeline_executions_resume_token ON pipeline_executions(resume_token);
+
+        CREATE TABLE IF NOT EXISTS step_executions (
+            id INTEGER PRIMARY KEY AUTOINCREMENT,
+            execution_id TEXT NOT NULL REFERENCES pipeline_executions(id) ON DELETE CASCADE,
+            step_id TEXT NOT NULL,
+            status TEXT NOT NULL DEFAULT 'pending',
+            started_at TEXT,
+            completed_at TEXT,
+            input_json TEXT,
+            output_json TEXT,
+            error TEXT,
+            approval_token TEXT UNIQUE,
+            approved_by TEXT,
+            approved_at TEXT,
+            UNIQUE(execution_id, step_id)
+        );
+        CREATE INDEX IF NOT EXISTS idx_step_executions_execution ON step_executions(execution_id);
+        CREATE INDEX IF NOT EXISTS idx_step_executions_approval_token ON step_executions(approval_token);
+        """,
+    ),
+    # Add step_variables JSON column to sessions for spawn-time variable passing
+    (
+        81,
+        "Add step_variables to sessions",
+        "ALTER TABLE sessions ADD COLUMN step_variables TEXT",
+    ),
+    # Rename task status 'review' to 'needs_review' for clarity
+    (
+        82,
+        "Rename task status 'review' to 'needs_review'",
+        "UPDATE tasks SET status = 'needs_review' WHERE status = 'review'",
+    ),
 ]
 
 
gobby/storage/pipelines.py ADDED
@@ -0,0 +1,367 @@
+"""Local pipeline execution storage manager."""
+
+from __future__ import annotations
+
+import logging
+from datetime import UTC, datetime
+from typing import Any
+
+from gobby.storage.database import DatabaseProtocol
+from gobby.utils.id import generate_prefixed_id
+from gobby.workflows.pipeline_state import (
+    ExecutionStatus,
+    PipelineExecution,
+    StepExecution,
+    StepStatus,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class LocalPipelineExecutionManager:
+    """Manager for local pipeline execution storage."""
+
+    def __init__(self, db: DatabaseProtocol, project_id: str):
+        """Initialize with database connection and project context.
+
+        Args:
+            db: Database connection
+            project_id: Project ID for scoping executions
+        """
+        self.db = db
+        self.project_id = project_id
+
+    def create_execution(
+        self,
+        pipeline_name: str,
+        inputs_json: str | None = None,
+        session_id: str | None = None,
+        parent_execution_id: str | None = None,
+    ) -> PipelineExecution:
+        """Create a new pipeline execution.
+
+        Args:
+            pipeline_name: Name of the pipeline being executed
+            inputs_json: JSON string of input parameters
+            session_id: Session that triggered the execution
+            parent_execution_id: Parent execution for nested pipelines
+
+        Returns:
+            Created PipelineExecution instance
+        """
+        execution_id = generate_prefixed_id("pe")
+        now = datetime.now(UTC).isoformat()
+
+        with self.db.transaction():
+            self.db.execute(
+                """
+                INSERT INTO pipeline_executions (
+                    id, pipeline_name, project_id, status, inputs_json,
+                    session_id, parent_execution_id, created_at, updated_at
+                )
+                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
+                """,
+                (
+                    execution_id,
+                    pipeline_name,
+                    self.project_id,
+                    ExecutionStatus.PENDING.value,
+                    inputs_json,
+                    session_id,
+                    parent_execution_id,
+                    now,
+                    now,
+                ),
+            )
+
+        execution = self.get_execution(execution_id)
+        if execution is None:
+            raise RuntimeError(f"Execution {execution_id} not found after creation")
+        return execution
+
+    def get_execution(self, execution_id: str) -> PipelineExecution | None:
+        """Get execution by ID.
+
+        Args:
+            execution_id: Execution UUID
+
+        Returns:
+            PipelineExecution or None if not found
+        """
+        row = self.db.fetchone(
+            "SELECT * FROM pipeline_executions WHERE id = ?",
+            (execution_id,),
+        )
+        return PipelineExecution.from_row(row) if row else None
+
+    def update_execution_status(
+        self,
+        execution_id: str,
+        status: ExecutionStatus,
+        resume_token: str | None = None,
+        outputs_json: str | None = None,
+    ) -> PipelineExecution | None:
+        """Update execution status.
+
+        Args:
+            execution_id: Execution UUID
+            status: New status
+            resume_token: Resume token for approval gates
+            outputs_json: JSON string of outputs (for completed status)
+
+        Returns:
+            Updated PipelineExecution or None if not found
+        """
+        now = datetime.now(UTC).isoformat()
+        completed_at = now if status == ExecutionStatus.COMPLETED else None
+
+        self.db.execute(
+            """
+            UPDATE pipeline_executions
+            SET status = ?, resume_token = ?, outputs_json = ?,
+                completed_at = COALESCE(?, completed_at), updated_at = ?
+            WHERE id = ?
+            """,
+            (
+                status.value,
+                resume_token,
+                outputs_json,
+                completed_at,
+                now,
+                execution_id,
+            ),
+        )
+
+        return self.get_execution(execution_id)
+
+    def list_executions(
+        self,
+        status: ExecutionStatus | None = None,
+        pipeline_name: str | None = None,
+        limit: int = 50,
+    ) -> list[PipelineExecution]:
+        """List executions for the project.
+
+        Args:
+            status: Filter by status
+            pipeline_name: Filter by pipeline name
+            limit: Maximum number of results
+
+        Returns:
+            List of PipelineExecution instances
+        """
+        query = "SELECT * FROM pipeline_executions WHERE project_id = ?"
+        params: list[Any] = [self.project_id]
+
+        if status is not None:
+            query += " AND status = ?"
+            params.append(status.value)
+
+        if pipeline_name is not None:
+            query += " AND pipeline_name = ?"
+            params.append(pipeline_name)
+
+        query += " ORDER BY created_at DESC LIMIT ?"
+        params.append(limit)
+
+        rows = self.db.fetchall(query, tuple(params))
+        return [PipelineExecution.from_row(row) for row in rows]
+
+    def get_execution_by_resume_token(self, token: str) -> PipelineExecution | None:
+        """Get execution by resume token.
+
+        Args:
+            token: Resume token
+
+        Returns:
+            PipelineExecution or None if not found
+        """
+        row = self.db.fetchone(
+            "SELECT * FROM pipeline_executions WHERE resume_token = ?",
+            (token,),
+        )
+        return PipelineExecution.from_row(row) if row else None
+
+    def resolve_execution_reference(self, ref: str) -> str:
+        """Resolve an execution reference to a UUID.
+
+        Supports:
+        - Full UUID: pe-abc123... or UUID format
+        - UUID prefix: pe-abc1 (matches by prefix)
+
+        Args:
+            ref: Execution reference
+
+        Returns:
+            Execution UUID
+
+        Raises:
+            ValueError: If reference cannot be resolved
+        """
+        # Try exact match first
+        execution = self.get_execution(ref)
+        if execution:
+            return execution.id
+
+        # Try prefix match
+        row = self.db.fetchone(
+            "SELECT id FROM pipeline_executions WHERE id LIKE ? AND project_id = ?",
+            (f"{ref}%", self.project_id),
+        )
+        if row:
+            result: str = row["id"]
+            return result
+
+        raise ValueError(f"Cannot resolve execution reference: {ref}")
+
+    # Step execution methods
+
+    def create_step_execution(
+        self,
+        execution_id: str,
+        step_id: str,
+        input_json: str | None = None,
+    ) -> StepExecution:
+        """Create a new step execution.
+
+        Args:
+            execution_id: Parent pipeline execution ID
+            step_id: Step ID from pipeline definition
+            input_json: JSON string of step input
+
+        Returns:
+            Created StepExecution instance
+        """
+        self.db.execute(
+            """
+            INSERT INTO step_executions (
+                execution_id, step_id, status, input_json
+            )
+            VALUES (?, ?, ?, ?)
+            """,
+            (
+                execution_id,
+                step_id,
+                StepStatus.PENDING.value,
+                input_json,
+            ),
+        )
+
+        # Get the created step by execution_id and step_id (unique combination)
+        row = self.db.fetchone(
+            "SELECT * FROM step_executions WHERE execution_id = ? AND step_id = ?",
+            (execution_id, step_id),
+        )
+        if row is None:
+            raise RuntimeError(f"Step {step_id} not found after creation")
+        return StepExecution.from_row(row)
+
+    def update_step_execution(
+        self,
+        step_execution_id: int,
+        status: StepStatus | None = None,
+        output_json: str | None = None,
+        error: str | None = None,
+        approval_token: str | None = None,
+        approved_by: str | None = None,
+    ) -> StepExecution | None:
+        """Update a step execution.
+
+        Args:
+            step_execution_id: Step execution ID (integer)
+            status: New status
+            output_json: JSON string of step output
+            error: Error message (for failed status)
+            approval_token: Token for approval gate
+            approved_by: Who approved the step
+
+        Returns:
+            Updated StepExecution or None if not found
+        """
+        now = datetime.now(UTC).isoformat()
+
+        # Build update parts dynamically (step_executions has no updated_at column)
+        updates: list[str] = []
+        params: list[Any] = []
+
+        if status is not None:
+            updates.append("status = ?")
+            params.append(status.value)
+            # Set timestamps based on status
+            if status == StepStatus.RUNNING:
+                updates.append("started_at = COALESCE(started_at, ?)")
+                params.append(now)
+            elif status in (StepStatus.COMPLETED, StepStatus.FAILED, StepStatus.SKIPPED):
+                updates.append("completed_at = COALESCE(completed_at, ?)")
+                params.append(now)
+
+        if output_json is not None:
+            updates.append("output_json = ?")
+            params.append(output_json)
+
+        if error is not None:
+            updates.append("error = ?")
+            params.append(error)
+
+        if approval_token is not None:
+            updates.append("approval_token = ?")
+            params.append(approval_token)
+
+        if approved_by is not None:
+            updates.append("approved_by = ?")
+            params.append(approved_by)
+            updates.append("approved_at = ?")
+            params.append(now)
+
+        if not updates:
+            # Nothing to update
+            row = self.db.fetchone(
+                "SELECT * FROM step_executions WHERE id = ?",
+                (step_execution_id,),
+            )
+            return StepExecution.from_row(row) if row else None
+
+        # Append step_execution_id for WHERE clause
+        params.append(step_execution_id)
+
+        # updates list contains only hardcoded column names, values are parameterized
+        self.db.execute(
+            f"UPDATE step_executions SET {', '.join(updates)} WHERE id = ?",  # nosec B608
+            tuple(params),
+        )
+
+        row = self.db.fetchone(
+            "SELECT * FROM step_executions WHERE id = ?",
+            (step_execution_id,),
+        )
+        return StepExecution.from_row(row) if row else None
+
+    def get_step_by_approval_token(self, token: str) -> StepExecution | None:
+        """Get step execution by approval token.
+
+        Args:
+            token: Approval token
+
+        Returns:
+            StepExecution or None if not found
+        """
+        row = self.db.fetchone(
+            "SELECT * FROM step_executions WHERE approval_token = ?",
+            (token,),
+        )
+        return StepExecution.from_row(row) if row else None
+
+    def get_steps_for_execution(self, execution_id: str) -> list[StepExecution]:
+        """Get all steps for an execution.
+
+        Args:
+            execution_id: Pipeline execution ID
+
+        Returns:
+            List of StepExecution instances
+        """
+        rows = self.db.fetchall(
+            "SELECT * FROM step_executions WHERE execution_id = ? ORDER BY id",
+            (execution_id,),
+        )
+        return [StepExecution.from_row(row) for row in rows]
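
Taken together, the manager gives the new pipeline_executor.py a small persistence API: create an execution, record per-step rows, and flip statuses as work proceeds. A hedged usage sketch; db stands for an already-initialized DatabaseProtocol connection with the v80 schema applied, the step's .id attribute is assumed to mirror the integer primary key, and the pipeline and step names are illustrative:

from gobby.storage.pipelines import LocalPipelineExecutionManager
from gobby.workflows.pipeline_state import ExecutionStatus, StepStatus

manager = LocalPipelineExecutionManager(db, project_id="proj-1")

execution = manager.create_execution("release", inputs_json='{"tag": "v0.2.11"}')
step = manager.create_step_execution(execution.id, step_id="build")

manager.update_step_execution(step.id, status=StepStatus.RUNNING)
manager.update_step_execution(step.id, status=StepStatus.COMPLETED, output_json='{"ok": true}')

manager.update_execution_status(
    execution.id, ExecutionStatus.COMPLETED, outputs_json='{"ok": true}'
)

# Short prefixes resolve too, e.g. the first few characters of the ID:
assert manager.resolve_execution_reference(execution.id[:7]) == execution.id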
gobby/storage/sessions.py CHANGED
@@ -38,6 +38,7 @@ class Session:
     spawned_by_agent_id: str | None = None  # ID of agent that spawned this session
     # Terminal pickup metadata fields
     workflow_name: str | None = None  # Workflow to activate on terminal pickup
+    step_variables: dict[str, Any] | None = None  # Variables for workflow activation
     agent_run_id: str | None = None  # Link back to agent run record
     context_injected: bool = False  # Whether context was injected into prompt
     original_prompt: str | None = None  # Original prompt for terminal mode
@@ -77,6 +78,7 @@
             agent_depth=row["agent_depth"] or 0,
             spawned_by_agent_id=row["spawned_by_agent_id"],
             workflow_name=row["workflow_name"],
+            step_variables=cls._parse_json_field(row, "step_variables"),
             agent_run_id=row["agent_run_id"],
             context_injected=bool(row["context_injected"]),
             original_prompt=row["original_prompt"],
@@ -110,6 +112,21 @@
             logger.warning("Failed to parse terminal_context JSON, returning None")
             return None
 
+    @classmethod
+    def _parse_json_field(cls, row: Any, field_name: str) -> dict[str, Any] | None:
+        """Parse a JSON field from a database row, returning None on missing/malformed data."""
+        if field_name not in row.keys():
+            return None
+        raw = row[field_name]
+        if not raw:
+            return None
+        try:
+            result: dict[str, Any] = json.loads(raw)
+            return result
+        except json.JSONDecodeError:
+            logger.warning(f"Failed to parse {field_name} JSON, returning None")
+            return None
+
     def to_dict(self) -> dict[str, Any]:
         """Convert to dictionary."""
         return {
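
The helper degrades to None in three cases: the column is missing from the row (a pre-migration database), the value is NULL or empty, or the stored JSON is malformed. A standalone sketch of those semantics using sqlite3.Row, which provides the keys() interface the helper relies on:

import json
import sqlite3

def parse_json_field(row: sqlite3.Row, field_name: str) -> dict | None:
    # Same logic as Session._parse_json_field, minus the logger.
    if field_name not in row.keys():
        return None  # column absent (pre-migration database)
    raw = row[field_name]
    if not raw:
        return None  # NULL or empty string
    try:
        return json.loads(raw)
    except json.JSONDecodeError:
        return None  # malformed JSON treated as missing

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row
conn.execute("CREATE TABLE s (step_variables TEXT)")
conn.execute("INSERT INTO s VALUES (?)", (json.dumps({"branch": "main"}),))
row = conn.execute("SELECT * FROM s").fetchone()
print(parse_json_field(row, "step_variables"))  # {'branch': 'main'}
print(parse_json_field(row, "missing_column"))  # None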
@@ -167,6 +184,7 @@ class LocalSessionManager:
         spawned_by_agent_id: str | None = None,
         terminal_context: dict[str, Any] | None = None,
         workflow_name: str | None = None,
+        step_variables: dict[str, Any] | None = None,
     ) -> Session:
         """
         Register a new session or return existing one.
@@ -176,9 +194,9 @@
         returns the existing session. Otherwise creates a new one.
 
         Args:
-            external_id: External session identifier (e.g., Claude Code's session ID)
+            external_id: External session identifier (e.g., Claude Code session ID)
             machine_id: Machine identifier
-            source: CLI source (claude_code, codex, gemini)
+            source: CLI source (claude, gemini, codex, cursor, windsurf, copilot)
             project_id: Project ID (required - sessions must belong to a project)
             title: Optional session title
            jsonl_path: Path to transcript file
@@ -242,9 +260,9 @@
                 id, external_id, machine_id, source, project_id, title,
                 jsonl_path, git_branch, parent_session_id,
                 agent_depth, spawned_by_agent_id, terminal_context,
-                workflow_name, status, created_at, updated_at, seq_num, had_edits
+                workflow_name, step_variables, status, created_at, updated_at, seq_num, had_edits
             )
-            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'active', ?, ?, ?, 0)
+            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'active', ?, ?, ?, 0)
             """,
             (
                 session_id,
@@ -260,6 +278,7 @@
                 spawned_by_agent_id,
                 json.dumps(terminal_context) if terminal_context else None,
                 workflow_name,
+                json.dumps(step_variables) if step_variables else None,
                 now,
                 now,
                 next_seq_num,