substrate-ai 0.9.0 → 0.11.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/dist/adapter-registry-DXLMTmfD.js +0 -0
  2. package/dist/adapter-registry-neBZrkr3.js +4 -0
  3. package/dist/cli/index.js +5594 -5951
  4. package/dist/decisions-C0pz9Clx.js +0 -0
  5. package/dist/{decisions-BDLp3tJB.js → decisions-DQZW0h9X.js} +2 -1
  6. package/dist/dist-eNB_v7Iy.js +10205 -0
  7. package/dist/errors-BvyMlvCX.js +74 -0
  8. package/dist/experimenter-Dos3NsCg.js +3 -0
  9. package/dist/health-BvYILeQQ.js +6 -0
  10. package/dist/{health-C-VRJruD.js → health-CiDi90gC.js} +57 -1850
  11. package/dist/{helpers-CpMs8VZX.js → helpers-DTp3VJ2-.js} +31 -121
  12. package/dist/index.d.ts +709 -266
  13. package/dist/index.js +5 -3
  14. package/dist/{logger-D2fS2ccL.js → logger-KeHncl-f.js} +2 -42
  15. package/dist/routing-CcBOCuC9.js +0 -0
  16. package/dist/{routing-CD8bIci_.js → routing-HaYsjEIS.js} +2 -2
  17. package/dist/{run-ClxNDHbr.js → run-CAUhTR7Y.js} +594 -4249
  18. package/dist/run-DPZOQOvB.js +9 -0
  19. package/dist/{upgrade-B1S61VXJ.js → upgrade-DFGrqjGI.js} +3 -3
  20. package/dist/{upgrade-BK0HrKA6.js → upgrade-DYdYuuJK.js} +3 -3
  21. package/dist/version-manager-impl-BmOWu8ml.js +0 -0
  22. package/dist/version-manager-impl-CKv6I1S0.js +4 -0
  23. package/package.json +5 -2
  24. package/dist/adapter-registry-D2zdMwVu.js +0 -840
  25. package/dist/adapter-registry-WAyFydN5.js +0 -4
  26. package/dist/config-migrator-CtGelIsG.js +0 -250
  27. package/dist/decisions-DhAA2HG2.js +0 -397
  28. package/dist/experimenter-D_N_7ZF3.js +0 -503
  29. package/dist/git-utils-DxPx6erV.js +0 -365
  30. package/dist/health-DMbNP9bw.js +0 -5
  31. package/dist/operational-BdcdmDqS.js +0 -374
  32. package/dist/routing-BVrxrM6v.js +0 -832
  33. package/dist/run-MAQ3Wuju.js +0 -10
  34. package/dist/version-manager-impl-BIxOe7gZ.js +0 -372
  35. package/dist/version-manager-impl-RrWs-CI6.js +0 -4
@@ -1,4 +0,0 @@
1
- import "./logger-D2fS2ccL.js";
2
- import { AdapterRegistry } from "./adapter-registry-D2zdMwVu.js";
3
-
4
- export { AdapterRegistry };
@@ -1,250 +0,0 @@
1
- import { readFileSync, writeFileSync } from "fs";
2
- import { z } from "zod";
3
-
4
- //#region src/modules/config/config-schema.ts
5
- /** Subscription routing modes */
6
- const SubscriptionRoutingSchema = z.enum([
7
- "auto",
8
- "subscription",
9
- "api",
10
- "disabled"
11
- ]);
12
- /** Rate limit configuration for a provider */
13
- const RateLimitSchema = z.object({
14
- tokens: z.number().int().positive(),
15
- window_seconds: z.number().int().positive()
16
- }).strict();
17
- /** Per-provider configuration */
18
- const ProviderConfigSchema = z.object({
19
- enabled: z.boolean(),
20
- cli_path: z.string().optional(),
21
- subscription_routing: SubscriptionRoutingSchema,
22
- max_concurrent: z.number().int().min(1).max(32),
23
- rate_limit: RateLimitSchema.optional(),
24
- api_key_env: z.string().optional(),
25
- api_billing: z.boolean()
26
- }).strict();
27
- /** Map of all known providers */
28
- const ProvidersSchema = z.object({
29
- claude: ProviderConfigSchema.optional(),
30
- codex: ProviderConfigSchema.optional(),
31
- gemini: ProviderConfigSchema.optional()
32
- }).strict();
33
- const LogLevelSchema = z.enum([
34
- "trace",
35
- "debug",
36
- "info",
37
- "warn",
38
- "error",
39
- "fatal"
40
- ]);
41
- const GlobalSettingsSchema = z.object({
42
- log_level: LogLevelSchema,
43
- max_concurrent_tasks: z.number().int().min(1).max(64),
44
- budget_cap_tokens: z.number().int().min(0),
45
- budget_cap_usd: z.number().min(0),
46
- workspace_dir: z.string().optional(),
47
- update_check: z.boolean().optional()
48
- }).strict();
49
- const CostTrackerConfigSchema = z.object({
50
- enabled: z.boolean(),
51
- token_rates_provider: z.enum(["builtin", "custom"]),
52
- track_planning_costs: z.boolean(),
53
- savings_reporting: z.boolean()
54
- }).strict();
55
- const BudgetConfigSchema = z.object({
56
- default_task_budget_usd: z.number().min(0),
57
- default_session_budget_usd: z.number().min(0),
58
- planning_costs_count_against_budget: z.boolean(),
59
- warning_threshold_percent: z.number().min(0).max(100)
60
- }).strict();
61
- const RoutingRuleSchema = z.object({
62
- task_type: z.string(),
63
- preferred_provider: z.string(),
64
- fallback_providers: z.array(z.string())
65
- }).strict();
66
- const RoutingPolicySchema = z.object({
67
- default_provider: z.string(),
68
- rules: z.array(RoutingRuleSchema)
69
- }).strict();
70
- /**
71
- * Per-workflow token ceiling overrides.
72
- * Keys match the workflow type names used in prompts and events.
73
- * Values must be positive integers.
74
- */
75
- const TokenCeilingsSchema = z.object({
76
- "create-story": z.number().int().positive("create-story token ceiling must be a positive integer").optional(),
77
- "dev-story": z.number().int().positive("dev-story token ceiling must be a positive integer").optional(),
78
- "code-review": z.number().int().positive("code-review token ceiling must be a positive integer").optional(),
79
- "test-plan": z.number().int().positive("test-plan token ceiling must be a positive integer").optional(),
80
- "test-expansion": z.number().int().positive("test-expansion token ceiling must be a positive integer").optional()
81
- });
82
- const TelemetryConfigSchema = z.object({
83
- enabled: z.boolean().default(false),
84
- port: z.number().int().min(1).max(65535).default(4318)
85
- }).strict();
86
- /** Current supported config format version */
87
- const CURRENT_CONFIG_FORMAT_VERSION = "1";
88
- /** Current supported task graph version */
89
- const CURRENT_TASK_GRAPH_VERSION = "1";
90
- /** All config format versions this toolkit can read and validate */
91
- const SUPPORTED_CONFIG_FORMAT_VERSIONS = ["1"];
92
- /** All task graph format versions this toolkit can read and validate */
93
- const SUPPORTED_TASK_GRAPH_VERSIONS = ["1"];
94
- const SubstrateConfigSchema = z.object({
95
- config_format_version: z.enum(["1"]),
96
- task_graph_version: z.enum(["1"]).optional(),
97
- global: GlobalSettingsSchema,
98
- providers: ProvidersSchema,
99
- cost_tracker: CostTrackerConfigSchema.optional(),
100
- budget: BudgetConfigSchema.optional(),
101
- token_ceilings: TokenCeilingsSchema.optional(),
102
- telemetry: TelemetryConfigSchema.optional()
103
- }).strict();
104
- const PartialProviderConfigSchema = ProviderConfigSchema.partial();
105
- const PartialGlobalSettingsSchema = GlobalSettingsSchema.partial();
106
- const PartialSubstrateConfigSchema = z.object({
107
- config_format_version: z.enum(["1"]).optional(),
108
- task_graph_version: z.enum(["1"]).optional(),
109
- global: PartialGlobalSettingsSchema.optional(),
110
- providers: z.object({
111
- claude: PartialProviderConfigSchema.optional(),
112
- codex: PartialProviderConfigSchema.optional(),
113
- gemini: PartialProviderConfigSchema.optional()
114
- }).partial().optional(),
115
- cost_tracker: CostTrackerConfigSchema.partial().optional(),
116
- budget: BudgetConfigSchema.partial().optional(),
117
- token_ceilings: TokenCeilingsSchema.optional(),
118
- telemetry: TelemetryConfigSchema.partial().optional()
119
- }).strict();
120
-
121
- //#endregion
122
- //#region src/modules/config/config-migrator.ts
123
- /**
124
- * ConfigMigrator manages a registry of migration functions and applies them
125
- * sequentially to upgrade config documents from one format version to another.
126
- */
127
- var ConfigMigrator = class {
128
- migrations = new Map();
129
- /**
130
- * Register a migration function for the given version key.
131
- *
132
- * @param key - Migration key in format "N->M" (e.g. "1->2")
133
- * @param fn - Migration function that receives the raw config and returns the migrated config
134
- */
135
- register(key, fn) {
136
- this.migrations.set(key, fn);
137
- }
138
- /**
139
- * Check whether a sequential migration path exists from fromVersion to toVersion.
140
- *
141
- * @param fromVersion - Starting version string
142
- * @param toVersion - Target version string
143
- * @returns true if every step in the path is registered
144
- */
145
- canMigrate(fromVersion, toVersion) {
146
- if (fromVersion === toVersion) return true;
147
- const from = parseInt(fromVersion, 10);
148
- const to = parseInt(toVersion, 10);
149
- if (isNaN(from) || isNaN(to) || from >= to) return false;
150
- for (let v = from; v < to; v++) {
151
- const key = `${String(v)}->${String(v + 1)}`;
152
- if (!this.migrations.has(key)) return false;
153
- }
154
- return true;
155
- }
156
- /**
157
- * Apply sequential migrations from fromVersion to toVersion.
158
- *
159
- * If fromVersion === toVersion, returns a no-op success result.
160
- * If any intermediate migration is missing, returns success:false.
161
- *
162
- * When filePath is provided and migration is needed, a backup is written to
163
- * `${filePath}.bak.v${fromVersion}` before any transformations are applied.
164
- *
165
- * @param config - Raw config object to migrate
166
- * @param fromVersion - Starting format version string
167
- * @param toVersion - Target format version string
168
- * @param filePath - Optional path to the source config file for backup creation
169
- * @returns Object containing the (possibly migrated) config and a MigrationResult
170
- */
171
- migrate(config, fromVersion, toVersion, filePath) {
172
- if (fromVersion === toVersion) return {
173
- config,
174
- result: {
175
- success: true,
176
- fromVersion,
177
- toVersion,
178
- migratedKeys: [],
179
- manualStepsRequired: [],
180
- backupPath: null
181
- }
182
- };
183
- const from = parseInt(fromVersion, 10);
184
- const to = parseInt(toVersion, 10);
185
- if (isNaN(from) || isNaN(to) || from >= to) return {
186
- config,
187
- result: {
188
- success: false,
189
- fromVersion,
190
- toVersion,
191
- migratedKeys: [],
192
- manualStepsRequired: [`Cannot migrate from version "${fromVersion}" to "${toVersion}": invalid version range.`],
193
- backupPath: null
194
- }
195
- };
196
- for (let v = from; v < to; v++) {
197
- const key = `${String(v)}->${String(v + 1)}`;
198
- if (!this.migrations.has(key)) return {
199
- config,
200
- result: {
201
- success: false,
202
- fromVersion,
203
- toVersion,
204
- migratedKeys: [],
205
- manualStepsRequired: [`Missing migration step: "${key}". Cannot automatically migrate from version "${fromVersion}" to "${toVersion}". Please upgrade the toolkit: npm install -g substrate@latest`],
206
- backupPath: null
207
- }
208
- };
209
- }
210
- let backupPath = null;
211
- if (filePath !== void 0) {
212
- backupPath = `${filePath}.bak.v${fromVersion}`;
213
- const originalContent = readFileSync(filePath, "utf-8");
214
- writeFileSync(backupPath, originalContent, "utf-8");
215
- }
216
- let current = config;
217
- const migratedKeys = [];
218
- for (let v = from; v < to; v++) {
219
- const key = `${String(v)}->${String(v + 1)}`;
220
- const fn = this.migrations.get(key);
221
- const before = JSON.stringify(current);
222
- current = fn(current);
223
- const after = JSON.stringify(current);
224
- if (current !== null && typeof current === "object" && !Array.isArray(current)) {
225
- const beforeObj = JSON.parse(before);
226
- const afterObj = JSON.parse(after);
227
- for (const k of Object.keys(afterObj)) if (JSON.stringify(afterObj[k]) !== JSON.stringify(beforeObj[k])) {
228
- if (!migratedKeys.includes(k)) migratedKeys.push(k);
229
- }
230
- }
231
- }
232
- return {
233
- config: current,
234
- result: {
235
- success: true,
236
- fromVersion,
237
- toVersion,
238
- migratedKeys,
239
- manualStepsRequired: [],
240
- backupPath
241
- }
242
- };
243
- }
244
- };
245
- /** Singleton instance for use throughout the toolkit */
246
- const defaultConfigMigrator = new ConfigMigrator();
247
-
248
- //#endregion
249
- export { CURRENT_CONFIG_FORMAT_VERSION, CURRENT_TASK_GRAPH_VERSION, PartialSubstrateConfigSchema, SUPPORTED_CONFIG_FORMAT_VERSIONS, SUPPORTED_TASK_GRAPH_VERSIONS, SubstrateConfigSchema, defaultConfigMigrator };
250
- //# sourceMappingURL=config-migrator-CtGelIsG.js.map
@@ -1,397 +0,0 @@
1
- import { z } from "zod";
2
-
3
- //#region src/persistence/schemas/decisions.ts
4
- const PhaseEnum = z.enum([
5
- "analysis",
6
- "planning",
7
- "solutioning",
8
- "implementation"
9
- ]);
10
- const RequirementPriorityEnum = z.enum([
11
- "must",
12
- "should",
13
- "could",
14
- "wont"
15
- ]);
16
- const RequirementTypeEnum = z.enum([
17
- "functional",
18
- "non_functional",
19
- "constraint"
20
- ]);
21
- const PipelineRunStatusEnum = z.enum([
22
- "running",
23
- "paused",
24
- "completed",
25
- "failed",
26
- "stopped"
27
- ]);
28
- const DecisionSchema = z.object({
29
- id: z.string().uuid(),
30
- pipeline_run_id: z.string().nullable().optional(),
31
- phase: z.string().min(1),
32
- category: z.string().min(1),
33
- key: z.string().min(1),
34
- value: z.string().min(1),
35
- rationale: z.string().nullable().optional(),
36
- superseded_by: z.string().nullable().optional(),
37
- created_at: z.string().optional(),
38
- updated_at: z.string().optional()
39
- });
40
- const CreateDecisionInputSchema = z.object({
41
- pipeline_run_id: z.string().nullable().optional(),
42
- phase: z.string().min(1),
43
- category: z.string().min(1),
44
- key: z.string().min(1),
45
- value: z.string().min(1),
46
- rationale: z.string().nullable().optional()
47
- });
48
- const RequirementSchema = z.object({
49
- id: z.string().uuid(),
50
- pipeline_run_id: z.string().nullable().optional(),
51
- source: z.string().min(1),
52
- type: RequirementTypeEnum,
53
- description: z.string().min(1),
54
- priority: RequirementPriorityEnum,
55
- status: z.string().default("active"),
56
- created_at: z.string().optional()
57
- });
58
- const CreateRequirementInputSchema = z.object({
59
- pipeline_run_id: z.string().nullable().optional(),
60
- source: z.string().min(1),
61
- type: RequirementTypeEnum,
62
- description: z.string().min(1),
63
- priority: RequirementPriorityEnum
64
- });
65
- const ConstraintSchema = z.object({
66
- id: z.string().uuid(),
67
- pipeline_run_id: z.string().nullable().optional(),
68
- category: z.string().min(1),
69
- description: z.string().min(1),
70
- source: z.string().min(1),
71
- created_at: z.string().optional()
72
- });
73
- const CreateConstraintInputSchema = z.object({
74
- pipeline_run_id: z.string().nullable().optional(),
75
- category: z.string().min(1),
76
- description: z.string().min(1),
77
- source: z.string().min(1)
78
- });
79
- const ArtifactSchema = z.object({
80
- id: z.string().uuid(),
81
- pipeline_run_id: z.string().nullable().optional(),
82
- phase: z.string().min(1),
83
- type: z.string().min(1),
84
- path: z.string().min(1),
85
- content_hash: z.string().nullable().optional(),
86
- summary: z.string().nullable().optional(),
87
- created_at: z.string().optional()
88
- });
89
- const RegisterArtifactInputSchema = z.object({
90
- pipeline_run_id: z.string().nullable().optional(),
91
- phase: z.string().min(1),
92
- type: z.string().min(1),
93
- path: z.string().min(1),
94
- content_hash: z.string().nullable().optional(),
95
- summary: z.string().nullable().optional()
96
- });
97
- const PipelineRunSchema = z.object({
98
- id: z.string().uuid(),
99
- methodology: z.string().min(1),
100
- current_phase: z.string().nullable().optional(),
101
- status: PipelineRunStatusEnum,
102
- config_json: z.string().nullable().optional(),
103
- token_usage_json: z.string().nullable().optional(),
104
- parent_run_id: z.string().nullable().optional(),
105
- created_at: z.string().optional(),
106
- updated_at: z.string().optional()
107
- });
108
- const CreatePipelineRunInputSchema = z.object({
109
- methodology: z.string().min(1),
110
- start_phase: z.string().nullable().optional(),
111
- config_json: z.string().nullable().optional()
112
- });
113
- const TokenUsageSchema = z.object({
114
- id: z.number().int().optional(),
115
- pipeline_run_id: z.string().nullable().optional(),
116
- phase: z.string().min(1),
117
- agent: z.string().min(1),
118
- input_tokens: z.number().int().min(0).default(0),
119
- output_tokens: z.number().int().min(0).default(0),
120
- cost_usd: z.number().min(0).default(0),
121
- metadata: z.string().nullable().optional(),
122
- created_at: z.string().optional()
123
- });
124
- const AddTokenUsageInputSchema = z.object({
125
- phase: z.string().min(1),
126
- agent: z.string().min(1),
127
- input_tokens: z.number().int().min(0).default(0),
128
- output_tokens: z.number().int().min(0).default(0),
129
- cost_usd: z.number().min(0).default(0),
130
- metadata: z.string().nullable().optional()
131
- });
132
-
133
- //#endregion
134
- //#region src/persistence/queries/decisions.ts
135
- /**
136
- * Insert a new decision record with a generated UUID.
137
- */
138
- async function createDecision(adapter, input) {
139
- const validated = CreateDecisionInputSchema.parse(input);
140
- const id = crypto.randomUUID();
141
- await adapter.query(`INSERT INTO decisions (id, pipeline_run_id, phase, category, \`key\`, value, rationale)
142
- VALUES (?, ?, ?, ?, ?, ?, ?)`, [
143
- id,
144
- validated.pipeline_run_id ?? null,
145
- validated.phase,
146
- validated.category,
147
- validated.key,
148
- validated.value,
149
- validated.rationale ?? null
150
- ]);
151
- const rows = await adapter.query("SELECT * FROM decisions WHERE id = ?", [id]);
152
- return rows[0];
153
- }
154
- /**
155
- * Insert or update a decision record.
156
- * If a decision with the same pipeline_run_id, category, and key already exists,
157
- * update its value and rationale. Otherwise, insert a new record.
158
- */
159
- async function upsertDecision(adapter, input) {
160
- const validated = CreateDecisionInputSchema.parse(input);
161
- const rows = await adapter.query("SELECT * FROM decisions WHERE pipeline_run_id = ? AND category = ? AND `key` = ? LIMIT 1", [
162
- validated.pipeline_run_id ?? null,
163
- validated.category,
164
- validated.key
165
- ]);
166
- const existing = rows[0];
167
- if (existing) {
168
- await updateDecision(adapter, existing.id, {
169
- value: validated.value,
170
- rationale: validated.rationale ?? void 0
171
- });
172
- const updated = await adapter.query("SELECT * FROM decisions WHERE id = ?", [existing.id]);
173
- return updated[0];
174
- }
175
- return createDecision(adapter, input);
176
- }
177
- /**
178
- * Get all decisions for a given phase, ordered by created_at ascending.
179
- */
180
- async function getDecisionsByPhase(adapter, phase) {
181
- return adapter.query("SELECT * FROM decisions WHERE phase = ? ORDER BY created_at ASC", [phase]);
182
- }
183
- /**
184
- * Get all decisions for a given phase scoped to a specific pipeline run,
185
- * ordered by created_at ascending.
186
- */
187
- async function getDecisionsByPhaseForRun(adapter, runId, phase) {
188
- return adapter.query("SELECT * FROM decisions WHERE pipeline_run_id = ? AND phase = ? ORDER BY created_at ASC", [runId, phase]);
189
- }
190
- /**
191
- * Get all decisions for a given category, ordered by created_at ascending.
192
- */
193
- async function getDecisionsByCategory(adapter, category) {
194
- return adapter.query("SELECT * FROM decisions WHERE category = ? ORDER BY created_at ASC", [category]);
195
- }
196
- /**
197
- * Update a decision's value and/or rationale and set updated_at.
198
- */
199
- async function updateDecision(adapter, id, updates) {
200
- const setClauses = [];
201
- const values = [];
202
- if (updates.value !== void 0) {
203
- setClauses.push("value = ?");
204
- values.push(updates.value);
205
- }
206
- if (updates.rationale !== void 0) {
207
- setClauses.push("rationale = ?");
208
- values.push(updates.rationale);
209
- }
210
- if (setClauses.length === 0) return;
211
- setClauses.push("updated_at = ?");
212
- values.push(new Date().toISOString());
213
- values.push(id);
214
- await adapter.query(`UPDATE decisions SET ${setClauses.join(", ")} WHERE id = ?`, values);
215
- }
216
- /**
217
- * Insert a new requirement with status = 'active'.
218
- */
219
- async function createRequirement(adapter, input) {
220
- const validated = CreateRequirementInputSchema.parse(input);
221
- const id = crypto.randomUUID();
222
- await adapter.query(`INSERT INTO requirements (id, pipeline_run_id, source, type, description, priority, status)
223
- VALUES (?, ?, ?, ?, ?, ?, 'active')`, [
224
- id,
225
- validated.pipeline_run_id ?? null,
226
- validated.source,
227
- validated.type,
228
- validated.description,
229
- validated.priority
230
- ]);
231
- const rows = await adapter.query("SELECT * FROM requirements WHERE id = ?", [id]);
232
- return rows[0];
233
- }
234
- /**
235
- * List requirements with optional filtering by type, priority, and status.
236
- */
237
- async function listRequirements(adapter, filters) {
238
- const conditions = [];
239
- const values = [];
240
- if (filters?.type !== void 0) {
241
- conditions.push("type = ?");
242
- values.push(filters.type);
243
- }
244
- if (filters?.priority !== void 0) {
245
- conditions.push("priority = ?");
246
- values.push(filters.priority);
247
- }
248
- if (filters?.status !== void 0) {
249
- conditions.push("status = ?");
250
- values.push(filters.status);
251
- }
252
- const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
253
- return adapter.query(`SELECT * FROM requirements ${where} ORDER BY created_at ASC`, values);
254
- }
255
- /**
256
- * Register a new artifact record.
257
- */
258
- async function registerArtifact(adapter, input) {
259
- const validated = RegisterArtifactInputSchema.parse(input);
260
- const id = crypto.randomUUID();
261
- await adapter.query(`INSERT INTO artifacts (id, pipeline_run_id, phase, type, path, content_hash, summary)
262
- VALUES (?, ?, ?, ?, ?, ?, ?)`, [
263
- id,
264
- validated.pipeline_run_id ?? null,
265
- validated.phase,
266
- validated.type,
267
- validated.path,
268
- validated.content_hash ?? null,
269
- validated.summary ?? null
270
- ]);
271
- const rows = await adapter.query("SELECT * FROM artifacts WHERE id = ?", [id]);
272
- return rows[0];
273
- }
274
- /**
275
- * Get the latest artifact of a given type for a specific pipeline run.
276
- * Filters by pipeline_run_id, phase, and type.
277
- * Returns undefined if none found.
278
- */
279
- async function getArtifactByTypeForRun(adapter, runId, phase, type) {
280
- const rows = await adapter.query("SELECT * FROM artifacts WHERE pipeline_run_id = ? AND phase = ? AND type = ? ORDER BY created_at DESC, id DESC LIMIT 1", [
281
- runId,
282
- phase,
283
- type
284
- ]);
285
- return rows[0];
286
- }
287
- /**
288
- * Get all artifacts registered for a specific pipeline run, ordered by created_at ascending.
289
- */
290
- async function getArtifactsByRun(adapter, runId) {
291
- return adapter.query("SELECT * FROM artifacts WHERE pipeline_run_id = ? ORDER BY created_at ASC", [runId]);
292
- }
293
- /**
294
- * Get a pipeline run by its ID. Returns undefined if not found.
295
- */
296
- async function getPipelineRunById(adapter, id) {
297
- const rows = await adapter.query("SELECT * FROM pipeline_runs WHERE id = ?", [id]);
298
- return rows[0];
299
- }
300
- /**
301
- * Update a pipeline run's config_json field.
302
- */
303
- async function updatePipelineRunConfig(adapter, id, configJson) {
304
- await adapter.query("UPDATE pipeline_runs SET config_json = ?, updated_at = ? WHERE id = ?", [
305
- configJson,
306
- new Date().toISOString(),
307
- id
308
- ]);
309
- }
310
- /**
311
- * Create a new pipeline run with status = 'running'.
312
- */
313
- async function createPipelineRun(adapter, input) {
314
- const validated = CreatePipelineRunInputSchema.parse(input);
315
- const id = crypto.randomUUID();
316
- await adapter.query(`INSERT INTO pipeline_runs (id, methodology, current_phase, status, config_json)
317
- VALUES (?, ?, ?, 'running', ?)`, [
318
- id,
319
- validated.methodology,
320
- validated.start_phase ?? null,
321
- validated.config_json ?? null
322
- ]);
323
- const rows = await adapter.query("SELECT * FROM pipeline_runs WHERE id = ?", [id]);
324
- return rows[0];
325
- }
326
- /**
327
- * Update a pipeline run's current_phase, status, and/or token_usage_json.
328
- */
329
- async function updatePipelineRun(adapter, id, updates) {
330
- const setClauses = [];
331
- const values = [];
332
- if (updates.current_phase !== void 0) {
333
- setClauses.push("current_phase = ?");
334
- values.push(updates.current_phase);
335
- }
336
- if (updates.status !== void 0) {
337
- setClauses.push("status = ?");
338
- values.push(updates.status);
339
- }
340
- if (updates.token_usage_json !== void 0) {
341
- setClauses.push("token_usage_json = ?");
342
- values.push(updates.token_usage_json);
343
- }
344
- if (setClauses.length === 0) return;
345
- setClauses.push("updated_at = ?");
346
- values.push(new Date().toISOString());
347
- values.push(id);
348
- await adapter.query(`UPDATE pipeline_runs SET ${setClauses.join(", ")} WHERE id = ?`, values);
349
- }
350
- /**
351
- * Get all pipeline runs with status = 'running'.
352
- */
353
- async function getRunningPipelineRuns(adapter) {
354
- return adapter.query("SELECT * FROM pipeline_runs WHERE status = 'running'");
355
- }
356
- /**
357
- * Get the most recently created pipeline run. Returns undefined if none found.
358
- */
359
- async function getLatestRun(adapter) {
360
- const rows = await adapter.query("SELECT * FROM pipeline_runs ORDER BY created_at DESC, id DESC LIMIT 1");
361
- return rows[0];
362
- }
363
- /**
364
- * Append a token usage record for a pipeline run.
365
- */
366
- async function addTokenUsage(adapter, runId, usage) {
367
- const validated = AddTokenUsageInputSchema.parse(usage);
368
- await adapter.query(`INSERT INTO token_usage (pipeline_run_id, phase, agent, input_tokens, output_tokens, cost_usd, metadata)
369
- VALUES (?, ?, ?, ?, ?, ?, ?)`, [
370
- runId,
371
- validated.phase,
372
- validated.agent,
373
- validated.input_tokens,
374
- validated.output_tokens,
375
- validated.cost_usd,
376
- validated.metadata ?? null
377
- ]);
378
- }
379
- /**
380
- * Aggregate token usage by phase and agent for a given pipeline run.
381
- */
382
- async function getTokenUsageSummary(adapter, runId) {
383
- return adapter.query(`SELECT
384
- phase,
385
- agent,
386
- SUM(input_tokens) AS total_input_tokens,
387
- SUM(output_tokens) AS total_output_tokens,
388
- SUM(cost_usd) AS total_cost_usd
389
- FROM token_usage
390
- WHERE pipeline_run_id = ?
391
- GROUP BY phase, agent
392
- ORDER BY phase ASC, agent ASC`, [runId]);
393
- }
394
-
395
- //#endregion
396
- export { addTokenUsage, createDecision, createPipelineRun, createRequirement, getArtifactByTypeForRun, getArtifactsByRun, getDecisionsByCategory, getDecisionsByPhase, getDecisionsByPhaseForRun, getLatestRun, getPipelineRunById, getRunningPipelineRuns, getTokenUsageSummary, listRequirements, registerArtifact, updateDecision, updatePipelineRun, updatePipelineRunConfig, upsertDecision };
397
- //# sourceMappingURL=decisions-DhAA2HG2.js.map