astrocode-workflow 0.3.0 → 0.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +6 -0
- package/dist/shared/metrics.d.ts +66 -0
- package/dist/shared/metrics.js +112 -0
- package/dist/src/agents/commands.d.ts +9 -0
- package/dist/src/agents/commands.js +121 -0
- package/dist/src/agents/prompts.d.ts +3 -0
- package/dist/src/agents/prompts.js +232 -0
- package/dist/src/agents/registry.d.ts +6 -0
- package/dist/src/agents/registry.js +242 -0
- package/dist/src/agents/types.d.ts +14 -0
- package/dist/src/agents/types.js +8 -0
- package/dist/src/config/config-handler.d.ts +4 -0
- package/dist/src/config/config-handler.js +46 -0
- package/dist/src/config/defaults.d.ts +3 -0
- package/dist/src/config/defaults.js +3 -0
- package/dist/src/config/loader.d.ts +11 -0
- package/dist/src/config/loader.js +82 -0
- package/dist/src/config/schema.d.ts +194 -0
- package/dist/src/config/schema.js +223 -0
- package/dist/src/hooks/continuation-enforcer.d.ts +34 -0
- package/dist/src/hooks/continuation-enforcer.js +190 -0
- package/dist/src/hooks/inject-provider.d.ts +22 -0
- package/dist/src/hooks/inject-provider.js +120 -0
- package/dist/src/hooks/tool-output-truncator.d.ts +25 -0
- package/dist/src/hooks/tool-output-truncator.js +57 -0
- package/dist/src/index.d.ts +3 -0
- package/dist/src/index.js +308 -0
- package/dist/src/shared/deep-merge.d.ts +8 -0
- package/dist/src/shared/deep-merge.js +25 -0
- package/dist/src/shared/hash.d.ts +1 -0
- package/dist/src/shared/hash.js +4 -0
- package/dist/src/shared/log.d.ts +7 -0
- package/dist/src/shared/log.js +24 -0
- package/dist/src/shared/metrics.d.ts +66 -0
- package/dist/src/shared/metrics.js +112 -0
- package/dist/src/shared/model-tuning.d.ts +9 -0
- package/dist/src/shared/model-tuning.js +28 -0
- package/dist/src/shared/paths.d.ts +19 -0
- package/dist/src/shared/paths.js +64 -0
- package/dist/src/shared/text.d.ts +4 -0
- package/dist/src/shared/text.js +19 -0
- package/dist/src/shared/time.d.ts +1 -0
- package/dist/src/shared/time.js +3 -0
- package/dist/src/state/adapters/index.d.ts +41 -0
- package/dist/src/state/adapters/index.js +115 -0
- package/dist/src/state/db.d.ts +16 -0
- package/dist/src/state/db.js +225 -0
- package/dist/src/state/ids.d.ts +8 -0
- package/dist/src/state/ids.js +25 -0
- package/dist/src/state/repo-lock.d.ts +3 -0
- package/dist/src/state/repo-lock.js +29 -0
- package/dist/src/state/schema.d.ts +2 -0
- package/dist/src/state/schema.js +251 -0
- package/dist/src/state/types.d.ts +71 -0
- package/dist/src/state/types.js +1 -0
- package/dist/src/tools/artifacts.d.ts +18 -0
- package/dist/src/tools/artifacts.js +71 -0
- package/dist/src/tools/health.d.ts +8 -0
- package/dist/src/tools/health.js +119 -0
- package/dist/src/tools/index.d.ts +20 -0
- package/dist/src/tools/index.js +94 -0
- package/dist/src/tools/init.d.ts +17 -0
- package/dist/src/tools/init.js +96 -0
- package/dist/src/tools/injects.d.ts +53 -0
- package/dist/src/tools/injects.js +325 -0
- package/dist/src/tools/metrics.d.ts +7 -0
- package/dist/src/tools/metrics.js +61 -0
- package/dist/src/tools/repair.d.ts +8 -0
- package/dist/src/tools/repair.js +25 -0
- package/dist/src/tools/reset.d.ts +8 -0
- package/dist/src/tools/reset.js +92 -0
- package/dist/src/tools/run.d.ts +13 -0
- package/dist/src/tools/run.js +54 -0
- package/dist/src/tools/spec.d.ts +12 -0
- package/dist/src/tools/spec.js +44 -0
- package/dist/src/tools/stage.d.ts +23 -0
- package/dist/src/tools/stage.js +371 -0
- package/dist/src/tools/status.d.ts +8 -0
- package/dist/src/tools/status.js +125 -0
- package/dist/src/tools/story.d.ts +23 -0
- package/dist/src/tools/story.js +85 -0
- package/dist/src/tools/workflow.d.ts +13 -0
- package/dist/src/tools/workflow.js +355 -0
- package/dist/src/ui/inject.d.ts +12 -0
- package/dist/src/ui/inject.js +107 -0
- package/dist/src/ui/toasts.d.ts +13 -0
- package/dist/src/ui/toasts.js +39 -0
- package/dist/src/workflow/artifacts.d.ts +24 -0
- package/dist/src/workflow/artifacts.js +45 -0
- package/dist/src/workflow/baton.d.ts +72 -0
- package/dist/src/workflow/baton.js +166 -0
- package/dist/src/workflow/context.d.ts +20 -0
- package/dist/src/workflow/context.js +113 -0
- package/dist/src/workflow/directives.d.ts +39 -0
- package/dist/src/workflow/directives.js +137 -0
- package/dist/src/workflow/repair.d.ts +8 -0
- package/dist/src/workflow/repair.js +99 -0
- package/dist/src/workflow/state-machine.d.ts +86 -0
- package/dist/src/workflow/state-machine.js +216 -0
- package/dist/src/workflow/story-helpers.d.ts +9 -0
- package/dist/src/workflow/story-helpers.js +13 -0
- package/dist/state/db.d.ts +1 -0
- package/dist/state/db.js +9 -0
- package/dist/state/repo-lock.d.ts +3 -0
- package/dist/state/repo-lock.js +29 -0
- package/dist/test/integration/db-transactions.test.d.ts +1 -0
- package/dist/test/integration/db-transactions.test.js +126 -0
- package/dist/test/integration/injection-metrics.test.d.ts +1 -0
- package/dist/test/integration/injection-metrics.test.js +129 -0
- package/dist/tools/health.d.ts +8 -0
- package/dist/tools/health.js +119 -0
- package/dist/tools/index.js +9 -0
- package/dist/tools/metrics.d.ts +7 -0
- package/dist/tools/metrics.js +61 -0
- package/dist/tools/reset.d.ts +8 -0
- package/dist/tools/reset.js +92 -0
- package/dist/tools/workflow.js +210 -215
- package/dist/ui/inject.d.ts +6 -0
- package/dist/ui/inject.js +86 -67
- package/dist/workflow/state-machine.d.ts +32 -32
- package/dist/workflow/state-machine.js +85 -170
- package/package.json +6 -3
- package/src/index.ts +8 -0
- package/src/shared/metrics.ts +148 -0
- package/src/state/db.ts +10 -1
- package/src/state/repo-lock.ts +158 -0
- package/src/tools/health.ts +128 -0
- package/src/tools/index.ts +12 -3
- package/src/tools/init.ts +26 -14
- package/src/tools/metrics.ts +71 -0
- package/src/tools/repair.ts +21 -8
- package/src/tools/reset.ts +100 -0
- package/src/tools/stage.ts +12 -0
- package/src/tools/status.ts +17 -3
- package/src/tools/story.ts +41 -15
- package/src/tools/workflow.ts +123 -121
- package/src/ui/inject.ts +113 -79
- package/src/workflow/state-machine.ts +123 -227
- package/src/tools/workflow.ts.backup +0 -681
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
import { nowISO } from "../shared/time";
|
|
2
|
+
import { newEventId, newStageRunId } from "../state/ids";
|
|
3
|
+
/** Record a completed repair action on the report. */
function push(report, line) {
    const { actions } = report;
    actions.push(line);
}
/** Record a non-fatal inconsistency that was detected but not auto-fixed. */
function warn(report, line) {
    const { warnings } = report;
    warnings.push(line);
}
|
|
9
|
+
/**
 * One-shot consistency repair over the workflow database.
 *
 * Walks a fixed sequence of invariants and patches violations in place:
 *   1. At most one run has status='running' (extras aborted, newest kept).
 *   2. The active run's story is locked to that run and marked in_progress.
 *   3. Every pipeline stage of the active run has a stage_runs row.
 *   4. runs.current_stage_key points at the first incomplete stage.
 *   5. Stories flagged in_progress with no running run are unlocked.
 *
 * Statement order matters: later steps re-read rows mutated by earlier
 * steps (e.g. step 2 re-selects the single surviving running run after
 * step 1 aborted duplicates). The caller owns transaction boundaries.
 *
 * @param db     SQLite handle exposing prepare().get()/.all()/.run().
 * @param config Loaded config; only config.db.schema_version_required is
 *               read (echoed into the final 'repair.completed' event).
 * @returns {{actions: string[], warnings: string[]}} human-readable report.
 */
export function repairState(db, config) {
    const report = { actions: [], warnings: [] };
    const now = nowISO();
    // 1) Multiple running runs -> abort extras (keep most recent started_at)
    const running = db
        .prepare("SELECT * FROM runs WHERE status='running' ORDER BY started_at DESC, created_at DESC")
        .all();
    if (running.length > 1) {
        const keep = running[0];
        for (const r of running.slice(1)) {
            db.prepare("UPDATE runs SET status='aborted', error_text=?, completed_at=?, updated_at=? WHERE run_id=?").run("repair: multiple running runs", now, now, r.run_id);
            // unlock story
            db.prepare("UPDATE stories SET state='approved', in_progress=0, locked_by_run_id=NULL, locked_at=NULL, updated_at=? WHERE story_key=?").run(now, r.story_key);
            db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, NULL, 'run.aborted', ?, ?)").run(newEventId(), r.run_id, JSON.stringify({ reason: "repair: multiple running runs" }), now);
            push(report, `Aborted extra running run ${r.run_id} (kept ${keep.run_id})`);
        }
    }
    // 2) Fix story locks for the active run
    const active = db.prepare("SELECT * FROM runs WHERE status='running' ORDER BY started_at DESC LIMIT 1").get();
    if (active) {
        const story = db.prepare("SELECT * FROM stories WHERE story_key=?").get(active.story_key);
        if (!story) {
            // Can't repair a lock on a row that doesn't exist; surface it instead.
            warn(report, `Active run ${active.run_id} references missing story ${active.story_key}`);
        }
        else {
            if (story.locked_by_run_id !== active.run_id || story.in_progress !== 1 || story.state !== "in_progress") {
                // COALESCE keeps the original locked_at when one already exists.
                db.prepare("UPDATE stories SET state='in_progress', in_progress=1, locked_by_run_id=?, locked_at=COALESCE(locked_at, ?), updated_at=? WHERE story_key=?").run(active.run_id, now, now, story.story_key);
                push(report, `Repaired story lock for ${story.story_key} to run ${active.run_id}`);
            }
        }
        // 3) Ensure stage_runs exist for pipeline
        const pipeline = JSON.parse(active.pipeline_stages_json ?? "[]");
        const stageRuns = db
            .prepare("SELECT * FROM stage_runs WHERE run_id=? ORDER BY stage_index ASC")
            .all(active.run_id);
        if (stageRuns.length < pipeline.length) {
            const existingKeys = new Set(stageRuns.map((s) => s.stage_key));
            // NOTE(review): this insert omits created_at, while createRunForStory
            // sets it — confirm the column is nullable / has a default.
            const insert = db.prepare("INSERT INTO stage_runs (stage_run_id, run_id, stage_key, stage_index, status, updated_at) VALUES (?, ?, ?, ?, 'pending', ?)");
            pipeline.forEach((key, idx) => {
                if (!existingKeys.has(key)) {
                    insert.run(newStageRunId(), active.run_id, key, idx, now);
                    push(report, `Inserted missing stage_run ${key} for run ${active.run_id}`);
                }
            });
        }
        // 4) If current_stage_key missing, set to first incomplete
        // Re-read: step 3 may have inserted rows.
        const refreshed = db
            .prepare("SELECT * FROM stage_runs WHERE run_id=? ORDER BY stage_index ASC")
            .all(active.run_id);
        const cur = refreshed.find((s) => s.status !== "completed" && s.status !== "skipped");
        if (cur && active.current_stage_key !== cur.stage_key) {
            db.prepare("UPDATE runs SET current_stage_key=?, updated_at=? WHERE run_id=?").run(cur.stage_key, now, active.run_id);
            push(report, `Set run.current_stage_key to ${cur.stage_key} for ${active.run_id}`);
        }
    }
    // 5) Orphaned story locks (in_progress=1 but no running run)
    const lockedStories = db
        .prepare("SELECT * FROM stories WHERE in_progress=1")
        .all();
    for (const s of lockedStories) {
        const run = s.locked_by_run_id
            ? db.prepare("SELECT * FROM runs WHERE run_id=?").get(s.locked_by_run_id)
            : undefined;
        if (!run || run.status !== "running") {
            db.prepare("UPDATE stories SET in_progress=0, locked_by_run_id=NULL, locked_at=NULL, updated_at=? WHERE story_key=?").run(now, s.story_key);
            // Keep state conservative: if it was in_progress but no run, revert to approved.
            db.prepare("UPDATE stories SET state='approved', updated_at=? WHERE story_key=? AND state='in_progress'").run(now, s.story_key);
            push(report, `Cleared orphaned lock on story ${s.story_key}`);
        }
    }
    if (!report.actions.length && !report.warnings.length) {
        report.actions.push("No repairs needed.");
    }
    // Event marker
    db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, NULL, NULL, 'repair.completed', ?, ?)").run(newEventId(), JSON.stringify({ actions: report.actions, warnings: report.warnings, schema: config.db.schema_version_required }), now);
    return report;
}
|
|
86
|
+
/**
 * Render a repair report as Markdown: an "Actions" section always,
 * followed by a "Warnings" section only when warnings exist.
 */
export function formatRepairReport(report) {
    const out = ["# Astrocode repair report", "", "## Actions"];
    out.push(...report.actions.map((a) => `- ${a}`));
    if (report.warnings.length) {
        out.push("", "## Warnings", ...report.warnings.map((w) => `- ${w}`));
    }
    return out.join("\n").trim();
}
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
import type { AstrocodeConfig } from "../config/schema";
|
|
2
|
+
import type { SqliteDb } from "../state/db";
|
|
3
|
+
import type { RunRow, StageKey, StageRunRow, StoryRow } from "../state/types";
|
|
4
|
+
/** Canonical event-type strings written to the events table. */
export declare const EVENT_TYPES: {
    readonly RUN_STARTED: "run.started";
    readonly RUN_COMPLETED: "run.completed";
    readonly RUN_FAILED: "run.failed";
    readonly RUN_ABORTED: "run.aborted";
    readonly RUN_GENESIS_PLANNING_ATTACHED: "run.genesis_planning_attached";
    readonly STAGE_STARTED: "stage.started";
    readonly WORKFLOW_PROCEED: "workflow.proceed";
};
/** Discriminated union of UI notifications emitted at stage/run transitions. */
export type UiEmitEvent = {
    kind: "stage_started";
    run_id: string;
    stage_key: StageKey;
    agent_name?: string;
} | {
    kind: "run_completed";
    run_id: string;
    story_key: string;
} | {
    kind: "run_failed";
    run_id: string;
    story_key: string;
    stage_key: StageKey;
    error_text: string;
};
/** Callback used to surface UiEmitEvent notifications to the UI layer. */
export type UiEmit = (e: UiEmitEvent) => void;
/**
 * PLANNING-FIRST REDESIGN
 * ----------------------
 * Never mutate story title/body.
 *
 * Deterministic trigger:
 * - config.workflow.genesis_planning:
 *   - "off"              => never attach directive
 *   - "first_story_only" => only when story_key === "S-0001"
 *   - "always"           => attach for every run
 *
 * Contract: DB is already initialized before workflow is used:
 * - schema tables exist
 * - repo_state singleton row (id=1) exists
 *
 * IMPORTANT: Do NOT call withTx() in here. The caller owns transaction boundaries.
 */
export type NextAction = {
    kind: "idle";
    reason: "no_approved_stories";
} | {
    kind: "start_run";
    story_key: string;
} | {
    kind: "delegate_stage";
    run_id: string;
    stage_key: StageKey;
    stage_run_id: string;
} | {
    kind: "await_stage_completion";
    run_id: string;
    stage_key: StageKey;
    stage_run_id: string;
} | {
    kind: "complete_run";
    run_id: string;
} | {
    kind: "failed";
    run_id: string;
    stage_key: StageKey;
    error_text: string;
};
/** Most recent run with status 'running', or null when none is active. */
export declare function getActiveRun(db: SqliteDb): RunRow | null;
/** Story row by key, or null when it does not exist. */
export declare function getStory(db: SqliteDb, storyKey: string): StoryRow | null;
/** All stage_runs rows for a run, ordered by stage_index. */
export declare function getStageRuns(db: SqliteDb, runId: string): StageRunRow[];
/** First stage that is neither completed nor skipped, or null when all done. */
export declare function getCurrentStageRun(stageRuns: StageRunRow[]): StageRunRow | null;
/** Pure decision step: what the orchestrator should do next. */
export declare function decideNextAction(db: SqliteDb, config: AstrocodeConfig): NextAction;
/** Lock an approved story and create a running run with pending stage rows. */
export declare function createRunForStory(db: SqliteDb, config: AstrocodeConfig, storyKey: string): {
    run_id: string;
};
/** Transition a pending stage to 'running'; optionally notifies the UI. */
export declare function startStage(db: SqliteDb, runId: string, stageKey: StageKey, meta?: {
    subagent_type?: string;
    subagent_session_id?: string;
}, emit?: UiEmit): void;
/** Close a run whose stages are all completed/skipped; story becomes 'done'. */
export declare function completeRun(db: SqliteDb, runId: string, emit?: UiEmit): void;
/** Mark a run failed at a stage; story becomes 'blocked' and is unlocked. */
export declare function failRun(db: SqliteDb, runId: string, stageKey: StageKey, errorText: string, emit?: UiEmit): void;
/** Abort a run with a reason; story reverts to 'approved' and is unlocked. */
export declare function abortRun(db: SqliteDb, runId: string, reason: string): void;
|
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
import { nowISO } from "../shared/time";
|
|
2
|
+
import { newEventId, newRunId, newStageRunId } from "../state/ids";
|
|
3
|
+
import { warn } from "../shared/log";
|
|
4
|
+
import { sha256Hex } from "../shared/hash";
|
|
5
|
+
import { SCHEMA_VERSION } from "../state/schema";
|
|
6
|
+
/**
 * Canonical event-type strings written to the events table.
 * Frozen because the accompanying .d.ts declares every property
 * `readonly` — the runtime object should honor that contract.
 */
export const EVENT_TYPES = Object.freeze({
    RUN_STARTED: "run.started",
    RUN_COMPLETED: "run.completed",
    RUN_FAILED: "run.failed",
    RUN_ABORTED: "run.aborted",
    RUN_GENESIS_PLANNING_ATTACHED: "run.genesis_planning_attached",
    STAGE_STARTED: "stage.started",
    WORKFLOW_PROCEED: "workflow.proceed",
});
|
|
15
|
+
/**
 * True when a table with the given name exists in sqlite_master.
 * Any driver/query error is treated as "table absent".
 */
function tableExists(db, tableName) {
    const sql = "SELECT name FROM sqlite_master WHERE type='table' AND name=?";
    try {
        const found = db.prepare(sql).get(tableName);
        return found !== undefined && found !== null && found.name === tableName;
    } catch {
        return false;
    }
}
|
|
26
|
+
/** Most recent run with status 'running', or null when none is active. */
export function getActiveRun(db) {
    const sql = "SELECT * FROM runs WHERE status = 'running' ORDER BY started_at DESC, created_at DESC LIMIT 1";
    const hit = db.prepare(sql).get();
    if (hit === undefined || hit === null) {
        return null;
    }
    return hit;
}
|
|
32
|
+
/** Story row by key, or null when it does not exist. */
export function getStory(db, storyKey) {
    const found = db.prepare("SELECT * FROM stories WHERE story_key = ?").get(storyKey);
    if (found === undefined || found === null) {
        return null;
    }
    return found;
}
|
|
36
|
+
/** All stage_runs rows for a run, ordered by stage_index ascending. */
export function getStageRuns(db, runId) {
    const stmt = db.prepare("SELECT * FROM stage_runs WHERE run_id = ? ORDER BY stage_index ASC");
    return stmt.all(runId);
}
|
|
39
|
+
/**
 * First stage that is neither completed nor skipped — the one the
 * orchestrator should act on next — or null when all stages are done.
 */
export function getCurrentStageRun(stageRuns) {
    for (const stage of stageRuns) {
        if (stage.status === "completed" || stage.status === "skipped") {
            continue;
        }
        return stage;
    }
    return null;
}
|
|
43
|
+
/**
 * Pure decision step: inspect current DB state and return the next
 * orchestrator action. No active run -> pick the highest-priority
 * approved, unlocked story (or idle). Active run -> dispatch on the
 * current stage's status.
 */
export function decideNextAction(db, config) {
    const activeRun = getActiveRun(db);
    if (!activeRun) {
        const candidate = db
            .prepare("SELECT * FROM stories WHERE state = 'approved' AND (locked_by_run_id IS NULL) ORDER BY priority DESC, approved_at ASC, created_at ASC LIMIT 1")
            .get();
        return candidate
            ? { kind: "start_run", story_key: candidate.story_key }
            : { kind: "idle", reason: "no_approved_stories" };
    }
    const current = getCurrentStageRun(getStageRuns(db, activeRun.run_id));
    if (!current) {
        return { kind: "complete_run", run_id: activeRun.run_id };
    }
    switch (current.status) {
        case "pending":
            return { kind: "delegate_stage", run_id: activeRun.run_id, stage_key: current.stage_key, stage_run_id: current.stage_run_id };
        case "running":
            return { kind: "await_stage_completion", run_id: activeRun.run_id, stage_key: current.stage_key, stage_run_id: current.stage_run_id };
        case "failed":
            return { kind: "failed", run_id: activeRun.run_id, stage_key: current.stage_key, error_text: current.error_text ?? "stage failed" };
        default:
            // Unknown status: log and fall back to the safe "wait" action.
            warn("Unexpected stage status in decideNextAction", { status: current.status, stage_key: current.stage_key });
            return { kind: "await_stage_completion", run_id: activeRun.run_id, stage_key: current.stage_key, stage_run_id: current.stage_run_id };
    }
}
|
|
69
|
+
/**
 * Extract workflow.pipeline from config, rejecting missing/empty values.
 * @throws Error when the pipeline is not a non-empty array.
 */
function getPipelineFromConfig(config) {
    const stages = config?.workflow?.pipeline;
    const valid = Array.isArray(stages) && stages.length > 0;
    if (!valid) {
        throw new Error("Invalid config: workflow.pipeline must be a non-empty array of stage keys.");
    }
    return stages;
}
|
|
76
|
+
/**
 * Read workflow.genesis_planning, falling back (with a warning) to
 * "first_story_only" for any unrecognized value.
 */
function getGenesisPlanningMode(config) {
    const raw = config?.workflow?.genesis_planning;
    switch (raw) {
        case "off":
        case "first_story_only":
        case "always":
            return raw;
        default:
            warn(`Invalid genesis_planning config: ${String(raw)}. Using default "first_story_only".`);
            return "first_story_only";
    }
}
/**
 * Deterministic trigger for the genesis planning directive:
 * "off" never, "always" every run, otherwise only for story S-0001.
 */
function shouldAttachPlanningDirective(config, story) {
    switch (getGenesisPlanningMode(config)) {
        case "off":
            return false;
        case "always":
            return true;
        default:
            return story.story_key === "S-0001";
    }
}
|
|
91
|
+
/**
 * Best-effort: attach a run-scoped "genesis planning" inject directing the
 * agent to decompose the origin story into granular stories.
 *
 * - No-ops when the `injects` table does not exist.
 * - Uses a deterministic inject id (`inj_<runId>_genesis_plan`) together
 *   with INSERT OR IGNORE, so repeated calls are idempotent.
 * - Failures are logged via warn() and swallowed on purpose: attaching the
 *   directive must never break run creation.
 */
function attachRunPlanningDirective(db, runId, story, pipeline) {
    if (!tableExists(db, "injects"))
        return;
    const now = nowISO();
    const injectId = `inj_${runId}_genesis_plan`;
    const body = [
        `# Genesis planning directive`,
        ``,
        `This run is configured to perform a planning/decomposition pass before implementation.`,
        `Do not edit the origin story title/body. Create additional stories instead.`,
        ``,
        `## Required output`,
        `- Produce 50–200 granular implementation stories with clear acceptance criteria.`,
        `- Each story: single focused change, explicit done conditions, dependencies listed.`,
        ``,
        `## Context`,
        `- Origin story: ${story.story_key} — ${story.title ?? ""}`,
        `- Pipeline: ${pipeline.join(" → ")}`,
        ``,
    ].join("\n");
    // Content hash stored alongside the inject (dedupe/integrity per schema).
    const hash = sha256Hex(body);
    try {
        db.prepare(`
      INSERT OR IGNORE INTO injects (
        inject_id, type, title, body_md, tags_json, scope, source, priority,
        expires_at, sha256, created_at, updated_at
      ) VALUES (
        ?, 'note', ?, ?, '["genesis","planning","decompose"]', ?, 'tool', 100,
        NULL, ?, ?, ?
      )
    `).run(injectId, "Genesis planning: decompose into stories", body, `run:${runId}`, hash, now, now);
        db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, NULL, ?, ?, ?)").run(newEventId(), runId, EVENT_TYPES.RUN_GENESIS_PLANNING_ATTACHED, JSON.stringify({ story_key: story.story_key, inject_id: injectId }), now);
    }
    catch (e) {
        warn("Failed to attach genesis planning inject", { run_id: runId, story_key: story.story_key, err: e });
    }
}
|
|
128
|
+
/**
 * Create and start a run for an approved story.
 *
 * Sequence (order matters; caller owns transaction boundaries):
 *   1. Validate the story exists and is in state 'approved'.
 *   2. Lock the story: state 'in_progress', locked_by_run_id set.
 *   3. Insert the run as 'running' with the configured pipeline and
 *      current_stage_key pointed at the first stage.
 *   4. Seed one 'pending' stage_runs row per pipeline stage.
 *   5. Record a 'run.started' event.
 *   6. Optionally attach the genesis planning directive (config-driven).
 *   7. Upsert the repo_state singleton (id=1) with last-run bookkeeping.
 *
 * @throws Error when the story is missing or not 'approved', or when the
 *         configured pipeline is invalid (via getPipelineFromConfig).
 * @returns {{run_id: string}} id of the newly created run.
 */
export function createRunForStory(db, config, storyKey) {
    const story = getStory(db, storyKey);
    if (!story)
        throw new Error(`Story not found: ${storyKey}`);
    if (story.state !== "approved")
        throw new Error(`Story must be approved to run: ${storyKey} (state=${story.state})`);
    const run_id = newRunId();
    const now = nowISO();
    const pipeline = getPipelineFromConfig(config);
    db.prepare("UPDATE stories SET state='in_progress', in_progress=1, locked_by_run_id=?, locked_at=?, updated_at=? WHERE story_key=?").run(run_id, now, now, storyKey);
    db.prepare("INSERT INTO runs (run_id, story_key, status, pipeline_stages_json, current_stage_key, created_at, started_at, updated_at) VALUES (?, ?, 'running', ?, ?, ?, ?, ?)").run(run_id, storyKey, JSON.stringify(pipeline), pipeline[0] ?? null, now, now, now);
    const insertStage = db.prepare("INSERT INTO stage_runs (stage_run_id, run_id, stage_key, stage_index, status, created_at, updated_at) VALUES (?, ?, ?, ?, 'pending', ?, ?)");
    pipeline.forEach((stageKey, idx) => {
        insertStage.run(newStageRunId(), run_id, stageKey, idx, now, now);
    });
    db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, NULL, ?, ?, ?)").run(newEventId(), run_id, EVENT_TYPES.RUN_STARTED, JSON.stringify({ story_key: storyKey, pipeline }), now);
    if (shouldAttachPlanningDirective(config, story)) {
        attachRunPlanningDirective(db, run_id, story, pipeline);
    }
    // Upsert keeps the singleton row's created_at if it already exists.
    db.prepare(`
    INSERT INTO repo_state (id, schema_version, created_at, updated_at, last_run_id, last_story_key, last_event_at)
    VALUES (1, ?, ?, ?, ?, ?, ?)
    ON CONFLICT(id) DO UPDATE SET
      last_run_id=excluded.last_run_id,
      last_story_key=excluded.last_story_key,
      last_event_at=excluded.last_event_at,
      updated_at=excluded.updated_at
  `).run(SCHEMA_VERSION, now, now, now, run_id, storyKey, now);
    return { run_id };
}
|
|
158
|
+
/**
 * Transition a pending stage of a running run to 'running'.
 *
 * Validates that the run exists and is 'running' and that the stage row
 * exists and is 'pending'; then marks the stage running (COALESCE keeps
 * previously recorded subagent metadata when meta omits it), points
 * runs.current_stage_key at it, records a 'stage.started' event, touches
 * repo_state.last_event_at, and notifies the UI via emit.
 *
 * @throws Error when the run/stage is missing or in the wrong status.
 */
export function startStage(db, runId, stageKey, meta, emit) {
    const now = nowISO();
    const run = db.prepare("SELECT * FROM runs WHERE run_id=?").get(runId);
    if (!run)
        throw new Error(`Run not found: ${runId}`);
    if (run.status !== "running")
        throw new Error(`Run is not running: ${runId} (status=${run.status})`);
    const stage = db.prepare("SELECT * FROM stage_runs WHERE run_id=? AND stage_key=?").get(runId, stageKey);
    if (!stage)
        throw new Error(`Stage run not found: ${runId}/${stageKey}`);
    if (stage.status !== "pending")
        throw new Error(`Stage is not pending: ${stageKey} (status=${stage.status})`);
    db.prepare("UPDATE stage_runs SET status='running', started_at=?, updated_at=?, subagent_type=COALESCE(?, subagent_type), subagent_session_id=COALESCE(?, subagent_session_id) WHERE stage_run_id=?").run(now, now, meta?.subagent_type ?? null, meta?.subagent_session_id ?? null, stage.stage_run_id);
    db.prepare("UPDATE runs SET current_stage_key=?, updated_at=? WHERE run_id=?").run(stageKey, now, runId);
    db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, ?, ?, ?, ?)").run(newEventId(), runId, stageKey, EVENT_TYPES.STAGE_STARTED, JSON.stringify({ subagent_type: meta?.subagent_type ?? null }), now);
    db.prepare("UPDATE repo_state SET last_event_at=?, updated_at=? WHERE id=1").run(now, now);
    // ✅ Explicit wiring point (requested): stage movement
    emit?.({ kind: "stage_started", run_id: runId, stage_key: stageKey, agent_name: meta?.subagent_type });
}
|
|
177
|
+
/**
 * Close a running run whose stages are all completed or skipped.
 *
 * Marks the run 'completed' (clearing current_stage_key), moves its story
 * to 'done' and releases the lock, records a 'run.completed' event,
 * touches repo_state.last_event_at, and notifies the UI via emit.
 *
 * @throws Error when the run is missing, not 'running', or still has an
 *         incomplete stage.
 */
export function completeRun(db, runId, emit) {
    const now = nowISO();
    const run = db.prepare("SELECT * FROM runs WHERE run_id=?").get(runId);
    if (!run)
        throw new Error(`Run not found: ${runId}`);
    if (run.status !== "running")
        throw new Error(`Run not running: ${runId} (status=${run.status})`);
    const stageRuns = getStageRuns(db, runId);
    const incomplete = stageRuns.find((s) => s.status !== "completed" && s.status !== "skipped");
    if (incomplete)
        throw new Error(`Cannot complete run: stage ${incomplete.stage_key} is ${incomplete.status}`);
    db.prepare("UPDATE runs SET status='completed', completed_at=?, updated_at=?, current_stage_key=NULL WHERE run_id=?").run(now, now, runId);
    db.prepare("UPDATE stories SET state='done', in_progress=0, locked_by_run_id=NULL, locked_at=NULL, updated_at=? WHERE story_key=?").run(now, run.story_key);
    db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, NULL, ?, ?, ?)").run(newEventId(), runId, EVENT_TYPES.RUN_COMPLETED, JSON.stringify({ story_key: run.story_key }), now);
    db.prepare("UPDATE repo_state SET last_event_at=?, updated_at=? WHERE id=1").run(now, now);
    // ✅ Explicit wiring point (requested): run closed success
    emit?.({ kind: "run_completed", run_id: runId, story_key: run.story_key });
}
|
|
195
|
+
/**
 * Mark a run failed at a given stage.
 *
 * Sets the run to 'failed' with the error text, moves its story to
 * 'blocked' and releases the lock, records a 'run.failed' event, touches
 * repo_state.last_event_at, and notifies the UI via emit.
 *
 * @throws Error when the run does not exist.
 */
export function failRun(db, runId, stageKey, errorText, emit) {
    const ts = nowISO();
    const runRow = db.prepare("SELECT * FROM runs WHERE run_id=?").get(runId);
    if (!runRow) {
        throw new Error(`Run not found: ${runId}`);
    }
    db.prepare("UPDATE runs SET status='failed', error_text=?, updated_at=?, completed_at=? WHERE run_id=?").run(errorText, ts, ts, runId);
    db.prepare("UPDATE stories SET state='blocked', in_progress=0, locked_by_run_id=NULL, locked_at=NULL, updated_at=? WHERE story_key=?").run(ts, runRow.story_key);
    db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, ?, ?, ?, ?)").run(newEventId(), runId, stageKey, EVENT_TYPES.RUN_FAILED, JSON.stringify({ error_text: errorText }), ts);
    db.prepare("UPDATE repo_state SET last_event_at=?, updated_at=? WHERE id=1").run(ts, ts);
    // ✅ Explicit wiring point (requested): run closed failure
    emit?.({ kind: "run_failed", run_id: runId, story_key: runRow.story_key, stage_key: stageKey, error_text: errorText });
}
|
|
207
|
+
/**
 * Abort a run with a human-readable reason.
 *
 * Sets the run to 'aborted' (reason stored in error_text), reverts its
 * story to 'approved' and releases the lock, records a 'run.aborted'
 * event, and touches repo_state.last_event_at. No UI emit here.
 *
 * @throws Error when the run does not exist.
 */
export function abortRun(db, runId, reason) {
    const ts = nowISO();
    const runRow = db.prepare("SELECT * FROM runs WHERE run_id=?").get(runId);
    if (!runRow) {
        throw new Error(`Run not found: ${runId}`);
    }
    db.prepare("UPDATE runs SET status='aborted', error_text=?, updated_at=?, completed_at=? WHERE run_id=?").run(reason, ts, ts, runId);
    db.prepare("UPDATE stories SET state='approved', in_progress=0, locked_by_run_id=NULL, locked_at=NULL, updated_at=? WHERE story_key=?").run(ts, runRow.story_key);
    db.prepare("INSERT INTO events (event_id, run_id, stage_key, type, body_json, created_at) VALUES (?, ?, NULL, ?, ?, ?)").run(newEventId(), runId, EVENT_TYPES.RUN_ABORTED, JSON.stringify({ reason }), ts);
    db.prepare("UPDATE repo_state SET last_event_at=?, updated_at=? WHERE id=1").run(ts, ts);
}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import type { SqliteDb } from "../state/db";
|
|
2
|
+
/** Allocate and persist the next sequential story key (e.g. "S-0001"). */
export declare function nextStoryKey(db: SqliteDb): string;
/** Insert a new story row and return its generated story key. */
export declare function insertStory(db: SqliteDb, opts: {
    title: string;
    body_md?: string;
    priority?: number;
    epic_key?: string | null;
    state?: string;
}): string;
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import { nowISO } from "../shared/time";
|
|
2
|
+
/**
 * Allocate the next sequential story key, e.g. "S-0007".
 *
 * Reads story_keyseq (singleton id=1), defaulting to 1 when the row is
 * missing, and persists n+1. The persist uses an UPSERT: the previous
 * plain UPDATE matched zero rows when the sequence row did not yet exist
 * (the very case the `?? 1` fallback anticipates), so the counter never
 * advanced and every call would hand out "S-0001" again — duplicate keys.
 *
 * Not atomic by itself; the caller is expected to own the transaction.
 *
 * @param db SQLite handle exposing prepare().get()/.run().
 * @returns zero-padded story key "S-NNNN".
 */
export function nextStoryKey(db) {
    const row = db.prepare("SELECT next_story_num FROM story_keyseq WHERE id=1").get();
    const n = row?.next_story_num ?? 1;
    db.prepare("INSERT INTO story_keyseq (id, next_story_num) VALUES (1, ?) ON CONFLICT(id) DO UPDATE SET next_story_num=excluded.next_story_num").run(n + 1);
    return `S-${String(n).padStart(4, "0")}`;
}
|
|
8
|
+
/**
 * Insert a new story row with sensible defaults (state 'queued',
 * priority 0, empty body, no epic) and return its generated key.
 */
export function insertStory(db, opts) {
    const stamp = nowISO();
    const storyKey = nextStoryKey(db);
    const stmt = db.prepare("INSERT INTO stories (story_key, epic_key, title, body_md, state, priority, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?)");
    stmt.run(storyKey, opts.epic_key ?? null, opts.title, opts.body_md ?? "", opts.state ?? "queued", opts.priority ?? 0, stamp, stamp);
    return storyKey;
}
|
package/dist/state/db.d.ts
CHANGED
|
@@ -7,6 +7,7 @@ export declare function configurePragmas(db: SqliteDb, pragmas: Record<string, a
|
|
|
7
7
|
/** BEGIN IMMEDIATE transaction helper (re-entrant). */
|
|
8
8
|
export declare function withTx<T>(db: SqliteDb, fn: () => T, opts?: {
|
|
9
9
|
require?: boolean;
|
|
10
|
+
operation?: string;
|
|
10
11
|
}): T;
|
|
11
12
|
export declare function getSchemaVersion(db: SqliteDb): number;
|
|
12
13
|
export declare function ensureSchema(db: SqliteDb, opts?: {
|
package/dist/state/db.js
CHANGED
|
@@ -5,6 +5,7 @@ import { SCHEMA_SQL, SCHEMA_VERSION } from "./schema";
|
|
|
5
5
|
import { nowISO } from "../shared/time";
|
|
6
6
|
import { info, warn } from "../shared/log";
|
|
7
7
|
import { createDatabaseAdapter } from "./adapters";
|
|
8
|
+
import { recordTransaction } from "../shared/metrics";
|
|
8
9
|
/** Ensure directory exists for a file path. */
|
|
9
10
|
function ensureParentDir(filePath) {
|
|
10
11
|
const dir = path.dirname(filePath);
|
|
@@ -79,12 +80,16 @@ export function withTx(db, fn, opts) {
|
|
|
79
80
|
return fn();
|
|
80
81
|
}
|
|
81
82
|
const depth = getDepth(db);
|
|
83
|
+
const isNested = depth > 0;
|
|
84
|
+
const txRecorder = recordTransaction({ nestedDepth: depth, operation: opts?.operation });
|
|
82
85
|
if (depth === 0) {
|
|
86
|
+
const txStart = txRecorder.start();
|
|
83
87
|
db.exec("BEGIN IMMEDIATE");
|
|
84
88
|
setDepth(db, 1);
|
|
85
89
|
try {
|
|
86
90
|
const out = fn();
|
|
87
91
|
db.exec("COMMIT");
|
|
92
|
+
txRecorder.end(txStart, true);
|
|
88
93
|
return out;
|
|
89
94
|
}
|
|
90
95
|
catch (e) {
|
|
@@ -94,6 +99,7 @@ export function withTx(db, fn, opts) {
|
|
|
94
99
|
catch {
|
|
95
100
|
// ignore
|
|
96
101
|
}
|
|
102
|
+
txRecorder.end(txStart, false);
|
|
97
103
|
throw e;
|
|
98
104
|
}
|
|
99
105
|
finally {
|
|
@@ -103,11 +109,13 @@ export function withTx(db, fn, opts) {
|
|
|
103
109
|
// Nested: use SAVEPOINT
|
|
104
110
|
const nextDepth = depth + 1;
|
|
105
111
|
const sp = savepointName(nextDepth);
|
|
112
|
+
const txStart = txRecorder.start();
|
|
106
113
|
db.exec(`SAVEPOINT ${sp}`);
|
|
107
114
|
setDepth(db, nextDepth);
|
|
108
115
|
try {
|
|
109
116
|
const out = fn();
|
|
110
117
|
db.exec(`RELEASE SAVEPOINT ${sp}`);
|
|
118
|
+
txRecorder.end(txStart, true);
|
|
111
119
|
return out;
|
|
112
120
|
}
|
|
113
121
|
catch (e) {
|
|
@@ -123,6 +131,7 @@ export function withTx(db, fn, opts) {
|
|
|
123
131
|
catch {
|
|
124
132
|
// ignore
|
|
125
133
|
}
|
|
134
|
+
txRecorder.end(txStart, false);
|
|
126
135
|
throw e;
|
|
127
136
|
}
|
|
128
137
|
finally {
|
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
// src/state/repo-lock.ts
|
|
2
|
+
import fs from "node:fs";
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
/**
 * Acquire an exclusive repo-level lock by creating the lock file with the
 * "wx" flag (fails with EEXIST if it already exists). The current PID is
 * written into the file for diagnostics.
 *
 * @param {string} lockPath - Path of the lock file; parent dirs are created.
 * @returns {{ release: () => void }} Handle whose release() closes and
 *   removes the lock. release() is idempotent.
 * @throws {Error} If the lock is already held, or acquisition fails.
 */
export function acquireRepoLock(lockPath) {
    fs.mkdirSync(path.dirname(lockPath), { recursive: true });
    let fd;
    try {
        fd = fs.openSync(lockPath, "wx"); // exclusive create
    }
    catch (e) {
        const msg = e?.code === "EEXIST"
            ? `Astrocode lock is already held (${lockPath}). Another opencode process is running in this repo.`
            : `Failed to acquire lock (${lockPath}): ${e?.message ?? String(e)}`;
        throw new Error(msg);
    }
    try {
        fs.writeFileSync(fd, `${process.pid}\n`, "utf8");
    }
    catch (e) {
        // If writing the PID fails (e.g. disk full), don't leak the fd or
        // leave behind an empty lock file that blocks future acquisitions.
        try {
            fs.closeSync(fd);
        }
        catch { }
        try {
            fs.unlinkSync(lockPath);
        }
        catch { }
        throw e;
    }
    let released = false;
    return {
        release: () => {
            // Idempotent: a second release must not unlink a lock file that
            // another process may have re-created in the meantime.
            if (released) {
                return;
            }
            released = true;
            try {
                fs.closeSync(fd);
            }
            catch { }
            try {
                fs.unlinkSync(lockPath);
            }
            catch { }
        },
    };
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
|
|
2
|
+
import { openSqlite, withTx, ensureSchema } from '../../src/state/db';
|
|
3
|
+
import { metrics } from '../../src/shared/metrics';
|
|
4
|
+
import fs from 'node:fs';
|
|
5
|
+
// Integration tests for withTx: uses a real on-disk SQLite database so
// BEGIN IMMEDIATE / SAVEPOINT semantics are exercised end-to-end rather
// than mocked, and verifies the transaction metrics recorder alongside.
describe('Database Transaction Integration Tests', () => {
    const testDbPath = '.astro/test.db';
    let db;
    beforeEach(() => {
        // Clean up any existing test DB
        if (fs.existsSync(testDbPath)) {
            fs.unlinkSync(testDbPath);
        }
        db = openSqlite(testDbPath);
        ensureSchema(db);
        metrics.clear(); // Reset metrics before each test
    });
    afterEach(() => {
        if (db) {
            db.close();
        }
        if (fs.existsSync(testDbPath)) {
            fs.unlinkSync(testDbPath);
        }
        metrics.clear();
    });
    it('should handle nested transactions correctly', () => {
        let outerExecuted = false;
        let innerExecuted = false;
        const result = withTx(db, () => {
            outerExecuted = true;
            // Create a table to test nested operations
            db.exec('CREATE TABLE test_table (id INTEGER PRIMARY KEY, value TEXT)');
            // The inner withTx runs inside the same outer transaction
            // (as a SAVEPOINT), so both inserts commit together.
            const innerResult = withTx(db, () => {
                innerExecuted = true;
                db.prepare('INSERT INTO test_table (value) VALUES (?)').run('nested_value');
                return 'inner_success';
            });
            db.prepare('INSERT INTO test_table (value) VALUES (?)').run('outer_value');
            return `outer_success_${innerResult}`;
        });
        expect(outerExecuted).toBe(true);
        expect(innerExecuted).toBe(true);
        expect(result).toBe('outer_success_inner_success');
        // Verify both inserts succeeded
        const rows = db.prepare('SELECT value FROM test_table ORDER BY id').all();
        expect(rows).toHaveLength(2);
        expect(rows[0].value).toBe('nested_value');
        expect(rows[1].value).toBe('outer_value');
    });
    it('should collect transaction metrics', () => {
        withTx(db, () => {
            db.exec('CREATE TABLE metrics_test (id INTEGER PRIMARY KEY)');
            db.prepare('INSERT INTO metrics_test (id) VALUES (?)').run(1);
        }, { operation: 'test_operation' });
        const txStats = metrics.getTransactionStats();
        expect(txStats).toBeTruthy();
        expect(txStats.total).toBe(1);
        expect(txStats.successful).toBe(1);
        expect(txStats.failed).toBe(0);
        expect(txStats.successRate).toBe(1);
        // NOTE(review): asserts a strictly-positive duration; this could be
        // flaky if the recorder's clock rounds a sub-millisecond transaction
        // down to 0 — confirm the recorder's timer resolution.
        expect(txStats.avgDuration).toBeGreaterThan(0);
    });
    it('should handle transaction failures correctly', () => {
        let outerExecuted = false;
        expect(() => {
            withTx(db, () => {
                outerExecuted = true;
                db.exec('CREATE TABLE failure_test (id INTEGER PRIMARY KEY)');
                // This should cause a failure
                withTx(db, () => {
                    throw new Error('Intentional failure');
                });
            });
        }).toThrow('Intentional failure');
        expect(outerExecuted).toBe(true);
        // Verify transaction was rolled back - table should not exist
        // (the inner failure propagates and rolls back the outer BEGIN too).
        const tableExists = db.prepare(`
      SELECT name FROM sqlite_master
      WHERE type='table' AND name='failure_test'
    `).get();
        expect(tableExists).toBeUndefined();
    });
    it('should handle SAVEPOINT nesting correctly', () => {
        let level1Executed = false;
        let level2Executed = false;
        let level3Executed = false;
        // Three nesting levels: one outer BEGIN plus two stacked SAVEPOINTs.
        withTx(db, () => {
            level1Executed = true;
            db.exec('CREATE TABLE nesting_test (level INTEGER, value TEXT)');
            withTx(db, () => {
                level2Executed = true;
                db.prepare('INSERT INTO nesting_test (level, value) VALUES (?, ?)').run(2, 'level2');
                withTx(db, () => {
                    level3Executed = true;
                    db.prepare('INSERT INTO nesting_test (level, value) VALUES (?, ?)').run(3, 'level3');
                });
                db.prepare('INSERT INTO nesting_test (level, value) VALUES (?, ?)').run(2, 'level2_after');
            });
            db.prepare('INSERT INTO nesting_test (level, value) VALUES (?, ?)').run(1, 'level1');
        });
        expect(level1Executed).toBe(true);
        expect(level2Executed).toBe(true);
        expect(level3Executed).toBe(true);
        // rowid order reflects insertion order across all nesting levels.
        const rows = db.prepare('SELECT level, value FROM nesting_test ORDER BY rowid').all();
        expect(rows).toHaveLength(4);
        expect(rows.map(r => r.value)).toEqual(['level2', 'level3', 'level2_after', 'level1']);
    });
    it('should maintain metrics across multiple nested transactions', () => {
        // Execute multiple nested transactions
        for (let i = 0; i < 3; i++) {
            withTx(db, () => {
                withTx(db, () => {
                    withTx(db, () => {
                        // Do some work
                        db.prepare('SELECT 1').get();
                    });
                });
            }, { operation: `test_op_${i}` });
        }
        const txStats = metrics.getTransactionStats();
        expect(txStats).toBeTruthy();
        // 3 iterations x (1 outer BEGIN + 2 nested SAVEPOINTs) = 9 recorded
        // transactions in total.
        expect(txStats.total).toBe(9);
        expect(txStats.successful).toBe(9);
        expect(txStats.avgNestedDepth).toBeGreaterThan(0);
    });
});
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|