astrocode-workflow 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -0
- package/README.md +85 -0
- package/dist/agents/commands.d.ts +9 -0
- package/dist/agents/commands.js +121 -0
- package/dist/agents/prompts.d.ts +2 -0
- package/dist/agents/prompts.js +27 -0
- package/dist/agents/registry.d.ts +6 -0
- package/dist/agents/registry.js +223 -0
- package/dist/agents/types.d.ts +14 -0
- package/dist/agents/types.js +8 -0
- package/dist/config/config-handler.d.ts +4 -0
- package/dist/config/config-handler.js +46 -0
- package/dist/config/defaults.d.ts +3 -0
- package/dist/config/defaults.js +3 -0
- package/dist/config/loader.d.ts +11 -0
- package/dist/config/loader.js +48 -0
- package/dist/config/schema.d.ts +176 -0
- package/dist/config/schema.js +198 -0
- package/dist/hooks/continuation-enforcer.d.ts +26 -0
- package/dist/hooks/continuation-enforcer.js +166 -0
- package/dist/hooks/tool-output-truncator.d.ts +17 -0
- package/dist/hooks/tool-output-truncator.js +56 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +108 -0
- package/dist/shared/deep-merge.d.ts +8 -0
- package/dist/shared/deep-merge.js +25 -0
- package/dist/shared/hash.d.ts +1 -0
- package/dist/shared/hash.js +4 -0
- package/dist/shared/log.d.ts +7 -0
- package/dist/shared/log.js +24 -0
- package/dist/shared/model-tuning.d.ts +9 -0
- package/dist/shared/model-tuning.js +28 -0
- package/dist/shared/paths.d.ts +19 -0
- package/dist/shared/paths.js +51 -0
- package/dist/shared/text.d.ts +4 -0
- package/dist/shared/text.js +19 -0
- package/dist/shared/time.d.ts +1 -0
- package/dist/shared/time.js +3 -0
- package/dist/state/adapters/index.d.ts +39 -0
- package/dist/state/adapters/index.js +119 -0
- package/dist/state/db.d.ts +17 -0
- package/dist/state/db.js +83 -0
- package/dist/state/ids.d.ts +8 -0
- package/dist/state/ids.js +25 -0
- package/dist/state/schema.d.ts +2 -0
- package/dist/state/schema.js +247 -0
- package/dist/state/types.d.ts +70 -0
- package/dist/state/types.js +1 -0
- package/dist/tools/artifacts.d.ts +18 -0
- package/dist/tools/artifacts.js +71 -0
- package/dist/tools/index.d.ts +8 -0
- package/dist/tools/index.js +100 -0
- package/dist/tools/init.d.ts +8 -0
- package/dist/tools/init.js +41 -0
- package/dist/tools/injects.d.ts +23 -0
- package/dist/tools/injects.js +99 -0
- package/dist/tools/repair.d.ts +8 -0
- package/dist/tools/repair.js +25 -0
- package/dist/tools/run.d.ts +13 -0
- package/dist/tools/run.js +54 -0
- package/dist/tools/spec.d.ts +13 -0
- package/dist/tools/spec.js +41 -0
- package/dist/tools/stage.d.ts +23 -0
- package/dist/tools/stage.js +284 -0
- package/dist/tools/status.d.ts +8 -0
- package/dist/tools/status.js +107 -0
- package/dist/tools/story.d.ts +23 -0
- package/dist/tools/story.js +85 -0
- package/dist/tools/workflow.d.ts +8 -0
- package/dist/tools/workflow.js +197 -0
- package/dist/ui/inject.d.ts +5 -0
- package/dist/ui/inject.js +9 -0
- package/dist/ui/toasts.d.ts +13 -0
- package/dist/ui/toasts.js +39 -0
- package/dist/workflow/artifacts.d.ts +24 -0
- package/dist/workflow/artifacts.js +45 -0
- package/dist/workflow/baton.d.ts +66 -0
- package/dist/workflow/baton.js +101 -0
- package/dist/workflow/context.d.ts +12 -0
- package/dist/workflow/context.js +67 -0
- package/dist/workflow/directives.d.ts +37 -0
- package/dist/workflow/directives.js +111 -0
- package/dist/workflow/repair.d.ts +8 -0
- package/dist/workflow/repair.js +99 -0
- package/dist/workflow/state-machine.d.ts +43 -0
- package/dist/workflow/state-machine.js +127 -0
- package/dist/workflow/story-helpers.d.ts +9 -0
- package/dist/workflow/story-helpers.js +13 -0
- package/package.json +32 -0
- package/src/agents/commands.ts +137 -0
- package/src/agents/prompts.ts +28 -0
- package/src/agents/registry.ts +310 -0
- package/src/agents/types.ts +31 -0
- package/src/config/config-handler.ts +48 -0
- package/src/config/defaults.ts +4 -0
- package/src/config/loader.ts +55 -0
- package/src/config/schema.ts +236 -0
- package/src/hooks/continuation-enforcer.ts +217 -0
- package/src/hooks/tool-output-truncator.ts +82 -0
- package/src/index.ts +131 -0
- package/src/shared/deep-merge.ts +28 -0
- package/src/shared/hash.ts +5 -0
- package/src/shared/log.ts +30 -0
- package/src/shared/model-tuning.ts +48 -0
- package/src/shared/paths.ts +70 -0
- package/src/shared/text.ts +20 -0
- package/src/shared/time.ts +3 -0
- package/src/shims.node.d.ts +20 -0
- package/src/state/adapters/index.ts +155 -0
- package/src/state/db.ts +105 -0
- package/src/state/ids.ts +33 -0
- package/src/state/schema.ts +249 -0
- package/src/state/types.ts +76 -0
- package/src/tools/artifacts.ts +83 -0
- package/src/tools/index.ts +111 -0
- package/src/tools/init.ts +50 -0
- package/src/tools/injects.ts +108 -0
- package/src/tools/repair.ts +31 -0
- package/src/tools/run.ts +62 -0
- package/src/tools/spec.ts +50 -0
- package/src/tools/stage.ts +361 -0
- package/src/tools/status.ts +119 -0
- package/src/tools/story.ts +106 -0
- package/src/tools/workflow.ts +241 -0
- package/src/ui/inject.ts +13 -0
- package/src/ui/toasts.ts +48 -0
- package/src/workflow/artifacts.ts +69 -0
- package/src/workflow/baton.ts +141 -0
- package/src/workflow/context.ts +86 -0
- package/src/workflow/directives.ts +170 -0
- package/src/workflow/repair.ts +138 -0
- package/src/workflow/state-machine.ts +194 -0
- package/src/workflow/story-helpers.ts +18 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
import { loadAstrocodeConfig } from "./config/loader";
|
|
2
|
+
import { createConfigHandler } from "./config/config-handler";
|
|
3
|
+
import { openSqlite, configurePragmas, ensureSchema } from "./state/db";
|
|
4
|
+
import { getAstroPaths, ensureAstroDirs } from "./shared/paths";
|
|
5
|
+
import { createAstroTools } from "./tools";
|
|
6
|
+
import { createContinuationEnforcer } from "./hooks/continuation-enforcer";
|
|
7
|
+
import { createToolOutputTruncatorHook } from "./hooks/tool-output-truncator";
|
|
8
|
+
import { createToastManager } from "./ui/toasts";
|
|
9
|
+
console.log("Astrocode plugin loading...");
|
|
10
|
+
const Astrocode = async (ctx) => {
|
|
11
|
+
const repoRoot = ctx.directory;
|
|
12
|
+
// Always load config first - this provides defaults even in limited mode
|
|
13
|
+
let pluginConfig = loadAstrocodeConfig(repoRoot);
|
|
14
|
+
// Always ensure .astro directories exist, even in limited mode
|
|
15
|
+
const paths = getAstroPaths(repoRoot, pluginConfig.db.path);
|
|
16
|
+
ensureAstroDirs(paths);
|
|
17
|
+
let db = null;
|
|
18
|
+
let tools = null;
|
|
19
|
+
let configHandler = null;
|
|
20
|
+
let continuation = null;
|
|
21
|
+
let truncatorHook = null;
|
|
22
|
+
let toasts = null;
|
|
23
|
+
try {
|
|
24
|
+
db = openSqlite(paths.dbPath, { busyTimeoutMs: pluginConfig.db.busy_timeout_ms });
|
|
25
|
+
configurePragmas(db, pluginConfig.db.pragmas);
|
|
26
|
+
ensureSchema(db, { allowAutoMigrate: pluginConfig.db.allow_auto_migrate, failOnDowngrade: pluginConfig.db.fail_on_downgrade });
|
|
27
|
+
// Database initialized successfully
|
|
28
|
+
configHandler = createConfigHandler({ pluginConfig });
|
|
29
|
+
tools = createAstroTools({ ctx, config: pluginConfig, db });
|
|
30
|
+
continuation = createContinuationEnforcer({ ctx, config: pluginConfig, db });
|
|
31
|
+
truncatorHook = createToolOutputTruncatorHook({ ctx, config: pluginConfig, db });
|
|
32
|
+
toasts = createToastManager({ ctx, throttleMs: pluginConfig.ui.toasts.throttle_ms });
|
|
33
|
+
}
|
|
34
|
+
catch (e) {
|
|
35
|
+
// Database initialization failed - setup limited mode
|
|
36
|
+
// Reload config to ensure all defaults are present
|
|
37
|
+
pluginConfig = loadAstrocodeConfig(repoRoot);
|
|
38
|
+
// Modify config for limited mode
|
|
39
|
+
pluginConfig.disabled_hooks = [...(pluginConfig.disabled_hooks || []), "continuation-enforcer", "tool-output-truncator"];
|
|
40
|
+
pluginConfig.ui.toasts.enabled = false;
|
|
41
|
+
// Create limited functionality
|
|
42
|
+
db = null;
|
|
43
|
+
configHandler = createConfigHandler({ pluginConfig });
|
|
44
|
+
tools = createAstroTools({ ctx, config: pluginConfig, db });
|
|
45
|
+
continuation = null;
|
|
46
|
+
truncatorHook = null;
|
|
47
|
+
toasts = null;
|
|
48
|
+
}
|
|
49
|
+
return {
|
|
50
|
+
name: "Astrocode",
|
|
51
|
+
// Merge agents + slash commands into system config
|
|
52
|
+
config: configHandler,
|
|
53
|
+
// Register tools
|
|
54
|
+
tool: tools,
|
|
55
|
+
// Limit created subagents from spawning more subagents (OMO-style).
|
|
56
|
+
"tool.execute.before": async (input, output) => {
|
|
57
|
+
if (!pluginConfig.permissions.enforce_task_tool_restrictions)
|
|
58
|
+
return;
|
|
59
|
+
if (input.tool !== "task")
|
|
60
|
+
return;
|
|
61
|
+
output.args = output.args ?? {};
|
|
62
|
+
const toolsMap = { ...(output.args.tools ?? {}) };
|
|
63
|
+
if (pluginConfig.permissions.deny_delegate_task_in_subagents) {
|
|
64
|
+
toolsMap.delegate_task = false;
|
|
65
|
+
}
|
|
66
|
+
output.args.tools = toolsMap;
|
|
67
|
+
},
|
|
68
|
+
"tool.execute.after": async (input, output) => {
|
|
69
|
+
// Truncate huge tool outputs to artifacts
|
|
70
|
+
if (truncatorHook && !pluginConfig.disabled_hooks.includes("tool-output-truncator")) {
|
|
71
|
+
await truncatorHook(input, output);
|
|
72
|
+
}
|
|
73
|
+
// Schedule continuation (do not immediately spam)
|
|
74
|
+
if (continuation && !pluginConfig.disabled_hooks.includes("continuation-enforcer")) {
|
|
75
|
+
await continuation.onToolAfter(input);
|
|
76
|
+
}
|
|
77
|
+
},
|
|
78
|
+
"chat.message": async (input, output) => {
|
|
79
|
+
if (continuation && !pluginConfig.disabled_hooks.includes("continuation-enforcer")) {
|
|
80
|
+
await continuation.onChatMessage(input);
|
|
81
|
+
}
|
|
82
|
+
return output;
|
|
83
|
+
},
|
|
84
|
+
event: async (input) => {
|
|
85
|
+
if (continuation && !pluginConfig.disabled_hooks.includes("continuation-enforcer")) {
|
|
86
|
+
await continuation.onEvent(input);
|
|
87
|
+
}
|
|
88
|
+
},
|
|
89
|
+
// Best-effort cleanup
|
|
90
|
+
close: async () => {
|
|
91
|
+
try {
|
|
92
|
+
db.close();
|
|
93
|
+
}
|
|
94
|
+
catch {
|
|
95
|
+
// ignore
|
|
96
|
+
}
|
|
97
|
+
if (toasts && pluginConfig.ui.toasts.enabled) {
|
|
98
|
+
try {
|
|
99
|
+
await toasts.show({ title: "Astrocode", message: "Plugin closed", variant: "info" });
|
|
100
|
+
}
|
|
101
|
+
catch {
|
|
102
|
+
// ignore
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
},
|
|
106
|
+
};
|
|
107
|
+
};
|
|
108
|
+
export default Astrocode;
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
/** True when `v` is a non-null, non-array object (a "plain" object for merge purposes). */
export function isPlainObject(v) {
    if (v === null) {
        return false;
    }
    if (Array.isArray(v)) {
        return false;
    }
    return typeof v === "object";
}
|
|
4
|
+
/**
 * Deep merge `patch` into `base`:
 * - plain objects merge recursively
 * - arrays replace wholesale
 * - primitives overwrite
 * Inputs are never written to; a fresh top-level object is returned.
 */
export function deepMerge(base, patch) {
    // Local plain-object check (non-null, non-array object).
    const mergeable = (v) => typeof v === "object" && v !== null && !Array.isArray(v);
    if (!mergeable(base) || !mergeable(patch)) {
        return patch ?? base;
    }
    const merged = { ...base };
    for (const key of Object.keys(patch)) {
        const incoming = patch[key];
        merged[key] = mergeable(merged[key]) && mergeable(incoming)
            ? deepMerge(merged[key], incoming)
            : incoming;
    }
    return merged;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/** SHA-256 digest of `input`, hex-encoded. (Implementation in shared/hash.js — not shown here; presumably node:crypto, confirm encoding case there.) */
export declare function sha256Hex(input: string | Buffer): string;
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
/** Log severity levels, in increasing order of importance. */
export type LogLevel = "debug" | "info" | "warn" | "error";
/** Sets the minimum level that will be emitted. */
export declare function setLogLevel(level: LogLevel): void;
/** Emits `message` (and optional `meta`) when `level` is at or above the current level. */
export declare function log(level: LogLevel, message: string, meta?: unknown): void;
// Per-level convenience wrappers around log().
export declare const debug: (msg: string, meta?: unknown) => void;
export declare const info: (msg: string, meta?: unknown) => void;
export declare const warn: (msg: string, meta?: unknown) => void;
export declare const error: (msg: string, meta?: unknown) => void;
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
// Numeric rank per level; higher = more severe.
const ORDER = { debug: 10, info: 20, warn: 30, error: 40 };
/**
 * Coerce an arbitrary string to a known LogLevel (case-insensitive);
 * unknown values fall back to "info".
 */
function normalizeLevel(level) {
    const candidate = String(level).toLowerCase();
    return Object.hasOwn(ORDER, candidate) ? candidate : "info";
}
// Seeded from ASTRO_LOG_LEVEL. Fix: previously an invalid env value (or a bad
// setLogLevel argument) made ORDER[CURRENT_LEVEL] undefined, so shouldLog()
// compared against NaN and returned false for EVERY level — logging, including
// errors, was silently disabled. Normalizing the level closes that hole.
let CURRENT_LEVEL = normalizeLevel(process.env.ASTRO_LOG_LEVEL ?? "info");
/** Sets the minimum level that will be emitted (invalid input falls back to "info"). */
export function setLogLevel(level) {
    CURRENT_LEVEL = normalizeLevel(level);
}
/** True when `level` is at or above the current threshold. */
function shouldLog(level) {
    return ORDER[level] >= ORDER[CURRENT_LEVEL];
}
/**
 * Emits `message` (and optional `meta`) via console.log with an
 * `[astrocode:<level>]` prefix, when `level` passes the threshold.
 */
export function log(level, message, meta) {
    if (!shouldLog(level))
        return;
    const prefix = `[astrocode:${level}]`;
    if (meta === undefined) {
        // eslint-disable-next-line no-console
        console.log(prefix, message);
        return;
    }
    // eslint-disable-next-line no-console
    console.log(prefix, message, meta);
}
// Per-level convenience wrappers.
export const debug = (msg, meta) => log("debug", msg, meta);
export const info = (msg, meta) => log("info", msg, meta);
export const warn = (msg, meta) => log("warn", msg, meta);
export const error = (msg, meta) => log("error", msg, meta);
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
import type { AgentConfig } from "@opencode-ai/sdk";
/** Named cognition presets used to select per-stage reasoning/verbosity tuning. */
export type AstroCognitionPreset = "orchestrator" | "frame" | "plan" | "spec" | "implement" | "review" | "verify" | "close" | "utility";
/**
 * OMO-style: if GPT model => reasoningEffort/textVerbosity.
 * Otherwise => Anthropic-style thinking budget.
 *
 * Not all providers honor these fields; safe to include.
 * Returns a new config; the input is not mutated (see model-tuning.js).
 */
export declare function applyModelTuning(base: AgentConfig, preset: AstroCognitionPreset): AgentConfig;
|
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
import { isGptModel } from "../agents/types";
/**
 * OMO-style: if GPT model => reasoningEffort/textVerbosity.
 * Otherwise => Anthropic-style thinking budget.
 *
 * Not all providers honor these fields; safe to include.
 */
export function applyModelTuning(base, preset) {
    const model = base.model ?? "";
    if (!model) {
        return base;
    }
    if (isGptModel(model)) {
        // GPT-style knobs; drop any Anthropic "thinking" field that may be present.
        const { thinking, ...gptBase } = base;
        const reasoningEffort = preset === "implement" ? "high" : "medium";
        let textVerbosity = "medium";
        if (preset === "orchestrator") {
            textVerbosity = "low";
        }
        else if (preset === "review") {
            textVerbosity = "high";
        }
        return { ...gptBase, reasoningEffort, textVerbosity };
    }
    // Anthropic-style thinking budget; drop GPT-only knobs.
    const budgetByPreset = { implement: 32_000, review: 24_000, orchestrator: 16_000 };
    const budgetTokens = budgetByPreset[preset] ?? 12_000;
    const { reasoningEffort, textVerbosity, ...anthropicBase } = base;
    return { ...anthropicBase, thinking: { type: "enabled", budgetTokens } };
}
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
/** Normalize to posix-like separators for DB paths. */
export declare function toPosix(p: string): string;
/** Create directory `p` (and any missing parents). */
export declare function ensureDir(p: string): void;
/** Join path segments under `root`. */
export declare function joinRepo(root: string, ...parts: string[]): string;
/** Well-known locations under a repo's `.astro` directory. */
export type AstroPaths = {
    repoRoot: string;
    astroRoot: string;
    dbPath: string;
    runsDir: string;
    specPath: string;
    toolOutputDir: string;
    configPathPreferred: string;
    configPathFallback: string;
};
/** Computes all .astro paths for `repoRoot`; `dbPathOverride` is joined relative to the repo root. */
export declare function getAstroPaths(repoRoot: string, dbPathOverride?: string): AstroPaths;
/** Creates the .astro root, runs, and tool_output directories. */
export declare function ensureAstroDirs(paths: AstroPaths): void;
/** Directory for a single run: <repo>/.astro/runs/<runId>. */
export declare function runDir(paths: AstroPaths, runId: string): string;
/** Directory for a single stage: <repo>/.astro/runs/<runId>/<stageKey>. */
export declare function stageDir(paths: AstroPaths, runId: string, stageKey: string): string;
/** Throws unless `filePath` resolves inside both `.astro` and the repo root. */
export declare function assertInsideAstro(repoRoot: string, filePath: string): void;
|
|
@@ -0,0 +1,51 @@
|
|
|
1
|
+
import path from "node:path";
import fs from "node:fs";
/** Normalize to posix-like separators for DB paths. */
export function toPosix(p) {
    return p.split(path.sep).join("/");
}
/** Create directory `p` (and any missing parents). */
export function ensureDir(p) {
    fs.mkdirSync(p, { recursive: true });
}
/** Join path segments under `root`. */
export function joinRepo(root, ...parts) {
    return path.join(root, ...parts);
}
/**
 * Compute all well-known `.astro` locations for a repo.
 * `dbPathOverride`, when provided, is joined relative to the repo root.
 */
export function getAstroPaths(repoRoot, dbPathOverride) {
    return {
        repoRoot,
        astroRoot: joinRepo(repoRoot, ".astro"),
        dbPath: dbPathOverride
            ? joinRepo(repoRoot, dbPathOverride)
            : joinRepo(repoRoot, ".astro", "astro.db"),
        runsDir: joinRepo(repoRoot, ".astro", "runs"),
        specPath: joinRepo(repoRoot, ".astro", "spec.md"),
        toolOutputDir: joinRepo(repoRoot, ".astro", "tool_output"),
        configPathPreferred: joinRepo(repoRoot, ".astro", "astrocode.config.jsonc"),
        configPathFallback: joinRepo(repoRoot, "astrocode.config.jsonc"),
    };
}
/** Create the .astro root, runs, and tool_output directories. */
export function ensureAstroDirs(paths) {
    for (const dir of [paths.astroRoot, paths.runsDir, paths.toolOutputDir]) {
        ensureDir(dir);
    }
}
/** Directory for a single run. */
export function runDir(paths, runId) {
    return joinRepo(paths.repoRoot, ".astro", "runs", runId);
}
/** Directory for a single stage within a run. */
export function stageDir(paths, runId, stageKey) {
    return joinRepo(paths.repoRoot, ".astro", "runs", runId, stageKey);
}
/** Throws unless `filePath` resolves inside both `.astro` and the repo root. */
export function assertInsideAstro(repoRoot, filePath) {
    const resolvedRepo = path.resolve(repoRoot);
    const resolvedFile = path.resolve(filePath);
    const resolvedAstro = path.resolve(path.join(repoRoot, ".astro"));
    const within = (child, parent) => child === parent || child.startsWith(parent + path.sep);
    if (!within(resolvedFile, resolvedAstro)) {
        throw new Error(`Refusing to write outside .astro: ${filePath}`);
    }
    if (!within(resolvedFile, resolvedRepo)) {
        throw new Error(`Refusing to write outside repo root: ${filePath}`);
    }
}
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
/** Convert CRLF/CR line endings to LF. */
export declare function normalizeNewlines(s: string): string;
/** Keep at most `maxLines` lines, appending an ellipsis line when truncated. */
export declare function clampLines(md: string, maxLines: number): string;
/** Keep at most `maxChars` characters, appending a "(truncated)" marker when cut. */
export declare function clampChars(s: string, maxChars: number): string;
/** Remove one surrounding triple-backtick fence pair if the whole string is fenced. */
export declare function stripCodeFences(md: string): string;
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
/** Convert CRLF/CR line endings to LF. */
export function normalizeNewlines(s) {
    return s.replace(/\r\n/g, "\n").replace(/\r/g, "\n");
}
/** Keep at most `maxLines` lines of `md`, appending an ellipsis line when cut. */
export function clampLines(md, maxLines) {
    const allLines = normalizeNewlines(md).split("\n");
    if (allLines.length <= maxLines) {
        return md.trimEnd();
    }
    const kept = allLines.slice(0, maxLines).join("\n");
    return `${kept.trimEnd()}\n…`;
}
/** Keep at most `maxChars` characters of `s`, appending a truncation marker when cut. */
export function clampChars(s, maxChars) {
    return s.length <= maxChars ? s : `${s.slice(0, maxChars)}\n…(truncated)`;
}
/** Light helper: remove surrounding triple backticks if present. */
export function stripCodeFences(md) {
    const fenced = md.match(/^```[a-zA-Z0-9_-]*\n([\s\S]*)\n```\s*$/);
    return fenced ? fenced[1] : md;
}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
/** Current time as an ISO-8601 string. (Implementation in shared/time.js — not shown here; presumably Date#toISOString, confirm there.) */
export declare function nowISO(): string;
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
/** Minimal SQLite connection surface shared by all adapters. */
export interface DatabaseConnection {
    pragma(sql: string): void;
    exec(sql: string): void;
    prepare(sql: string): Statement;
    close(): void;
}
/** Prepared-statement surface shared by all adapters. */
export interface Statement {
    run(...params: any[]): {
        changes: number;
        lastInsertRowid: any;
    };
    get(...params: any[]): any;
    all(...params: any[]): any[];
}
/** Per-runtime SQLite backend: availability probe plus connection factory. */
export interface DatabaseAdapter {
    isAvailable(): boolean;
    open(path: string, opts?: {
        busyTimeoutMs?: number;
    }): DatabaseConnection;
}
/** No-op adapter: every operation is a no-op, nothing persists (see adapters/index.js). */
export declare class MockDatabaseAdapter implements DatabaseAdapter {
    isAvailable(): boolean;
    open(): DatabaseConnection;
}
/** Adapter backed by bun:sqlite (available only under the Bun runtime). */
export declare class BunSqliteAdapter implements DatabaseAdapter {
    isAvailable(): boolean;
    open(path: string, opts?: {
        busyTimeoutMs?: number;
    }): DatabaseConnection;
}
/** Adapter backed by better-sqlite3; unavailable when that module cannot be required. */
export declare class BetterSqliteAdapter implements DatabaseAdapter {
    private Database;
    constructor();
    isAvailable(): boolean;
    open(path: string, opts?: {
        busyTimeoutMs?: number;
    }): DatabaseConnection;
}
/** Picks the adapter for the current runtime; throws when no backend is available. */
export declare function createDatabaseAdapter(): DatabaseAdapter;
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
import { warn } from "../../shared/log";
|
|
2
|
+
// Mock adapter for when no database is available
export class MockDatabaseAdapter {
    /** The mock backend can always be constructed. */
    isAvailable() {
        return true; // Mock is always available
    }
    /** Returns a connection whose operations are all no-ops; nothing persists. */
    open() {
        warn("Using mock database - no persistence available");
        return {
            pragma: () => { },
            exec: () => { },
            prepare: () => ({
                run: () => ({ changes: 0, lastInsertRowid: null }),
                get: () => null,
                all: () => [],
            }),
            close: () => { },
        };
    }
}
|
|
21
|
+
// Bun SQLite adapter - singleton pattern to avoid multiple initializations
let bunDatabase = null;
let bunDatabaseInitialized = false;
/** Lazily resolve bun:sqlite's Database class once; null when not under Bun. */
function initializeBunDatabase() {
    if (!bunDatabaseInitialized) {
        bunDatabaseInitialized = true;
        try {
            // Only touch bun:sqlite when actually running under Bun.
            if (typeof globalThis.Bun !== 'undefined') {
                const { Database } = require('bun:sqlite');
                bunDatabase = Database;
            }
        }
        catch (e) {
            // bun:sqlite not available
        }
    }
    return bunDatabase;
}
|
|
41
|
+
// Bun SQLite adapter
export class BunSqliteAdapter {
    /** Available only when bun:sqlite resolved (i.e. running under Bun). */
    isAvailable() {
        return Boolean(initializeBunDatabase());
    }
    /** Opens `path` with bun:sqlite and wraps it in the common connection surface. */
    open(path, opts) {
        const Database = initializeBunDatabase();
        if (!Database) {
            throw new Error("bun:sqlite not available");
        }
        const db = new Database(path);
        // Configure database
        if (opts?.busyTimeoutMs) {
            db.exec(`PRAGMA busy_timeout = ${opts.busyTimeoutMs}`);
        }
        const wrapStatement = (stmt) => ({
            run: (...params) => stmt.run(...params),
            get: (...params) => stmt.get(...params),
            all: (...params) => stmt.all(...params),
        });
        return {
            // bun:sqlite has no dedicated pragma API; issue it as a statement.
            pragma: (sql) => db.exec(`PRAGMA ${sql}`),
            exec: (sql) => db.exec(sql),
            prepare: (sql) => wrapStatement(db.prepare(sql)),
            close: () => db.close(),
        };
    }
}
|
|
71
|
+
// Better SQLite3 adapter
export class BetterSqliteAdapter {
    // Holds the better-sqlite3 Database constructor, or null when unavailable.
    Database = null;
    constructor() {
        try {
            this.Database = require("better-sqlite3");
        }
        catch (e) {
            // better-sqlite3 not available
        }
    }
    /** True when better-sqlite3 was resolved at construction time. */
    isAvailable() {
        return Boolean(this.Database);
    }
    /** Opens `path` with better-sqlite3 and wraps it in the common connection surface. */
    open(path, opts) {
        if (!this.Database) {
            throw new Error("better-sqlite3 not available");
        }
        const db = new this.Database(path);
        // Set busy timeout if specified
        if (opts?.busyTimeoutMs) {
            db.pragma(`busy_timeout = ${opts.busyTimeoutMs}`);
        }
        return {
            // better-sqlite3's pragma() takes the body without the PRAGMA keyword.
            pragma: (sql) => db.pragma(sql.replace('PRAGMA ', '')),
            exec: (sql) => db.exec(sql),
            prepare: (sql) => db.prepare(sql),
            close: () => db.close(),
        };
    }
}
|
|
102
|
+
// Factory function to create the appropriate adapter
export function createDatabaseAdapter() {
    const runningOnBun = typeof globalThis.Bun !== 'undefined';
    const adapter = runningOnBun ? new BunSqliteAdapter() : new BetterSqliteAdapter();
    if (adapter.isAvailable()) {
        return adapter;
    }
    // Same runtime-specific errors as before: no silent fallback to the mock.
    throw new Error(runningOnBun
        ? "Bun runtime detected but bun:sqlite not available"
        : "Node.js runtime detected but better-sqlite3 not available");
}
|
|
@@ -0,0 +1,17 @@
|
|
|
1
|
+
import { DatabaseConnection } from "./adapters";
|
|
2
|
+
export type SqliteDb = DatabaseConnection;
|
|
3
|
+
export declare function openSqlite(dbPath: string, opts?: {
|
|
4
|
+
busyTimeoutMs?: number;
|
|
5
|
+
}): SqliteDb;
|
|
6
|
+
export declare function configurePragmas(db: SqliteDb, pragmas: {
|
|
7
|
+
journal_mode?: "WAL" | "DELETE";
|
|
8
|
+
synchronous?: "NORMAL" | "FULL" | "OFF";
|
|
9
|
+
foreign_keys?: boolean;
|
|
10
|
+
temp_store?: "DEFAULT" | "MEMORY" | "FILE";
|
|
11
|
+
}): void;
|
|
12
|
+
/** BEGIN IMMEDIATE transaction helper. */
|
|
13
|
+
export declare function withTx<T>(db: SqliteDb, fn: () => T): T;
|
|
14
|
+
export declare function ensureSchema(db: SqliteDb, opts?: {
|
|
15
|
+
allowAutoMigrate?: boolean;
|
|
16
|
+
failOnDowngrade?: boolean;
|
|
17
|
+
}): void;
|
package/dist/state/db.js
ADDED
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import { SCHEMA_SQL, SCHEMA_VERSION } from "./schema";
|
|
4
|
+
import { nowISO } from "../shared/time";
|
|
5
|
+
import { createDatabaseAdapter } from "./adapters";
|
|
6
|
+
/** Ensure directory exists for a file path. */
function ensureParentDir(filePath) {
    fs.mkdirSync(path.dirname(filePath), { recursive: true });
}
/** Open the SQLite DB at `dbPath`, creating parent directories as needed. */
export function openSqlite(dbPath, opts) {
    ensureParentDir(dbPath);
    return createDatabaseAdapter().open(dbPath, opts);
}
|
|
17
|
+
/**
 * Applies only the PRAGMA settings actually present in `pragmas`,
 * in a fixed order: journal_mode, synchronous, foreign_keys, temp_store.
 */
export function configurePragmas(db, pragmas) {
    const statements = [];
    if (pragmas.journal_mode) {
        statements.push(`journal_mode = ${pragmas.journal_mode}`);
    }
    if (pragmas.synchronous) {
        statements.push(`synchronous = ${pragmas.synchronous}`);
    }
    if (typeof pragmas.foreign_keys === "boolean") {
        statements.push(`foreign_keys = ${pragmas.foreign_keys ? "ON" : "OFF"}`);
    }
    if (pragmas.temp_store) {
        statements.push(`temp_store = ${pragmas.temp_store}`);
    }
    for (const statement of statements) {
        db.pragma(statement);
    }
}
|
|
27
|
+
/**
 * BEGIN IMMEDIATE transaction helper.
 *
 * Runs `fn` inside an IMMEDIATE transaction on `db`, committing on success
 * and rolling back (best-effort) before rethrowing on failure.
 *
 * Fix: the previous version called createDatabaseAdapter() on every invocation
 * purely to probe availability. That factory throws (it never returns an
 * unavailable adapter), so the `!isAvailable()` early-return was unreachable
 * dead code — and the throwaway adapter construction could itself throw in an
 * environment without a SQLite backend, even though a perfectly usable `db`
 * handle had been passed in. The helper now drives the transaction directly
 * off the provided connection; a mock/no-op connection simply makes
 * BEGIN/COMMIT no-ops, preserving the intended "just run fn" behavior.
 */
export function withTx(db, fn) {
    db.exec("BEGIN IMMEDIATE");
    try {
        const result = fn();
        db.exec("COMMIT");
        return result;
    }
    catch (e) {
        try {
            db.exec("ROLLBACK");
        }
        catch {
            // ignore rollback failures; the original error matters more
        }
        throw e;
    }
}
|
|
50
|
+
/**
 * Creates or upgrades the Astrocode schema on `db`.
 *
 * On first boot: runs SCHEMA_SQL, seeds the singleton repo_state row with
 * SCHEMA_VERSION, and initializes the story-key sequence. On later boots:
 * compares the stored schema_version against SCHEMA_VERSION and either
 * no-ops, refuses a downgrade, or (when allowed) bumps the stored version.
 *
 * opts.allowAutoMigrate (default true): permit bumping an older stored version.
 * opts.failOnDowngrade (default true): throw when the DB is newer than the plugin.
 */
export function ensureSchema(db, opts) {
    // NOTE(review): this constructs a fresh adapter only to probe availability;
    // createDatabaseAdapter() throws instead of returning an unavailable adapter
    // (see adapters/index.js), so this early-return looks unreachable — confirm.
    const adapter = createDatabaseAdapter();
    if (!adapter.isAvailable()) {
        // Silent skip for mock adapter
        return;
    }
    try {
        // SCHEMA_SQL is additive (CREATE ... IF NOT EXISTS), so re-running is safe.
        db.exec(SCHEMA_SQL);
        const row = db.prepare("SELECT schema_version FROM repo_state WHERE id = 1").get();
        if (!row) {
            // First boot: seed the singleton repo_state row (id fixed at 1).
            const now = nowISO();
            db.prepare("INSERT INTO repo_state (id, schema_version, created_at, updated_at) VALUES (1, ?, ?, ?)").run(SCHEMA_VERSION, now, now);
            // Initialize story key seq
            db.prepare("INSERT OR IGNORE INTO story_keyseq (id, next_story_num) VALUES (1, 1)").run();
            return;
        }
        const currentVersion = row.schema_version ?? 0;
        if (currentVersion === SCHEMA_VERSION)
            return;
        if (currentVersion > SCHEMA_VERSION && (opts?.failOnDowngrade ?? true)) {
            throw new Error(`Astrocode DB schema_version ${currentVersion} is newer than this plugin (${SCHEMA_VERSION}). Refusing to downgrade.`);
        }
        if (currentVersion < SCHEMA_VERSION) {
            if (!(opts?.allowAutoMigrate ?? true)) {
                throw new Error(`Astrocode DB schema_version ${currentVersion} is older than required (${SCHEMA_VERSION}). Auto-migrate disabled.`);
            }
            // Additive schema: SCHEMA_SQL already created new tables/indexes if missing.
            db.prepare("UPDATE repo_state SET schema_version = ?, updated_at = ? WHERE id = 1").run(SCHEMA_VERSION, nowISO());
        }
    }
    catch (e) {
        // Schema operations might fail on mock adapter, silently ignore
        // NOTE(review): this blanket catch also swallows the downgrade/migrate
        // errors thrown just above AND any genuine SQL failure, so callers never
        // learn the schema is unusable — confirm whether version errors should
        // propagate past this catch.
    }
}
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
/** Returns a fresh identifier of the form `<prefix>_<uuid>`. */
export declare function newId(prefix: string): string;
// Convenience wrappers with fixed prefixes:
// run_, stage_, art_, tool_, evt_, snap_, batch_ (see ids.js).
export declare function newRunId(): string;
export declare function newStageRunId(): string;
export declare function newArtifactId(): string;
export declare function newToolRunId(): string;
export declare function newEventId(): string;
export declare function newSnapshotId(): string;
export declare function newBatchId(): string;
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
import { randomUUID } from "node:crypto";
|
|
2
|
+
export function newId(prefix) {
|
|
3
|
+
return `${prefix}_${randomUUID()}`;
|
|
4
|
+
}
|
|
5
|
+
export function newRunId() {
|
|
6
|
+
return newId("run");
|
|
7
|
+
}
|
|
8
|
+
export function newStageRunId() {
|
|
9
|
+
return newId("stage");
|
|
10
|
+
}
|
|
11
|
+
export function newArtifactId() {
|
|
12
|
+
return newId("art");
|
|
13
|
+
}
|
|
14
|
+
export function newToolRunId() {
|
|
15
|
+
return newId("tool");
|
|
16
|
+
}
|
|
17
|
+
export function newEventId() {
|
|
18
|
+
return newId("evt");
|
|
19
|
+
}
|
|
20
|
+
export function newSnapshotId() {
|
|
21
|
+
return newId("snap");
|
|
22
|
+
}
|
|
23
|
+
export function newBatchId() {
|
|
24
|
+
return newId("batch");
|
|
25
|
+
}
|
|
@@ -0,0 +1,2 @@
|
|
|
1
|
+
/** Current schema version; persisted in repo_state.schema_version and compared on open to trigger migration. */
export declare const SCHEMA_VERSION = 2;
/** Full DDL for the Astrocode state DB. Additive and idempotent (every CREATE uses IF NOT EXISTS), so it can be re-run safely on existing databases. */
export declare const SCHEMA_SQL = "\nPRAGMA foreign_keys = ON;\n\nCREATE TABLE IF NOT EXISTS repo_state (\n id INTEGER PRIMARY KEY CHECK (id = 1),\n schema_version INTEGER NOT NULL,\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL,\n spec_hash_before TEXT,\n spec_hash_after TEXT,\n last_run_id TEXT,\n last_story_key TEXT,\n last_event_at TEXT\n);\n\nCREATE TABLE IF NOT EXISTS settings (\n key TEXT PRIMARY KEY,\n value TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS epics (\n epic_key TEXT PRIMARY KEY,\n title TEXT NOT NULL,\n body_md TEXT NOT NULL DEFAULT '',\n state TEXT NOT NULL DEFAULT 'active',\n priority INTEGER NOT NULL DEFAULT 0,\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS story_drafts (\n draft_id TEXT PRIMARY KEY,\n title TEXT NOT NULL,\n body_md TEXT NOT NULL DEFAULT '',\n meta_json TEXT NOT NULL DEFAULT '{}',\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS story_keyseq (\n id INTEGER PRIMARY KEY CHECK (id = 1),\n next_story_num INTEGER NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS stories (\n story_key TEXT PRIMARY KEY,\n epic_key TEXT,\n title TEXT NOT NULL,\n body_md TEXT NOT NULL DEFAULT '',\n state TEXT NOT NULL DEFAULT 'queued', -- queued|approved|in_progress|done|blocked|archived\n priority INTEGER NOT NULL DEFAULT 0,\n approved_at TEXT,\n locked_by_run_id TEXT,\n locked_at TEXT,\n in_progress INTEGER NOT NULL DEFAULT 0,\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL,\n FOREIGN KEY (epic_key) REFERENCES epics(epic_key)\n);\n\nCREATE TABLE IF NOT EXISTS runs (\n run_id TEXT PRIMARY KEY,\n story_key TEXT NOT NULL,\n status TEXT NOT NULL DEFAULT 'created', -- created|running|completed|failed|aborted\n pipeline_stages_json TEXT NOT NULL DEFAULT '[]',\n current_stage_key TEXT,\n created_at TEXT NOT NULL,\n started_at TEXT,\n completed_at TEXT,\n updated_at TEXT NOT NULL,\n error_text TEXT,\n FOREIGN KEY (story_key) REFERENCES stories(story_key)\n);\n\nCREATE TABLE IF NOT EXISTS stage_runs (\n stage_run_id TEXT PRIMARY KEY,\n run_id TEXT NOT NULL,\n stage_key TEXT NOT NULL,\n stage_index INTEGER NOT NULL,\n status TEXT NOT NULL DEFAULT 'pending', -- pending|running|completed|failed|skipped\n subagent_type TEXT,\n subagent_session_id TEXT,\n started_at TEXT,\n completed_at TEXT,\n updated_at TEXT NOT NULL,\n baton_path TEXT,\n summary_md TEXT,\n output_json TEXT,\n error_text TEXT,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS artifacts (\n artifact_id TEXT PRIMARY KEY,\n run_id TEXT,\n stage_key TEXT,\n type TEXT NOT NULL, -- plan|baton|evidence|diff|log|summary|commit|tool_output|snapshot\n path TEXT NOT NULL,\n sha256 TEXT,\n meta_json TEXT NOT NULL DEFAULT '{}',\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS tool_runs (\n tool_run_id TEXT PRIMARY KEY,\n run_id TEXT,\n stage_key TEXT,\n tool_name TEXT NOT NULL,\n args_json TEXT NOT NULL DEFAULT '{}',\n output_summary TEXT NOT NULL DEFAULT '',\n output_artifact_id TEXT,\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS events (\n event_id TEXT PRIMARY KEY,\n run_id TEXT,\n stage_key TEXT,\n type TEXT NOT NULL,\n body_json TEXT NOT NULL DEFAULT '{}',\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS injects (\n inject_id TEXT PRIMARY KEY,\n type TEXT NOT NULL DEFAULT 'note',\n title TEXT NOT NULL,\n body_md TEXT NOT NULL,\n tags_json TEXT NOT NULL DEFAULT '[]',\n scope TEXT NOT NULL DEFAULT 'repo', -- repo|run:<id>|story:<key>|global\n source TEXT NOT NULL DEFAULT 'user', -- user|tool|agent|import\n priority INTEGER NOT NULL DEFAULT 50,\n expires_at TEXT,\n sha256 TEXT,\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\nCREATE TABLE IF NOT EXISTS running_batches (\n batch_id TEXT PRIMARY KEY,\n run_id TEXT,\n session_id TEXT,\n status TEXT NOT NULL DEFAULT 'running', -- running|completed|failed|aborted\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS workflow_metrics (\n metric_id TEXT PRIMARY KEY,\n run_id TEXT,\n stage_key TEXT,\n name TEXT NOT NULL,\n value_num REAL,\n value_text TEXT,\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE TABLE IF NOT EXISTS template_intents (\n intent_key TEXT PRIMARY KEY,\n body_md TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\n-- vNext tables\n\nCREATE TABLE IF NOT EXISTS story_relations (\n parent_story_key TEXT NOT NULL,\n child_story_key TEXT NOT NULL,\n relation_type TEXT NOT NULL DEFAULT 'split',\n reason TEXT NOT NULL DEFAULT '',\n created_at TEXT NOT NULL,\n PRIMARY KEY (parent_story_key, child_story_key),\n FOREIGN KEY (parent_story_key) REFERENCES stories(story_key),\n FOREIGN KEY (child_story_key) REFERENCES stories(story_key)\n);\n\nCREATE TABLE IF NOT EXISTS continuations (\n continuation_id INTEGER PRIMARY KEY AUTOINCREMENT,\n session_id TEXT NOT NULL,\n run_id TEXT,\n directive_hash TEXT NOT NULL,\n kind TEXT NOT NULL, -- continue|stage|blocked|repair\n reason TEXT NOT NULL DEFAULT '',\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE INDEX IF NOT EXISTS idx_continuations_session_created ON continuations(session_id, created_at DESC);\nCREATE INDEX IF NOT EXISTS idx_continuations_run_created ON continuations(run_id, created_at DESC);\n\nCREATE TABLE IF NOT EXISTS context_snapshots (\n snapshot_id TEXT PRIMARY KEY,\n run_id TEXT NOT NULL,\n stage_key TEXT NOT NULL,\n summary_md TEXT NOT NULL,\n created_at TEXT NOT NULL,\n FOREIGN KEY (run_id) REFERENCES runs(run_id)\n);\n\nCREATE INDEX IF NOT EXISTS idx_context_snapshots_run_created ON context_snapshots(run_id, created_at DESC);\n\nCREATE TABLE IF NOT EXISTS agent_sessions (\n session_id TEXT PRIMARY KEY,\n parent_session_id TEXT,\n agent_name TEXT NOT NULL,\n run_id TEXT,\n stage_key TEXT,\n status TEXT NOT NULL DEFAULT 'active',\n created_at TEXT NOT NULL,\n updated_at TEXT NOT NULL\n);\n\n-- Indexes\n\nCREATE INDEX IF NOT EXISTS idx_stories_state ON stories(state);\nCREATE INDEX IF NOT EXISTS idx_runs_story ON runs(story_key);\nCREATE INDEX IF NOT EXISTS idx_runs_status ON runs(status);\nCREATE INDEX IF NOT EXISTS idx_stage_runs_run ON stage_runs(run_id, stage_index);\nCREATE INDEX IF NOT EXISTS idx_artifacts_run_stage ON artifacts(run_id, stage_key, created_at DESC);\nCREATE INDEX IF NOT EXISTS idx_events_run ON events(run_id, created_at DESC);\nCREATE INDEX IF NOT EXISTS idx_tool_runs_run ON tool_runs(run_id, created_at DESC);\nCREATE INDEX IF NOT EXISTS idx_injects_scope_priority ON injects(scope, priority DESC, created_at DESC);\n\n-- Stronger invariants (SQLite partial indexes)\n-- Only one run may be 'running' at a time (single-repo harness by default).\nCREATE UNIQUE INDEX IF NOT EXISTS uniq_single_running_run\n ON runs(status)\n WHERE status = 'running';\n\n-- Only one story may be in_progress=1 at a time (pairs with single running run).\nCREATE UNIQUE INDEX IF NOT EXISTS uniq_single_in_progress_story\n ON stories(in_progress)\n WHERE in_progress = 1;\n\n";
|